New branch for merging LSM and fullstack.

git-svn-id: https://hyracks.googlecode.com/svn/branches/fullstack_lsm_staging@3014 123451ca-8445-de46-9d55-352943316053
diff --git a/algebricks/algebricks-common/pom.xml b/algebricks/algebricks-common/pom.xml
new file mode 100644
index 0000000..c93fe3e
--- /dev/null
+++ b/algebricks/algebricks-common/pom.xml
@@ -0,0 +1,32 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-common</artifactId>
+  <name>algebricks-common</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-api</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksAbsolutePartitionConstraint.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksAbsolutePartitionConstraint.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksAbsolutePartitionConstraint.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksAbsolutePartitionConstraint.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksCountPartitionConstraint.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksCountPartitionConstraint.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksCountPartitionConstraint.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksCountPartitionConstraint.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraint.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraint.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraint.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraint.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraintHelper.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraintHelper.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraintHelper.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/constraints/AlgebricksPartitionConstraintHelper.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/AlgebricksException.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/AlgebricksException.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/AlgebricksException.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/AlgebricksException.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/NotImplementedException.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/NotImplementedException.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/NotImplementedException.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/exceptions/NotImplementedException.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/ListSet.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/ListSet.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/ListSet.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/ListSet.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Pair.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Pair.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Pair.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Pair.java
diff --git a/hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Triple.java b/algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Triple.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Triple.java
rename to algebricks/algebricks-common/src/main/java/edu/uci/ics/hyracks/algebricks/common/utils/Triple.java
diff --git a/algebricks/algebricks-compiler/pom.xml b/algebricks/algebricks-compiler/pom.xml
new file mode 100644
index 0000000..f1f4521
--- /dev/null
+++ b/algebricks/algebricks-compiler/pom.xml
@@ -0,0 +1,37 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-compiler</artifactId>
+  <name>algebricks-compiler</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>algebricks-rewriter</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>algebricks-core</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java
new file mode 100644
index 0000000..dde4443
--- /dev/null
+++ b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java
@@ -0,0 +1,229 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.compiler.api;
+
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IPartialAggregationTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
+import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+
+public abstract class AbstractCompilerFactoryBuilder {
+
+    protected List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> logicalRewrites;
+    protected List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> physicalRewrites;
+    protected ITypeTraitProvider typeTraitProvider;
+    protected ISerializerDeserializerProvider serializerDeserializerProvider;
+    protected IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider;
+    protected IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider;
+    protected IBinaryComparatorFactoryProvider comparatorFactoryProvider;
+    protected IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory;
+    protected IBinaryIntegerInspectorFactory binaryIntegerInspectorFactory;
+    protected IPrinterFactoryProvider printerProvider;
+    protected IExpressionRuntimeProvider expressionRuntimeProvider;
+    protected IExpressionTypeComputer expressionTypeComputer;
+    protected INullableTypeComputer nullableTypeComputer;
+    protected IExpressionEvalSizeComputer expressionEvalSizeComputer;
+    protected INullWriterFactory nullWriterFactory;
+    protected INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider;
+    protected IPartialAggregationTypeComputer partialAggregationTypeComputer;
+    protected IMergeAggregationExpressionFactory mergeAggregationExpressionFactory;
+    protected PhysicalOptimizationConfig physicalOptimizationConfig = new PhysicalOptimizationConfig();
+    protected AlgebricksPartitionConstraint clusterLocations;
+    protected int frameSize = -1;
+
+    public abstract ICompilerFactory create();
+
+    public void setLogicalRewrites(List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> logicalRewrites) {
+        this.logicalRewrites = logicalRewrites;
+    }
+
+    public void setPhysicalRewrites(List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> physicalRewrites) {
+        this.physicalRewrites = physicalRewrites;
+    }
+
+    public void setTypeTraitProvider(ITypeTraitProvider typeTraitProvider) {
+        this.typeTraitProvider = typeTraitProvider;
+    }
+
+    public ITypeTraitProvider getTypeTraitProvider() {
+        return typeTraitProvider;
+    }
+
+    public void setSerializerDeserializerProvider(ISerializerDeserializerProvider serializerDeserializerProvider) {
+        this.serializerDeserializerProvider = serializerDeserializerProvider;
+    }
+
+    public ISerializerDeserializerProvider getSerializerDeserializerProvider() {
+        return serializerDeserializerProvider;
+    }
+
+    public void setHashFunctionFactoryProvider(IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider) {
+        this.hashFunctionFactoryProvider = hashFunctionFactoryProvider;
+    }
+
+    public IBinaryHashFunctionFactoryProvider getHashFunctionFactoryProvider() {
+        return hashFunctionFactoryProvider;
+    }
+
+    public void setHashFunctionFamilyProvider(IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider) {
+        this.hashFunctionFamilyProvider = hashFunctionFamilyProvider;
+    }
+
+    public IBinaryHashFunctionFamilyProvider getHashFunctionFamilyProvider() {
+        return hashFunctionFamilyProvider;
+    }
+
+    public void setComparatorFactoryProvider(IBinaryComparatorFactoryProvider comparatorFactoryProvider) {
+        this.comparatorFactoryProvider = comparatorFactoryProvider;
+    }
+
+    public IBinaryComparatorFactoryProvider getComparatorFactoryProvider() {
+        return comparatorFactoryProvider;
+    }
+
+    public void setBinaryBooleanInspectorFactory(IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory) {
+        this.binaryBooleanInspectorFactory = binaryBooleanInspectorFactory;
+    }
+
+    public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
+        return binaryBooleanInspectorFactory;
+    }
+
+    public void setBinaryIntegerInspectorFactory(IBinaryIntegerInspectorFactory binaryIntegerInspectorFactory) {
+        this.binaryIntegerInspectorFactory = binaryIntegerInspectorFactory;
+    }
+
+    public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
+        return binaryIntegerInspectorFactory;
+    }
+
+    public void setPrinterProvider(IPrinterFactoryProvider printerProvider) {
+        this.printerProvider = printerProvider;
+    }
+
+    public IPrinterFactoryProvider getPrinterProvider() {
+        return printerProvider;
+    }
+
+    public void setExpressionRuntimeProvider(IExpressionRuntimeProvider expressionRuntimeProvider) {
+        this.expressionRuntimeProvider = expressionRuntimeProvider;
+    }
+
+    public IExpressionRuntimeProvider getExpressionRuntimeProvider() {
+        return expressionRuntimeProvider;
+    }
+
+    public void setExpressionTypeComputer(IExpressionTypeComputer expressionTypeComputer) {
+        this.expressionTypeComputer = expressionTypeComputer;
+    }
+
+    public IExpressionTypeComputer getExpressionTypeComputer() {
+        return expressionTypeComputer;
+    }
+
+    public void setClusterLocations(AlgebricksPartitionConstraint clusterLocations) {
+        this.clusterLocations = clusterLocations;
+    }
+
+    public AlgebricksPartitionConstraint getClusterLocations() {
+        return clusterLocations;
+    }
+
+    public void setNullWriterFactory(INullWriterFactory nullWriterFactory) {
+        this.nullWriterFactory = nullWriterFactory;
+    }
+
+    public INullWriterFactory getNullWriterFactory() {
+        return nullWriterFactory;
+    }
+
+    public void setExpressionEvalSizeComputer(IExpressionEvalSizeComputer expressionEvalSizeComputer) {
+        this.expressionEvalSizeComputer = expressionEvalSizeComputer;
+    }
+
+    public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
+        return expressionEvalSizeComputer;
+    }
+
+    public void setNormalizedKeyComputerFactoryProvider(
+            INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider) {
+        this.normalizedKeyComputerFactoryProvider = normalizedKeyComputerFactoryProvider;
+    }
+
+    public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
+        return normalizedKeyComputerFactoryProvider;
+    }
+
+    public void setFrameSize(int frameSize) {
+        this.frameSize = frameSize;
+    }
+
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+    public IPartialAggregationTypeComputer getPartialAggregationTypeComputer() {
+        return partialAggregationTypeComputer;
+    }
+
+    public void setPartialAggregationTypeComputer(IPartialAggregationTypeComputer partialAggregationTypeComputer) {
+        this.partialAggregationTypeComputer = partialAggregationTypeComputer;
+    }
+
+    public IMergeAggregationExpressionFactory getIMergeAggregationExpressionFactory() {
+        return mergeAggregationExpressionFactory;
+    }
+
+    public void setIMergeAggregationExpressionFactory(
+            IMergeAggregationExpressionFactory mergeAggregationExpressionFactory) {
+        this.mergeAggregationExpressionFactory = mergeAggregationExpressionFactory;
+    }
+
+    public PhysicalOptimizationConfig getPhysicalOptimizationConfig() {
+        return physicalOptimizationConfig;
+    }
+
+    public void setPhysicalOptimizationConfig(PhysicalOptimizationConfig physicalOptimizationConfig) {
+        this.physicalOptimizationConfig = physicalOptimizationConfig;
+    }
+
+    public void setNullableTypeComputer(INullableTypeComputer nullableTypeComputer) {
+        this.nullableTypeComputer = nullableTypeComputer;
+    }
+
+    public INullableTypeComputer getNullableTypeComputer() {
+        return nullableTypeComputer;
+    }
+
+}
diff --git a/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
new file mode 100644
index 0000000..edc1b66
--- /dev/null
+++ b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.compiler.api;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.core.config.AlgebricksConfig;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.PlanCompiler;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AlgebricksOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.HeuristicOptimizer;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IOptimizationContextFactory;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class HeuristicCompilerFactoryBuilder extends AbstractCompilerFactoryBuilder {
+
+    public static class DefaultOptimizationContextFactory implements IOptimizationContextFactory {
+
+        public static final DefaultOptimizationContextFactory INSTANCE = new DefaultOptimizationContextFactory();
+
+        private DefaultOptimizationContextFactory() {
+        }
+
+        @Override
+        public IOptimizationContext createOptimizationContext(int varCounter, int frameSize,
+                IExpressionEvalSizeComputer expressionEvalSizeComputer,
+                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
+                IExpressionTypeComputer expressionTypeComputer, INullableTypeComputer nullableTypeComputer,
+                PhysicalOptimizationConfig physicalOptimizationConfig) {
+            return new AlgebricksOptimizationContext(varCounter, frameSize, expressionEvalSizeComputer,
+                    mergeAggregationExpressionFactory, expressionTypeComputer, nullableTypeComputer,
+                    physicalOptimizationConfig);
+        }
+    }
+
+    private IOptimizationContextFactory optCtxFactory;
+
+    public HeuristicCompilerFactoryBuilder() {
+        this.optCtxFactory = DefaultOptimizationContextFactory.INSTANCE;
+    }
+
+    public HeuristicCompilerFactoryBuilder(IOptimizationContextFactory optCtxFactory) {
+        this.optCtxFactory = optCtxFactory;
+    }
+
+    @Override
+    public ICompilerFactory create() {
+        return new ICompilerFactory() {
+            @Override
+            public ICompiler createCompiler(final ILogicalPlan plan, final IMetadataProvider<?, ?> metadata,
+                    int varCounter) {
+                final IOptimizationContext oc = optCtxFactory.createOptimizationContext(varCounter, frameSize,
+                        expressionEvalSizeComputer, mergeAggregationExpressionFactory, expressionTypeComputer,
+                        nullableTypeComputer, physicalOptimizationConfig);
+                oc.setMetadataDeclarations(metadata);
+                final HeuristicOptimizer opt = new HeuristicOptimizer(plan, logicalRewrites, physicalRewrites, oc);
+                return new ICompiler() {
+
+                    @Override
+                    public void optimize() throws AlgebricksException {
+                        opt.optimize();
+                    }
+
+                    @Override
+                    public JobSpecification createJob(Object appContext) throws AlgebricksException {
+                        AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Starting Job Generation.\n");
+                        JobGenContext context = new JobGenContext(null, metadata, appContext,
+                                serializerDeserializerProvider, hashFunctionFactoryProvider,
+                                hashFunctionFamilyProvider, comparatorFactoryProvider, typeTraitProvider,
+                                binaryBooleanInspectorFactory, binaryIntegerInspectorFactory, printerProvider,
+                                nullWriterFactory, normalizedKeyComputerFactoryProvider, expressionRuntimeProvider,
+                                expressionTypeComputer, nullableTypeComputer, oc, expressionEvalSizeComputer,
+                                partialAggregationTypeComputer, frameSize, clusterLocations);
+                        PlanCompiler pc = new PlanCompiler(context);
+                        return pc.compilePlan(plan, null);
+                    }
+                };
+            }
+        };
+    }
+
+}
diff --git a/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompiler.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompiler.java
new file mode 100644
index 0000000..9146722
--- /dev/null
+++ b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompiler.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.compiler.api;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public interface ICompiler {
+    public void optimize() throws AlgebricksException;
+
+    public JobSpecification createJob(Object appContext) throws AlgebricksException;
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompilerFactory.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompilerFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompilerFactory.java
rename to algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompilerFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/PrioritizedRuleController.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/PrioritizedRuleController.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/PrioritizedRuleController.java
rename to algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/PrioritizedRuleController.java
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFixpointRuleController.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFixpointRuleController.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFixpointRuleController.java
rename to algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFixpointRuleController.java
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialOnceRuleController.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialOnceRuleController.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialOnceRuleController.java
rename to algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialOnceRuleController.java
diff --git a/algebricks/algebricks-core/pom.xml b/algebricks/algebricks-core/pom.xml
new file mode 100644
index 0000000..118ea57
--- /dev/null
+++ b/algebricks/algebricks-core/pom.xml
@@ -0,0 +1,52 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-core</artifactId>
+  <name>algebricks-core</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-storage-am-btree</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-storage-am-rtree</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-dataflow-std</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>algebricks-runtime</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>algebricks-common</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/Counter.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/Counter.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/Counter.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/Counter.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/EquivalenceClass.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/EquivalenceClass.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/EquivalenceClass.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/EquivalenceClass.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalExpression.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlan.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlan.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlan.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlan.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlanAndMetadata.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlanAndMetadata.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlanAndMetadata.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalPlanAndMetadata.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalVariable.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalVariable.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalVariable.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalVariable.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/OperatorAnnotations.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/OperatorAnnotations.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/OperatorAnnotations.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/OperatorAnnotations.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
new file mode 100644
index 0000000..a969372
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
@@ -0,0 +1,48 @@
+package edu.uci.ics.hyracks.algebricks.core.algebra.base;
+
+public enum PhysicalOperatorTag {
+    AGGREGATE,
+    ASSIGN,
+    BROADCAST_EXCHANGE,
+    BTREE_SEARCH,
+    STATS,
+    DATASOURCE_SCAN,
+    EMPTY_TUPLE_SOURCE,
+    EXTERNAL_GROUP_BY,
+    IN_MEMORY_HASH_JOIN,
+    HASH_GROUP_BY,
+    HASH_PARTITION_EXCHANGE,
+    HASH_PARTITION_MERGE_EXCHANGE,
+    HYBRID_HASH_JOIN,
+    HDFS_READER,
+    IN_MEMORY_STABLE_SORT,
+    MICRO_PRE_CLUSTERED_GROUP_BY,
+    NESTED_LOOP,
+    NESTED_TUPLE_SOURCE,
+    ONE_TO_ONE_EXCHANGE,
+    PRE_SORTED_DISTINCT_BY,
+    PRE_CLUSTERED_GROUP_BY,
+    RANGE_PARTITION_EXCHANGE,
+    RANDOM_MERGE_EXCHANGE,
+    RTREE_SEARCH,
+    RUNNING_AGGREGATE,
+    SORT_MERGE_EXCHANGE,
+    SINK,
+    SINK_WRITE,
+    SPLIT,
+    STABLE_SORT,
+    STREAM_LIMIT,
+    STREAM_DIE,
+    STREAM_SELECT,
+    STREAM_PROJECT,
+    STRING_STREAM_SCRIPT,
+    SUBPLAN,
+    UNION_ALL,
+    UNNEST,
+    WRITE_RESULT,
+    INSERT_DELETE,
+    INDEX_INSERT_DELETE,
+	UPDATE,
+    INVERTED_INDEX_SEARCH,
+    PARTITIONINGSPLIT
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractFunctionCallExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractFunctionCallExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractFunctionCallExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractFunctionCallExpression.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractLogicalExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractLogicalExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractLogicalExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AbstractLogicalExpression.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AggregateFunctionCallExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AggregateFunctionCallExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AggregateFunctionCallExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/AggregateFunctionCallExpression.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/BroadcastExpressionAnnotation.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/BroadcastExpressionAnnotation.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/BroadcastExpressionAnnotation.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/BroadcastExpressionAnnotation.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ExpressionAnnotationNoCopyImpl.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ExpressionAnnotationNoCopyImpl.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ExpressionAnnotationNoCopyImpl.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ExpressionAnnotationNoCopyImpl.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IAlgebricksConstantValue.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IAlgebricksConstantValue.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IAlgebricksConstantValue.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IAlgebricksConstantValue.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionAnnotation.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionAnnotation.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionAnnotation.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionAnnotation.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionEvalSizeComputer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionEvalSizeComputer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionEvalSizeComputer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionEvalSizeComputer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionRuntimeProvider.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionRuntimeProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionRuntimeProvider.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionRuntimeProvider.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionTypeComputer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionTypeComputer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionTypeComputer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IExpressionTypeComputer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ILogicalExpressionJobGen.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ILogicalExpressionJobGen.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ILogicalExpressionJobGen.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ILogicalExpressionJobGen.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/INullableTypeComputer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/INullableTypeComputer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/INullableTypeComputer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/INullableTypeComputer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IPartialAggregationTypeComputer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IPartialAggregationTypeComputer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IPartialAggregationTypeComputer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IPartialAggregationTypeComputer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableEvalSizeEnvironment.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableEvalSizeEnvironment.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableEvalSizeEnvironment.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableEvalSizeEnvironment.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableTypeEnvironment.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableTypeEnvironment.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableTypeEnvironment.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IVariableTypeEnvironment.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IndexedNLJoinExpressionAnnotation.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IndexedNLJoinExpressionAnnotation.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IndexedNLJoinExpressionAnnotation.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/IndexedNLJoinExpressionAnnotation.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/StatefulFunctionCallExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/StatefulFunctionCallExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/StatefulFunctionCallExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/StatefulFunctionCallExpression.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/VariableReferenceExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/VariableReferenceExpression.java
new file mode 100644
index 0000000..76a7ec6
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/VariableReferenceExpression.java
@@ -0,0 +1,110 @@
/*
 * Copyright 2009-2010 by The Regents of the University of California
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * you may obtain a copy of the License from
 * 
 *     http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.uci.ics.hyracks.algebricks.core.algebra.expressions;

import java.util.Collection;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;

/**
 * A logical expression that references a single {@link LogicalVariable},
 * optionally qualified by a tuple reference index ({@code tupleRef},
 * defaulting to 0). Both fields participate in {@link #equals(Object)} and
 * {@link #hashCode()}.
 */
public class VariableReferenceExpression extends AbstractLogicalExpression {
    private int tupleRef;
    private LogicalVariable variable;

    public VariableReferenceExpression(int tupleRef, LogicalVariable variable) {
        this.tupleRef = tupleRef;
        this.variable = variable;
    }

    /** Convenience constructor for the common case {@code tupleRef == 0}. */
    public VariableReferenceExpression(LogicalVariable variable) {
        this(0, variable);
    }

    public int getTupleRef() {
        return tupleRef;
    }

    public void setTupleRef(int tupleRef) {
        this.tupleRef = tupleRef;
    }

    public LogicalVariable getVariableReference() {
        return variable;
    }

    public void setVariable(LogicalVariable variable) {
        this.variable = variable;
    }

    @Override
    public LogicalExpressionTag getExpressionTag() {
        return LogicalExpressionTag.VARIABLE;
    }

    @Override
    public String toString() {
        return "%" + tupleRef + "->" + variable.toString();
    }

    @Override
    public void getUsedVariables(Collection<LogicalVariable> vars) {
        // Intentionally adds unconditionally; callers may pass collections
        // that tolerate duplicates (a Set would dedupe anyway).
        vars.add(variable);
    }

    @Override
    public void substituteVar(LogicalVariable v1, LogicalVariable v2) {
        if (variable.equals(v1)) {
            variable = v2;
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof VariableReferenceExpression)) {
            return false;
        } else {
            return tupleRef == ((VariableReferenceExpression) obj).tupleRef
                    && variable.equals(((VariableReferenceExpression) obj).getVariableReference());
        }
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): derived from both tupleRef and variable id.
        return tupleRef + variable.getId();
    }

    @Override
    public <R, T> R accept(ILogicalExpressionVisitor<R, T> visitor, T arg) throws AlgebricksException {
        return visitor.visitVariableReferenceExpression(this, arg);
    }

    @Override
    public AbstractLogicalExpression cloneExpression() {
        // BUG FIX: the original cloned via the single-arg constructor, which
        // resets tupleRef to 0 — so a clone of an expression with a non-zero
        // tupleRef was not equals() to the original. Preserve tupleRef here.
        return new VariableReferenceExpression(tupleRef, variable);
    }

    @Override
    public boolean splitIntoConjuncts(List<Mutable<ILogicalExpression>> conjs) {
        // A bare variable reference is atomic: it is never a conjunction.
        return false;
    }
}
\ No newline at end of file
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/FunctionIdentifier.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/FunctionIdentifier.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/FunctionIdentifier.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/FunctionIdentifier.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/IFunctionInfo.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/IFunctionInfo.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/IFunctionInfo.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/functions/IFunctionInfo.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSink.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSink.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSink.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSink.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSource.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSource.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSource.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSource.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourceIndex.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourceIndex.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourceIndex.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourceIndex.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourcePropertiesProvider.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourcePropertiesProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourcePropertiesProvider.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IDataSourcePropertiesProvider.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
new file mode 100644
index 0000000..899b633
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.metadata;
+
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public interface IMetadataProvider<S, I> {
+    public IDataSource<S> findDataSource(S id) throws AlgebricksException;
+
+    /**
+     * Note: A scanner may choose to contribute a null
+     * AlgebricksPartitionConstraint and implement
+     * contributeSchedulingConstraints instead.
+     */
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(IDataSource<S> dataSource,
+            List<LogicalVariable> scanVariables, List<LogicalVariable> projectVariables, boolean projectPushed,
+            IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec)
+            throws AlgebricksException;
+
+    public boolean scannerOperatorIsLeaf(IDataSource<S> dataSource);
+
+    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,
+            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc)
+            throws AlgebricksException;
+
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(IDataSource<S> dataSource,
+            IOperatorSchema propagatedSchema, List<LogicalVariable> keys, LogicalVariable payLoadVar,
+            JobGenContext context, JobSpecification jobSpec) throws AlgebricksException;
+
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(IDataSource<S> dataSource,
+            IOperatorSchema propagatedSchema, List<LogicalVariable> keys, LogicalVariable payLoadVar,
+            RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException;
+
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(IDataSource<S> dataSource,
+            IOperatorSchema propagatedSchema, List<LogicalVariable> keys, LogicalVariable payLoadVar,
+            RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException;
+
+    /**
+     * Creates the insert runtime of IndexInsertDeletePOperator, which models
+     * insert/delete operations into a secondary index.
+     * 
+     * @param dataSource
+     *            Target secondary index.
+     * @param propagatedSchema
+     *            Output schema of the insert/delete operator to be created.
+     * @param inputSchemas
+     *            Input schemas of the insert/delete operator to be created.
+     * @param typeEnv
+     *            Type environment of the original IndexInsertDeleteOperator operator.
+     * @param primaryKeys
+     *            Variables for the dataset's primary keys that the dataSource secondary index belongs to.
+     * @param secondaryKeys
+     *            Variables for the secondary-index keys.
+     * @param filterExpr
+     *            Filtering expression to be pushed inside the runtime op.
+     *            Such a filter may, e.g., exclude NULLs from being inserted/deleted.
+     * @param recordDesc
+     *            Output record descriptor of the runtime op to be created.
+     * @param context
+     *            Job generation context.
+     * @param spec
+     *            Target job specification.
+     * @return
+     *         A Hyracks IOperatorDescriptor and its partition constraint.
+     * @throws AlgebricksException
+     */
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(
+            IDataSourceIndex<I, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,
+            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
+            throws AlgebricksException;
+
+    /**
+     * Creates the delete runtime of IndexInsertDeletePOperator, which models
+     * insert/delete operations into a secondary index.
+     * 
+     * @param dataSource
+     *            Target secondary index.
+     * @param propagatedSchema
+     *            Output schema of the insert/delete operator to be created.
+     * @param inputSchemas
+     *            Input schemas of the insert/delete operator to be created.
+     * @param typeEnv
+     *            Type environment of the original IndexInsertDeleteOperator operator.
+     * @param primaryKeys
+     *            Variables for the dataset's primary keys that the dataSource secondary index belongs to.
+     * @param secondaryKeys
+     *            Variables for the secondary-index keys.
+     * @param filterExpr
+     *            Filtering expression to be pushed inside the runtime op.
+     *            Such a filter may, e.g., exclude NULLs from being inserted/deleted.
+     * @param recordDesc
+     *            Output record descriptor of the runtime op to be created.
+     * @param context
+     *            Job generation context.
+     * @param spec
+     *            Target job specification.
+     * @return
+     *         A Hyracks IOperatorDescriptor and its partition constraint.
+     * @throws AlgebricksException
+     */
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(
+            IDataSourceIndex<I, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,
+            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
+            throws AlgebricksException;
+
+    public IDataSourceIndex<I, S> findDataSourceIndex(I indexId, S dataSourceId) throws AlgebricksException;
+
+    public IFunctionInfo lookupFunction(FunctionIdentifier fid);
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractAssignOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractAssignOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractAssignOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractAssignOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractBinaryJoinOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractBinaryJoinOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractBinaryJoinOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractBinaryJoinOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractExtensibleLogicalOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractExtensibleLogicalOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractExtensibleLogicalOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractExtensibleLogicalOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractOperatorWithNestedPlans.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractOperatorWithNestedPlans.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractOperatorWithNestedPlans.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractOperatorWithNestedPlans.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractUnnestOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractUnnestOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractUnnestOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractUnnestOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/EmptyTupleSourceOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/EmptyTupleSourceOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/EmptyTupleSourceOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/EmptyTupleSourceOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExchangeOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExchangeOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExchangeOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExchangeOperator.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java
new file mode 100644
index 0000000..5aa858f
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
+import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+
+/**
+ * @author rico
+ */
+public class ExtensionOperator extends AbstractLogicalOperator {
+
+    private IOperatorExtension delegate;
+
+    public ExtensionOperator(IOperatorExtension delegate) {
+        super();
+        if (delegate == null) {
+            throw new IllegalArgumentException("delegate cannot be null!");
+        }
+        this.delegate = delegate;
+        setExecutionMode(delegate.getExecutionMode());
+    }
+
+    @Override
+    public void recomputeSchema() throws AlgebricksException {
+        schema = new ArrayList<LogicalVariable>(inputs.get(0).getValue().getSchema());
+        delegate.setSchema(schema);
+    }
+
+    @Override
+    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
+        return delegate.acceptExpressionTransform(transform);
+    }
+
+    @Override
+    public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
+        return visitor.visitExtensionOperator(this, arg);
+    }
+
+    @Override
+    public boolean isMap() {
+        return this.delegate.isMap();
+    }
+
+    @Override
+    public VariablePropagationPolicy getVariablePropagationPolicy() {
+        return VariablePropagationPolicy.ALL;
+    }
+
+    @Override
+    public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
+        return this.createPropagatingAllInputsTypeEnvironment(ctx);
+    }
+
+    @Override
+    public LogicalOperatorTag getOperatorTag() {
+        return LogicalOperatorTag.EXTENSION_OPERATOR;
+    }
+
+    public IOperatorExtension getNewInstanceOfDelegateOperator() {
+        return delegate.newInstance();
+    }
+
+    @Override
+    public List<LogicalVariable> getSchema() {
+        return this.schema;
+    }
+
+    @Override
+    public ExecutionMode getExecutionMode() {
+        return delegate.getExecutionMode();
+    }
+
+    @Override
+    public void setExecutionMode(ExecutionMode mode) {
+        delegate.setExecutionMode(mode);
+    }
+
+    @Override
+    public IPhysicalOperator getPhysicalOperator() {
+        return delegate.getPhysicalOperator();
+    }
+
+    @Override
+    public IVariableTypeEnvironment computeInputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
+        return this.createPropagatingAllInputsTypeEnvironment(ctx);
+    }
+
+    @Override
+    public String toString() {
+        return delegate.toString();
+    }
+    
+    public IOperatorExtension getDelegate() {
+        return delegate;
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorSchema.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorSchema.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorSchema.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorSchema.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InnerJoinOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InnerJoinOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InnerJoinOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InnerJoinOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InsertDeleteOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InsertDeleteOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InsertDeleteOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/InsertDeleteOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/NestedTupleSourceOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/NestedTupleSourceOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/NestedTupleSourceOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/NestedTupleSourceOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/OrderOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/OrderOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/OrderOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/OrderOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/PartitioningSplitOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/PartitioningSplitOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/PartitioningSplitOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/PartitioningSplitOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ProjectOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ProjectOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ProjectOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ProjectOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SinkOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SinkOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SinkOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SinkOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SubplanOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SubplanOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SubplanOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SubplanOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnionAllOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnionAllOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnionAllOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnionAllOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestMapOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestMapOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestMapOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestMapOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UnnestOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteResultOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteResultOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteResultOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/WriteResultOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
new file mode 100644
index 0000000..b97597d
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -0,0 +1,825 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+
+public class IsomorphismOperatorVisitor implements ILogicalOperatorVisitor<Boolean, ILogicalOperator> {
+
+    private Map<LogicalVariable, LogicalVariable> variableMapping = new HashMap<LogicalVariable, LogicalVariable>();
+
+    public IsomorphismOperatorVisitor() {
+    }
+
+    /**
+     * Checks isomorphism with an AGGREGATE operator: after rewriting arg's
+     * variables into op's variable space, the produced (variable, expression)
+     * pairs must match as an unordered list.
+     */
+    @Override
+    public Boolean visitAggregateOperator(AggregateOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.AGGREGATE)
+            return Boolean.FALSE;
+        // Deep-copy arg and substitute its variables so expressions compare
+        // in the same variable space as op's.
+        AggregateOperator aggOpArg = (AggregateOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(
+                getPairList(op.getVariables(), op.getExpressions()),
+                getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with a RUNNINGAGGREGATE operator: the (variable,
+     * expression) pairs must match as an unordered list after substitution.
+     */
+    @Override
+    public Boolean visitRunningAggregateOperator(RunningAggregateOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.RUNNINGAGGREGATE) {
+            return Boolean.FALSE;
+        }
+        RunningAggregateOperator substituted = (RunningAggregateOperator) copyAndSubstituteVar(op, arg);
+        List<Pair<LogicalVariable, ILogicalExpression>> left = getPairList(op.getVariables(), op.getExpressions());
+        List<Pair<LogicalVariable, ILogicalExpression>> right = getPairList(substituted.getVariables(),
+                substituted.getExpressions());
+        return VariableUtilities.varListEqualUnordered(left, right);
+    }
+
+    /**
+     * Checks isomorphism with an EMPTYTUPLESOURCE operator; a matching tag is
+     * sufficient because the operator carries no comparable state.
+     */
+    @Override
+    public Boolean visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        // Test the tag on arg directly, consistent with every other visit
+        // method; the original deep-copied and variable-substituted arg first
+        // via copyAndSubstituteVar, paying for a copy the comparison never
+        // uses (the substituted result was only used for the tag test).
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.EMPTYTUPLESOURCE)
+            return Boolean.FALSE;
+        return Boolean.TRUE;
+    }
+
+    /**
+     * Checks isomorphism with an EXTENSION_OPERATOR; only the tag is compared.
+     */
+    @Override
+    public Boolean visitExtensionOperator(ExtensionOperator op, ILogicalOperator arg) throws AlgebricksException {
+        // Check the tag BEFORE any cast: the original cast the result of
+        // copyAndSubstituteVar(op, arg) to ExtensionOperator first, which
+        // threw ClassCastException whenever arg was not itself an
+        // ExtensionOperator, instead of returning FALSE.
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.EXTENSION_OPERATOR)
+            return Boolean.FALSE;
+        return Boolean.TRUE;
+    }
+
+    /**
+     * Checks isomorphism with a GROUP operator. Requires (1) the same
+     * physical operator tag, (2) matching group-by key lists compared as
+     * unordered (variable, expression) pairs after substitution, and
+     * (3) the same number of nested plans with pairwise isomorphic roots.
+     */
+    @Override
+    public Boolean visitGroupByOperator(GroupByOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        // require the same physical operator, otherwise delivers different data
+        // properties
+        if (aop.getOperatorTag() != LogicalOperatorTag.GROUP
+                || aop.getPhysicalOperator().getOperatorTag() != op.getPhysicalOperator().getOperatorTag())
+            return Boolean.FALSE;
+
+        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyLists = op.getGroupByList();
+        GroupByOperator gbyOpArg = (GroupByOperator) copyAndSubstituteVar(op, arg);
+        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyListsArg = gbyOpArg.getGroupByList();
+
+        // Unwrap the Mutable expression references so the pairs compare by
+        // expression value rather than by wrapper identity.
+        List<Pair<LogicalVariable, ILogicalExpression>> listLeft = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+        List<Pair<LogicalVariable, ILogicalExpression>> listRight = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : keyLists)
+            listLeft.add(new Pair<LogicalVariable, ILogicalExpression>(pair.first, pair.second.getValue()));
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : keyListsArg)
+            listRight.add(new Pair<LogicalVariable, ILogicalExpression>(pair.first, pair.second.getValue()));
+
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(listLeft, listRight);
+
+        if (!isomorphic)
+            return Boolean.FALSE;
+        int sizeOp = op.getNestedPlans().size();
+        int sizeArg = gbyOpArg.getNestedPlans().size();
+        if (sizeOp != sizeArg)
+            return Boolean.FALSE;
+
+        // NOTE(review): the nested plans are taken from the ORIGINAL arg
+        // here (not the substituted copy gbyOpArg) — confirm this is
+        // intentional before relying on variable names inside the plans.
+        GroupByOperator argOp = (GroupByOperator) arg;
+        List<ILogicalPlan> plans = op.getNestedPlans();
+        List<ILogicalPlan> plansArg = argOp.getNestedPlans();
+        for (int i = 0; i < plans.size(); i++) {
+            List<Mutable<ILogicalOperator>> roots = plans.get(i).getRoots();
+            List<Mutable<ILogicalOperator>> rootsArg = plansArg.get(i).getRoots();
+            if (roots.size() != rootsArg.size())
+                return Boolean.FALSE;
+            for (int j = 0; j < roots.size(); j++) {
+                ILogicalOperator topOp1 = roots.get(j).getValue();
+                ILogicalOperator topOp2 = rootsArg.get(j).getValue();
+                // Recursively compare the whole nested operator trees.
+                isomorphic = this.checkBottomUp(topOp1, topOp2);
+                if (!isomorphic)
+                    return Boolean.FALSE;
+            }
+        }
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with a LIMIT operator: both the offset and the
+     * max-objects expressions must be equal by value after substitution.
+     */
+    @Override
+    public Boolean visitLimitOperator(LimitOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.LIMIT)
+            return Boolean.FALSE;
+        LimitOperator limitOpArg = (LimitOperator) copyAndSubstituteVar(op, arg);
+        // Compare offsets by expression value. The original compared the
+        // Mutable wrappers with !=, i.e. reference identity, which is always
+        // unequal for a deep copy — two LIMITs with identical offsets were
+        // therefore reported as non-isomorphic.
+        ILogicalExpression offset = op.getOffset() == null ? null : op.getOffset().getValue();
+        ILogicalExpression offsetArg = limitOpArg.getOffset() == null ? null : limitOpArg.getOffset().getValue();
+        if (offset == null ? offsetArg != null : !offset.equals(offsetArg))
+            return Boolean.FALSE;
+        boolean isomorphic = op.getMaxObjects().getValue().equals(limitOpArg.getMaxObjects().getValue());
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with a DIE operator: the after-objects expressions
+     * must be equal by value after substitution.
+     */
+    @Override
+    public Boolean visitDieOperator(DieOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.DIE) {
+            return Boolean.FALSE;
+        }
+        DieOperator substituted = (DieOperator) copyAndSubstituteVar(op, arg);
+        ILogicalExpression left = op.getAfterObjects().getValue();
+        ILogicalExpression right = substituted.getAfterObjects().getValue();
+        return left.equals(right);
+    }
+
+    /**
+     * Checks isomorphism with an INNERJOIN operator: the join conditions must
+     * be equal by value after substitution.
+     */
+    @Override
+    public Boolean visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.INNERJOIN) {
+            return Boolean.FALSE;
+        }
+        InnerJoinOperator substituted = (InnerJoinOperator) copyAndSubstituteVar(op, arg);
+        return op.getCondition().getValue().equals(substituted.getCondition().getValue());
+    }
+
+    /**
+     * Checks isomorphism with a LEFTOUTERJOIN operator: the join conditions
+     * must be equal by value after substitution.
+     */
+    @Override
+    public Boolean visitLeftOuterJoinOperator(LeftOuterJoinOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
+            return Boolean.FALSE;
+        }
+        LeftOuterJoinOperator substituted = (LeftOuterJoinOperator) copyAndSubstituteVar(op, arg);
+        return op.getCondition().getValue().equals(substituted.getCondition().getValue());
+    }
+
+    /**
+     * Checks isomorphism with a NESTEDTUPLESOURCE operator; any two such
+     * operators are considered isomorphic (tag match only).
+     */
+    @Override
+    public Boolean visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        return other.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE ? Boolean.TRUE : Boolean.FALSE;
+    }
+
+    /**
+     * Checks isomorphism with an ORDER operator: the (order direction,
+     * expression) pairs must match pairwise after substitution.
+     */
+    @Override
+    public Boolean visitOrderOperator(OrderOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.ORDER) {
+            return Boolean.FALSE;
+        }
+        OrderOperator substituted = (OrderOperator) copyAndSubstituteVar(op, arg);
+        return compareIOrderAndExpressions(op.getOrderExpressions(), substituted.getOrderExpressions());
+    }
+
+    /**
+     * Checks isomorphism with an ASSIGN operator: the (variable, expression)
+     * pairs must match as an unordered list after substitution.
+     */
+    @Override
+    public Boolean visitAssignOperator(AssignOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return Boolean.FALSE;
+        }
+        AssignOperator substituted = (AssignOperator) copyAndSubstituteVar(op, arg);
+        List<Pair<LogicalVariable, ILogicalExpression>> left = getPairList(op.getVariables(), op.getExpressions());
+        List<Pair<LogicalVariable, ILogicalExpression>> right = getPairList(substituted.getVariables(),
+                substituted.getExpressions());
+        return VariableUtilities.varListEqualUnordered(left, right);
+    }
+
+    /**
+     * Checks isomorphism with a SELECT operator: the filter conditions must
+     * be equal by value after substitution.
+     */
+    @Override
+    public Boolean visitSelectOperator(SelectOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return Boolean.FALSE;
+        }
+        SelectOperator substituted = (SelectOperator) copyAndSubstituteVar(op, arg);
+        return op.getCondition().getValue().equals(substituted.getCondition().getValue());
+    }
+
+    /**
+     * Checks isomorphism with a PROJECT operator: the projected variable
+     * lists must be equal as unordered sets after substitution.
+     */
+    @Override
+    public Boolean visitProjectOperator(ProjectOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.PROJECT) {
+            return Boolean.FALSE;
+        }
+        ProjectOperator substituted = (ProjectOperator) copyAndSubstituteVar(op, arg);
+        return VariableUtilities.varListEqualUnordered(op.getVariables(), substituted.getVariables());
+    }
+
+    /**
+     * Checks isomorphism with a PARTITIONINGSPLIT operator: the split
+     * expressions must match pairwise after substitution.
+     */
+    @Override
+    public Boolean visitPartitioningSplitOperator(PartitioningSplitOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.PARTITIONINGSPLIT) {
+            return Boolean.FALSE;
+        }
+        PartitioningSplitOperator substituted = (PartitioningSplitOperator) copyAndSubstituteVar(op, arg);
+        return compareExpressions(op.getExpressions(), substituted.getExpressions());
+    }
+
+    /**
+     * Checks isomorphism with a REPLICATE operator; any two such operators
+     * are considered isomorphic (tag match only).
+     */
+    @Override
+    public Boolean visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        return other.getOperatorTag() == LogicalOperatorTag.REPLICATE ? Boolean.TRUE : Boolean.FALSE;
+    }
+
+    /**
+     * Checks isomorphism with a SCRIPT operator: both must run an equal
+     * script description.
+     */
+    @Override
+    public Boolean visitScriptOperator(ScriptOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.SCRIPT) {
+            return Boolean.FALSE;
+        }
+        ScriptOperator substituted = (ScriptOperator) copyAndSubstituteVar(op, arg);
+        return op.getScriptDescription().equals(substituted.getScriptDescription());
+    }
+
+    /**
+     * Checks isomorphism with a SUBPLAN operator: the nested plans must have
+     * the same number of roots per plan, and each pair of corresponding root
+     * operator trees must be isomorphic.
+     */
+    @Override
+    public Boolean visitSubplanOperator(SubplanOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.SUBPLAN)
+            return Boolean.FALSE;
+        SubplanOperator subplanOpArg = (SubplanOperator) copyAndSubstituteVar(op, arg);
+        List<ILogicalPlan> plans = op.getNestedPlans();
+        List<ILogicalPlan> plansArg = subplanOpArg.getNestedPlans();
+        // NOTE(review): nested-plan list sizes are assumed equal here; a size
+        // guard like the one in visitGroupByOperator may be warranted.
+        for (int i = 0; i < plans.size(); i++) {
+            List<Mutable<ILogicalOperator>> roots = plans.get(i).getRoots();
+            List<Mutable<ILogicalOperator>> rootsArg = plansArg.get(i).getRoots();
+            // Bug fix: the original tested == here, which rejected exactly the
+            // plans whose root counts DID match and fell through (risking an
+            // IndexOutOfBoundsException) when they differed.
+            if (roots.size() != rootsArg.size())
+                return Boolean.FALSE;
+            for (int j = 0; j < roots.size(); j++) {
+                ILogicalOperator topOp1 = roots.get(j).getValue();
+                ILogicalOperator topOp2 = rootsArg.get(j).getValue();
+                boolean isomorphic = this.checkBottomUp(topOp1, topOp2);
+                if (!isomorphic)
+                    return Boolean.FALSE;
+            }
+        }
+        return Boolean.TRUE;
+    }
+
+    /**
+     * Checks isomorphism with a UNIONALL operator: the variable-mapping
+     * triples (left input var, right input var, output var) must match as an
+     * unordered list after substitution.
+     */
+    @Override
+    public Boolean visitUnionOperator(UnionAllOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.UNIONALL)
+            return Boolean.FALSE;
+        UnionAllOperator unionOpArg = (UnionAllOperator) copyAndSubstituteVar(op, arg);
+        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> mapping = op.getVariableMappings();
+        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> mappingArg = unionOpArg.getVariableMappings();
+        if (mapping.size() != mappingArg.size())
+            return Boolean.FALSE;
+        return VariableUtilities.varListEqualUnordered(mapping, mappingArg);
+    }
+
+    /**
+     * Checks isomorphism with an UNNEST operator: the produced variables
+     * (unordered), the positional variable, and the unnest expression must
+     * all match after substitution.
+     */
+    @Override
+    public Boolean visitUnnestOperator(UnnestOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.UNNEST)
+            return Boolean.FALSE;
+        UnnestOperator unnestOpArg = (UnnestOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), unnestOpArg.getVariables())
+                && variableEqual(op.getPositionalVariable(), unnestOpArg.getPositionalVariable());
+        if (!isomorphic)
+            return Boolean.FALSE;
+        // Variables matched; the unnest expressions must also be equal by value.
+        isomorphic = op.getExpressionRef().getValue().equals(unnestOpArg.getExpressionRef().getValue());
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with an UNNEST_MAP operator: the produced variables
+     * (unordered) and the unnest expression must match after substitution.
+     */
+    @Override
+    public Boolean visitUnnestMapOperator(UnnestMapOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.UNNEST_MAP)
+            return Boolean.FALSE;
+        UnnestMapOperator unnestOpArg = (UnnestMapOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), unnestOpArg.getVariables());
+        if (!isomorphic)
+            return Boolean.FALSE;
+        // Variables matched; the unnest expressions must also be equal by value.
+        isomorphic = op.getExpressionRef().getValue().equals(unnestOpArg.getExpressionRef().getValue());
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with a DATASOURCESCAN operator: the data sources
+     * must match (compared via toString) and the scan variables must be
+     * equal as unordered sets after substitution.
+     */
+    @Override
+    public Boolean visitDataScanOperator(DataSourceScanOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN)
+            return Boolean.FALSE;
+        // Cheap pre-check on the data source identity before paying for the
+        // deep copy below. NOTE(review): sources are compared by toString()
+        // rather than equals() — presumably toString identifies the source
+        // uniquely; confirm against the IDataSource implementations.
+        DataSourceScanOperator argScan = (DataSourceScanOperator) arg;
+        if (!argScan.getDataSource().toString().equals(op.getDataSource().toString()))
+            return Boolean.FALSE;
+        DataSourceScanOperator scanOpArg = (DataSourceScanOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), scanOpArg.getVariables())
+                && op.getDataSource().toString().equals(scanOpArg.getDataSource().toString());
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with a DISTINCT operator: the distinct-key
+     * expressions must match pairwise after substitution.
+     */
+    @Override
+    public Boolean visitDistinctOperator(DistinctOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.DISTINCT) {
+            return Boolean.FALSE;
+        }
+        DistinctOperator substituted = (DistinctOperator) copyAndSubstituteVar(op, arg);
+        return compareExpressions(op.getExpressions(), substituted.getExpressions());
+    }
+
+    /**
+     * Checks isomorphism with an EXCHANGE operator. Both sides must use the
+     * same physical exchange and deliver compatible partitioning properties:
+     * same partitioning type and, after mapping arg's columns into op's
+     * variable space, the same partitioning columns (unordered).
+     */
+    @Override
+    public Boolean visitExchangeOperator(ExchangeOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.EXCHANGE)
+            return Boolean.FALSE;
+        // require the same partition property
+        if (!(op.getPhysicalOperator().getOperatorTag() == aop.getPhysicalOperator().getOperatorTag()))
+            return Boolean.FALSE;
+        // Rebuild the variable mapping between the two operator trees; the
+        // field is reused across calls, so clear leftover state first.
+        variableMapping.clear();
+        IsomorphismUtilities.mapVariablesTopDown(op, arg, variableMapping);
+        IPhysicalPropertiesVector properties = op.getPhysicalOperator().getDeliveredProperties();
+        IPhysicalPropertiesVector propertiesArg = aop.getPhysicalOperator().getDeliveredProperties();
+        // Both undelivered -> trivially equivalent; only one undelivered -> not.
+        if (properties == null && propertiesArg == null)
+            return Boolean.TRUE;
+        if (properties == null || propertiesArg == null)
+            return Boolean.FALSE;
+        IPartitioningProperty partProp = properties.getPartitioningProperty();
+        IPartitioningProperty partPropArg = propertiesArg.getPartitioningProperty();
+        if (!partProp.getPartitioningType().equals(partPropArg.getPartitioningType()))
+            return Boolean.FALSE;
+        List<LogicalVariable> columns = new ArrayList<LogicalVariable>();
+        partProp.getColumns(columns);
+        List<LogicalVariable> columnsArg = new ArrayList<LogicalVariable>();
+        partPropArg.getColumns(columnsArg);
+        if (columns.size() != columnsArg.size())
+            return Boolean.FALSE;
+        if (columns.size() == 0)
+            return Boolean.TRUE;
+        // Translate arg-side partitioning columns into op's variable space
+        // via the mapping built above (keys are arg-side variables here);
+        // unmapped variables are kept as-is and will fail the comparison.
+        for (int i = 0; i < columnsArg.size(); i++) {
+            LogicalVariable rightVar = columnsArg.get(i);
+            LogicalVariable leftVar = variableMapping.get(rightVar);
+            if (leftVar != null)
+                columnsArg.set(i, leftVar);
+        }
+        return VariableUtilities.varListEqualUnordered(columns, columnsArg);
+    }
+
+    /**
+     * Checks isomorphism with a WRITE operator: the output schemas must be
+     * equal as unordered variable sets after substitution.
+     */
+    @Override
+    public Boolean visitWriteOperator(WriteOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator other = (AbstractLogicalOperator) arg;
+        if (other.getOperatorTag() != LogicalOperatorTag.WRITE) {
+            return Boolean.FALSE;
+        }
+        WriteOperator substituted = (WriteOperator) copyAndSubstituteVar(op, arg);
+        return VariableUtilities.varListEqualUnordered(op.getSchema(), substituted.getSchema());
+    }
+
+    /**
+     * Checks isomorphism with a WRITE_RESULT operator: schema (unordered),
+     * target data source, and payload expression must all match after
+     * substitution.
+     */
+    @Override
+    public Boolean visitWriteResultOperator(WriteResultOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.WRITE_RESULT)
+            return Boolean.FALSE;
+        WriteResultOperator writeOpArg = (WriteResultOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), writeOpArg.getSchema());
+        if (!op.getDataSource().equals(writeOpArg.getDataSource()))
+            isomorphic = false;
+        // NOTE(review): this compares the Mutable wrappers of the payload
+        // expressions; it relies on the wrapper's equals delegating to the
+        // wrapped expression (true for MutableObject) — confirm.
+        if (!op.getPayloadExpression().equals(writeOpArg.getPayloadExpression()))
+            isomorphic = false;
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with an INSERT_DELETE operator: schema (unordered),
+     * target data source, and payload expression must all match after
+     * substitution.
+     */
+    @Override
+    public Boolean visitInsertDeleteOperator(InsertDeleteOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE)
+            return Boolean.FALSE;
+        InsertDeleteOperator insertOpArg = (InsertDeleteOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), insertOpArg.getSchema());
+        if (!op.getDataSource().equals(insertOpArg.getDataSource()))
+            isomorphic = false;
+        // NOTE(review): compares the Mutable wrappers of the payload
+        // expressions; relies on the wrapper's equals delegating to the
+        // wrapped expression (true for MutableObject) — confirm.
+        if (!op.getPayloadExpression().equals(insertOpArg.getPayloadExpression()))
+            isomorphic = false;
+        return isomorphic;
+    }
+
+    /**
+     * Checks isomorphism with an INDEX_INSERT_DELETE operator: schema
+     * (unordered) and target data-source index must match after substitution.
+     */
+    @Override
+    public Boolean visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.INDEX_INSERT_DELETE)
+            return Boolean.FALSE;
+        IndexInsertDeleteOperator insertOpArg = (IndexInsertDeleteOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), insertOpArg.getSchema());
+        if (!op.getDataSourceIndex().equals(insertOpArg.getDataSourceIndex()))
+            isomorphic = false;
+        return isomorphic;
+    }
+
+    @Override
+    public Boolean visitSinkOperator(SinkOperator op, ILogicalOperator arg) throws AlgebricksException {
+        return true;
+    }
+
+    /**
+     * Pairwise, order-sensitive value equality of two expression lists.
+     * Returns FALSE on any length or element mismatch.
+     */
+    private Boolean compareExpressions(List<Mutable<ILogicalExpression>> opExprs,
+            List<Mutable<ILogicalExpression>> argExprs) {
+        int size = opExprs.size();
+        if (size != argExprs.size()) {
+            return Boolean.FALSE;
+        }
+        for (int idx = 0; idx < size; idx++) {
+            if (!opExprs.get(idx).getValue().equals(argExprs.get(idx).getValue())) {
+                return Boolean.FALSE;
+            }
+        }
+        return Boolean.TRUE;
+    }
+
+    /**
+     * Pairwise, order-sensitive comparison of two (order direction,
+     * expression) lists: both the IOrder and the wrapped expression of each
+     * pair must be equal.
+     */
+    private Boolean compareIOrderAndExpressions(List<Pair<IOrder, Mutable<ILogicalExpression>>> opOrderExprs,
+            List<Pair<IOrder, Mutable<ILogicalExpression>>> argOrderExprs) {
+        if (opOrderExprs.size() != argOrderExprs.size())
+            return Boolean.FALSE;
+        for (int i = 0; i < opOrderExprs.size(); i++) {
+            // Order direction (e.g. ASC/DESC) first, then the expression value.
+            boolean isomorphic = opOrderExprs.get(i).first.equals(argOrderExprs.get(i).first);
+            if (!isomorphic)
+                return Boolean.FALSE;
+            isomorphic = opOrderExprs.get(i).second.getValue().equals(argOrderExprs.get(i).second.getValue());
+            if (!isomorphic)
+                return Boolean.FALSE;
+        }
+        return Boolean.TRUE;
+    }
+
+    /**
+     * Recursively decides isomorphism of two operator trees bottom-up: every
+     * pair of corresponding inputs must be isomorphic before the two roots
+     * themselves are compared.
+     */
+    private Boolean checkBottomUp(ILogicalOperator op1, ILogicalOperator op2) throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> inputs1 = op1.getInputs();
+        List<Mutable<ILogicalOperator>> inputs2 = op2.getInputs();
+        int arity = inputs1.size();
+        if (arity != inputs2.size()) {
+            return Boolean.FALSE;
+        }
+        for (int i = 0; i < arity; i++) {
+            if (!checkBottomUp(inputs1.get(i).getValue(), inputs2.get(i).getValue())) {
+                return Boolean.FALSE;
+            }
+        }
+        return IsomorphismUtilities.isOperatorIsomorphic(op1, op2);
+    }
+
+    /**
+     * Deep-copies {@code argOp} and renames its variables so the copy is
+     * expressed in terms of {@code op}'s variables: first the variables live
+     * below {@code argOp} are remapped via a top-down variable mapping, then
+     * the variables produced by {@code argOp} are renamed positionally to the
+     * ones produced by {@code op}.
+     */
+    private ILogicalOperator copyAndSubstituteVar(ILogicalOperator op, ILogicalOperator argOp)
+            throws AlgebricksException {
+        ILogicalOperator newOp = IsomorphismOperatorVisitor.deepCopy(argOp);
+        // variableMapping is a shared field; reset it before repopulating.
+        variableMapping.clear();
+        IsomorphismUtilities.mapVariablesTopDown(op, argOp, variableMapping);
+
+        // Collect variables live at argOp's inputs; only these are remapped below.
+        List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
+        if (argOp.getInputs().size() > 0)
+            for (int i = 0; i < argOp.getInputs().size(); i++)
+                VariableUtilities.getLiveVariables(argOp.getInputs().get(i).getValue(), liveVars);
+        List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+        VariableUtilities.getProducedVariables(argOp, producedVars);
+        List<LogicalVariable> producedVarsNew = new ArrayList<LogicalVariable>();
+        VariableUtilities.getProducedVariables(op, producedVarsNew);
+
+        // If the two operators produce different numbers of variables, no
+        // positional renaming is possible; return the plain copy unchanged.
+        if (producedVars.size() != producedVarsNew.size())
+            return newOp;
+        // Remap input-side (live) variables according to the computed mapping.
+        for (Entry<LogicalVariable, LogicalVariable> map : variableMapping.entrySet()) {
+            if (liveVars.contains(map.getKey())) {
+                VariableUtilities.substituteVariables(newOp, map.getKey(), map.getValue(), null);
+            }
+        }
+        // Rename produced variables positionally (argOp's i-th -> op's i-th).
+        for (int i = 0; i < producedVars.size(); i++)
+            VariableUtilities.substituteVariables(newOp, producedVars.get(i), producedVarsNew.get(i), null);
+        return newOp;
+    }
+
+    /**
+     * Zips a variable list with an expression list into (variable, expression)
+     * pairs, position by position.
+     *
+     * @throws AlgebricksException if the two lists differ in length
+     */
+    public List<Pair<LogicalVariable, ILogicalExpression>> getPairList(List<LogicalVariable> vars,
+            List<Mutable<ILogicalExpression>> exprs) throws AlgebricksException {
+        if (vars.size() != exprs.size())
+            throw new AlgebricksException("variable list size does not equal to expression list size ");
+        List<Pair<LogicalVariable, ILogicalExpression>> pairs = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>(
+                vars.size());
+        for (int i = 0; i < vars.size(); i++)
+            pairs.add(new Pair<LogicalVariable, ILogicalExpression>(vars.get(i), exprs.get(i).getValue()));
+        return pairs;
+    }
+
+    /** Copies a single operator (without its inputs) via the deep-copy visitor. */
+    private static ILogicalOperator deepCopy(ILogicalOperator op) throws AlgebricksException {
+        return op.accept(new OperatorDeepCopyVisitor(), null);
+    }
+
+    /** Deep-copies a whole plan by recursively copying every root's operator tree. */
+    private static ILogicalPlan deepCopy(ILogicalPlan plan) throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> newRoots = new ArrayList<Mutable<ILogicalOperator>>();
+        for (Mutable<ILogicalOperator> root : plan.getRoots()) {
+            newRoots.add(new MutableObject<ILogicalOperator>(bottomUpCopyOperators(root.getValue())));
+        }
+        return new ALogicalPlanImpl(newRoots);
+    }
+
+    /** Recursively copies an operator together with its entire input subtree. */
+    private static ILogicalOperator bottomUpCopyOperators(ILogicalOperator op) throws AlgebricksException {
+        ILogicalOperator copy = deepCopy(op);
+        // The single-operator copy does not carry inputs; rebuild them from copies.
+        copy.getInputs().clear();
+        for (Mutable<ILogicalOperator> child : op.getInputs()) {
+            copy.getInputs().add(new MutableObject<ILogicalOperator>(bottomUpCopyOperators(child.getValue())));
+        }
+        return copy;
+    }
+
+    /**
+     * Null-safe variable equality: two nulls are equal, a null never equals a
+     * non-null, and otherwise equality is delegated to LogicalVariable.equals.
+     */
+    private static boolean variableEqual(LogicalVariable var, LogicalVariable varArg) {
+        // Fix: the previous code only handled the case where BOTH arguments were
+        // null before dereferencing 'var', so variableEqual(null, v) threw an NPE.
+        if (var == null) {
+            return varArg == null;
+        }
+        return var.equals(varArg);
+    }
+
+    /**
+     * Creates a deep copy of a single logical operator: expressions are cloned,
+     * variable lists are copied into fresh lists (the LogicalVariable objects
+     * themselves are shared), and nested plans are recursively deep-copied.
+     * Inputs are NOT copied here; callers (e.g. bottomUpCopyOperators) rebuild
+     * them.
+     */
+    private static class OperatorDeepCopyVisitor implements ILogicalOperatorVisitor<ILogicalOperator, Void> {
+
+        @Override
+        public ILogicalOperator visitAggregateOperator(AggregateOperator op, Void arg) throws AlgebricksException {
+            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
+            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            newList.addAll(op.getVariables());
+            deepCopyExpressionRefs(newExpressions, op.getExpressions());
+            return new AggregateOperator(newList, newExpressions);
+        }
+
+        @Override
+        public ILogicalOperator visitRunningAggregateOperator(RunningAggregateOperator op, Void arg)
+                throws AlgebricksException {
+            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
+            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            newList.addAll(op.getVariables());
+            deepCopyExpressionRefs(newExpressions, op.getExpressions());
+            return new RunningAggregateOperator(newList, newExpressions);
+        }
+
+        @Override
+        public ILogicalOperator visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Void arg)
+                throws AlgebricksException {
+            // Stateless operator: a fresh instance is a complete copy.
+            return new EmptyTupleSourceOperator();
+        }
+
+        @Override
+        public ILogicalOperator visitGroupByOperator(GroupByOperator op, Void arg) throws AlgebricksException {
+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decoList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+            ArrayList<ILogicalPlan> newSubplans = new ArrayList<ILogicalPlan>();
+            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : op.getGroupByList())
+                groupByList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(pair.first,
+                        deepCopyExpressionRef(pair.second)));
+            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : op.getDecorList())
+                decoList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(pair.first,
+                        deepCopyExpressionRef(pair.second)));
+            // Nested plans are deep-copied recursively, roots and all.
+            for (ILogicalPlan plan : op.getNestedPlans()) {
+                newSubplans.add(IsomorphismOperatorVisitor.deepCopy(plan));
+            }
+            return new GroupByOperator(groupByList, decoList, newSubplans);
+        }
+
+        @Override
+        public ILogicalOperator visitLimitOperator(LimitOperator op, Void arg) throws AlgebricksException {
+            // NOTE(review): assumes both max-objects and offset expressions are
+            // non-null; confirm LimitOperator never carries a null offset here.
+            return new LimitOperator(deepCopyExpressionRef(op.getMaxObjects()).getValue(), deepCopyExpressionRef(
+                    op.getOffset()).getValue(), op.isTopmostLimitOp());
+        }
+
+        @Override
+        public ILogicalOperator visitDieOperator(DieOperator op, Void arg) throws AlgebricksException {
+            return new DieOperator(deepCopyExpressionRef(op.getAfterObjects()).getValue());
+        }
+
+        @Override
+        public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, Void arg) throws AlgebricksException {
+            // Input references are passed through; callers rebuild inputs if needed.
+            return new InnerJoinOperator(deepCopyExpressionRef(op.getCondition()), op.getInputs().get(0), op
+                    .getInputs().get(1));
+        }
+
+        @Override
+        public ILogicalOperator visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Void arg)
+                throws AlgebricksException {
+            return new LeftOuterJoinOperator(deepCopyExpressionRef(op.getCondition()), op.getInputs().get(0), op
+                    .getInputs().get(1));
+        }
+
+        @Override
+        public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg)
+                throws AlgebricksException {
+            // NOTE(review): the data-source reference of the original is dropped
+            // here (copy is created with null) — confirm callers re-attach it.
+            return new NestedTupleSourceOperator(null);
+        }
+
+        @Override
+        public ILogicalOperator visitOrderOperator(OrderOperator op, Void arg) throws AlgebricksException {
+            return new OrderOperator(this.deepCopyOrderAndExpression(op.getOrderExpressions()));
+        }
+
+        @Override
+        public ILogicalOperator visitAssignOperator(AssignOperator op, Void arg) throws AlgebricksException {
+            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
+            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            newList.addAll(op.getVariables());
+            deepCopyExpressionRefs(newExpressions, op.getExpressions());
+            return new AssignOperator(newList, newExpressions);
+        }
+
+        @Override
+        public ILogicalOperator visitSelectOperator(SelectOperator op, Void arg) throws AlgebricksException {
+            return new SelectOperator(deepCopyExpressionRef(op.getCondition()));
+        }
+
+        @Override
+        public ILogicalOperator visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
+            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
+            newList.addAll(op.getVariables());
+            return new ProjectOperator(newList);
+        }
+
+        @Override
+        public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg)
+                throws AlgebricksException {
+            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newExpressions, op.getExpressions());
+            return new PartitioningSplitOperator(newExpressions, op.getDefaultBranchIndex());
+        }
+
+        @Override
+        public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+            return new ReplicateOperator(op.getOutputArity());
+        }
+
+        @Override
+        public ILogicalOperator visitScriptOperator(ScriptOperator op, Void arg) throws AlgebricksException {
+            ArrayList<LogicalVariable> newInputList = new ArrayList<LogicalVariable>();
+            ArrayList<LogicalVariable> newOutputList = new ArrayList<LogicalVariable>();
+            newInputList.addAll(op.getInputVariables());
+            newOutputList.addAll(op.getOutputVariables());
+            return new ScriptOperator(op.getScriptDescription(), newInputList, newOutputList);
+        }
+
+        @Override
+        public ILogicalOperator visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException {
+            ArrayList<ILogicalPlan> newSubplans = new ArrayList<ILogicalPlan>();
+            for (ILogicalPlan plan : op.getNestedPlans()) {
+                newSubplans.add(IsomorphismOperatorVisitor.deepCopy(plan));
+            }
+            return new SubplanOperator(newSubplans);
+        }
+
+        @Override
+        public ILogicalOperator visitUnionOperator(UnionAllOperator op, Void arg) throws AlgebricksException {
+            List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> newVarMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();
+            List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = op.getVariableMappings();
+            for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple : varMap)
+                newVarMap.add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(triple.first,
+                        triple.second, triple.third));
+            return new UnionAllOperator(newVarMap);
+        }
+
+        @Override
+        public ILogicalOperator visitUnnestOperator(UnnestOperator op, Void arg) throws AlgebricksException {
+            return new UnnestOperator(op.getVariable(), deepCopyExpressionRef(op.getExpressionRef()),
+                    op.getPositionalVariable(), op.getPositionalVariableType());
+        }
+
+        @Override
+        public ILogicalOperator visitUnnestMapOperator(UnnestMapOperator op, Void arg) throws AlgebricksException {
+            ArrayList<LogicalVariable> newInputList = new ArrayList<LogicalVariable>();
+            newInputList.addAll(op.getVariables());
+            return new UnnestMapOperator(newInputList, deepCopyExpressionRef(op.getExpressionRef()),
+                    new ArrayList<Object>(op.getVariableTypes()), op.propagatesInput());
+        }
+
+        @Override
+        public ILogicalOperator visitDataScanOperator(DataSourceScanOperator op, Void arg) throws AlgebricksException {
+            ArrayList<LogicalVariable> newInputList = new ArrayList<LogicalVariable>();
+            newInputList.addAll(op.getVariables());
+            return new DataSourceScanOperator(newInputList, op.getDataSource());
+        }
+
+        @Override
+        public ILogicalOperator visitDistinctOperator(DistinctOperator op, Void arg) throws AlgebricksException {
+            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newExpressions, op.getExpressions());
+            return new DistinctOperator(newExpressions);
+        }
+
+        @Override
+        public ILogicalOperator visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
+            return new ExchangeOperator();
+        }
+
+        @Override
+        public ILogicalOperator visitWriteOperator(WriteOperator op, Void arg) throws AlgebricksException {
+            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newExpressions, op.getExpressions());
+            return new WriteOperator(newExpressions, op.getDataSink());
+        }
+
+        @Override
+        public ILogicalOperator visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
+            ArrayList<Mutable<ILogicalExpression>> newKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newKeyExpressions, op.getKeyExpressions());
+            return new WriteResultOperator(op.getDataSource(), deepCopyExpressionRef(op.getPayloadExpression()),
+                    newKeyExpressions);
+        }
+
+        @Override
+        public ILogicalOperator visitInsertDeleteOperator(InsertDeleteOperator op, Void arg) throws AlgebricksException {
+            List<Mutable<ILogicalExpression>> newKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newKeyExpressions, op.getPrimaryKeyExpressions());
+            return new InsertDeleteOperator(op.getDataSource(), deepCopyExpressionRef(op.getPayloadExpression()),
+                    newKeyExpressions, op.getOperation());
+        }
+
+        @Override
+        public ILogicalOperator visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Void arg)
+                throws AlgebricksException {
+            List<Mutable<ILogicalExpression>> newPrimaryKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newPrimaryKeyExpressions, op.getPrimaryKeyExpressions());
+            List<Mutable<ILogicalExpression>> newSecondaryKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newSecondaryKeyExpressions, op.getSecondaryKeyExpressions());
+            // Fix: the filter expression is optional. The previous code called
+            // cloneExpression() on it unconditionally and threw a NullPointerException
+            // whenever the operator had no filter; keep a null filter as null.
+            Mutable<ILogicalExpression> newFilterExpression = op.getFilterExpression() == null ? null
+                    : new MutableObject<ILogicalExpression>(
+                            ((AbstractLogicalExpression) op.getFilterExpression()).cloneExpression());
+            return new IndexInsertDeleteOperator(op.getDataSourceIndex(), newPrimaryKeyExpressions,
+                    newSecondaryKeyExpressions, newFilterExpression, op.getOperation());
+        }
+
+        @Override
+        public ILogicalOperator visitSinkOperator(SinkOperator op, Void arg) throws AlgebricksException {
+            return new SinkOperator();
+        }
+
+        // Clones every expression in oldExprs, appending the clones to newExprs.
+        private void deepCopyExpressionRefs(List<Mutable<ILogicalExpression>> newExprs,
+                List<Mutable<ILogicalExpression>> oldExprs) {
+            for (Mutable<ILogicalExpression> oldExpr : oldExprs)
+                newExprs.add(new MutableObject<ILogicalExpression>(((AbstractLogicalExpression) oldExpr.getValue())
+                        .cloneExpression()));
+        }
+
+        // Clones a single expression reference.
+        private Mutable<ILogicalExpression> deepCopyExpressionRef(Mutable<ILogicalExpression> oldExpr) {
+            return new MutableObject<ILogicalExpression>(
+                    ((AbstractLogicalExpression) oldExpr.getValue()).cloneExpression());
+        }
+
+        // Clones order/expression pairs; the IOrder objects themselves are shared.
+        private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderAndExpression(
+                List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
+            List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();
+            for (Pair<IOrder, Mutable<ILogicalExpression>> pair : ordersAndExprs)
+                newOrdersAndExprs.add(new Pair<IOrder, Mutable<ILogicalExpression>>(pair.first,
+                        deepCopyExpressionRef(pair.second)));
+            return newOrdersAndExprs;
+        }
+
+        @Override
+        public ILogicalOperator visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
+            return new ExtensionOperator(op.getNewInstanceOfDelegateOperator());
+        }
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismUtilities.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismUtilities.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismUtilities.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismUtilities.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
new file mode 100644
index 0000000..c5f4c71
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
+
+import java.util.Map;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.PlanCompiler;
+import edu.uci.ics.hyracks.algebricks.runtime.base.AlgebricksPipeline;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+/**
+ * Base class for Algebricks physical operators. Provides common plumbing:
+ * delivered-property storage, a per-query host context, a flag to suppress job
+ * generation below this operator, and helpers to contribute the Hyracks
+ * operator descriptor and to compile nested plans into Algebricks pipelines.
+ */
+public abstract class AbstractPhysicalOperator implements IPhysicalOperator {
+
+    // Physical properties this operator delivers; set by subclasses.
+    protected IPhysicalPropertiesVector deliveredProperties;
+    // When true, job generation is skipped for the subtree below this operator.
+    private boolean disableJobGenBelow = false;
+    // Opaque per-query context supplied by the host compiler.
+    private Object hostQueryContext;
+
+    @Override
+    public final IPhysicalPropertiesVector getDeliveredProperties() {
+        return deliveredProperties;
+    }
+
+    @Override
+    public String toString() {
+        return getOperatorTag().toString();
+    }
+
+    public void setHostQueryContext(Object context) {
+        this.hostQueryContext = context;
+    }
+
+    public Object getHostQueryContext() {
+        return hostQueryContext;
+    }
+
+    /** Requirement vector for a unary operator that imposes no property demands. */
+    protected PhysicalRequirements emptyUnaryRequirements() {
+        StructuralPropertiesVector[] req = new StructuralPropertiesVector[] { StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR };
+        return new PhysicalRequirements(req, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+    }
+
+    @Override
+    public void disableJobGenBelowMe() {
+        this.disableJobGenBelow = true;
+    }
+
+    @Override
+    public boolean isJobGenDisabledBelowMe() {
+        return disableJobGenBelow;
+    }
+
+    /**
+     * Registers opDesc with the job builder for this logical operator; an
+     * unpartitioned operator is additionally pinned to a single partition.
+     */
+    protected void contributeOpDesc(IHyracksJobBuilder builder, AbstractLogicalOperator op, IOperatorDescriptor opDesc) {
+        if (op.getExecutionMode() == ExecutionMode.UNPARTITIONED) {
+            AlgebricksPartitionConstraint apc = new AlgebricksCountPartitionConstraint(1);
+            builder.contributeAlgebricksPartitionConstraint(opDesc, apc);
+        }
+        builder.contributeHyracksOperator(op, opDesc);
+    }
+
+    /**
+     * Compiles each nested plan of npOp into an AlgebricksPipeline, in the
+     * order the plans appear on the operator.
+     */
+    protected AlgebricksPipeline[] compileSubplans(IOperatorSchema outerPlanSchema,
+            AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, JobGenContext context)
+            throws AlgebricksException {
+        AlgebricksPipeline[] subplans = new AlgebricksPipeline[npOp.getNestedPlans().size()];
+        PlanCompiler pc = new PlanCompiler(context);
+        int i = 0;
+        for (ILogicalPlan p : npOp.getNestedPlans()) {
+            subplans[i++] = buildPipelineWithProjection(p, outerPlanSchema, npOp, opSchema, pc);
+        }
+        return subplans;
+    }
+
+    /**
+     * Compiles one single-root nested plan and extracts its micro-operator
+     * pipeline. The nested job must consist of exactly one
+     * AlgebricksMetaOperatorDescriptor (a linear pipeline); anything else is
+     * rejected with an AlgebricksException.
+     */
+    private AlgebricksPipeline buildPipelineWithProjection(ILogicalPlan p, IOperatorSchema outerPlanSchema,
+            AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, PlanCompiler pc) throws AlgebricksException {
+        if (p.getRoots().size() > 1) {
+            throw new NotImplementedException("Nested plans with several roots are not supported.");
+        }
+        JobSpecification nestedJob = pc.compilePlan(p, outerPlanSchema);
+        ILogicalOperator topOpInSubplan = p.getRoots().get(0).getValue();
+        JobGenContext context = pc.getContext();
+        IOperatorSchema topOpInSubplanScm = context.getSchema(topOpInSubplan);
+        // Expose the nested plan's output variables through the outer schema.
+        opSchema.addAllVariables(topOpInSubplanScm);
+
+        Map<OperatorDescriptorId, IOperatorDescriptor> opMap = nestedJob.getOperatorMap();
+        if (opMap.size() != 1) {
+            throw new AlgebricksException(
+                    "Attempting to construct a nested plan with "
+                            + opMap.size()
+                            + " operator descriptors. Currently, nested plans can only consist in linear pipelines of Asterix micro operators.");
+        }
+
+        // Exactly one entry exists at this point; return its pipeline.
+        for (OperatorDescriptorId oid : opMap.keySet()) {
+            IOperatorDescriptor opd = opMap.get(oid);
+            if (!(opd instanceof AlgebricksMetaOperatorDescriptor)) {
+                throw new AlgebricksException(
+                        "Can only generate Hyracks jobs for pipelinable Asterix nested plans, not for "
+                                + opd.getClass().getName());
+            }
+            AlgebricksMetaOperatorDescriptor amod = (AlgebricksMetaOperatorDescriptor) opd;
+
+            return amod.getPipeline();
+            // we suppose that the top operator in the subplan already does the
+            // projection for us
+        }
+
+        // Unreachable: the size-1 check above guarantees the loop returns.
+        throw new IllegalStateException();
+    }
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPropagatePropertiesForUsedVariablesPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPropagatePropertiesForUsedVariablesPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPropagatePropertiesForUsedVariablesPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPropagatePropertiesForUsedVariablesPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractScanPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractScanPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractScanPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractScanPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractStableSortPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractStableSortPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractStableSortPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractStableSortPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/BroadcastPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/BroadcastPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/BroadcastPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/BroadcastPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/EmptyTupleSourcePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/EmptyTupleSourcePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/EmptyTupleSourcePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/EmptyTupleSourcePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionExchangePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
new file mode 100644
index 0000000..6da42b4
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
@@ -0,0 +1,262 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator.JoinKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor;
+
/**
 * Physical operator that compiles an equi-join into a Hyracks hybrid hash join.
 * At job-generation time it picks between two runtime operators: the optimized
 * hybrid hash join (when every join key has a binary hash function family) and
 * the classic hybrid hash join otherwise. Supports INNER and LEFT_OUTER joins.
 */
public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {

    // Memory budget for the join, in frames.
    private final int memSizeInFrames;
    // Estimated maximum size of the build (first) input, in frames.
    private final int maxInputBuildSizeInFrames;
    // Estimated average number of records per frame, passed to the classic join.
    private final int aveRecordsPerFrame;
    // Multiplier applied to size estimates to account for hash-table overhead.
    private final double fudgeFactor;

    /**
     * @param kind              INNER or LEFT_OUTER
     * @param partitioningType  how the two branches are partitioned relative to each other
     * @param sideLeftOfEqualities  join key variables from the left branch
     * @param sideRightOfEqualities join key variables from the right branch
     * @param memSizeInFrames       join memory budget in frames
     * @param maxInputSize0InFrames estimated max build-input size in frames
     * @param aveRecordsPerFrame    estimated average records per frame
     * @param fudgeFactor           size-estimate fudge factor
     */
    public HybridHashJoinPOperator(JoinKind kind, JoinPartitioningType partitioningType,
            List<LogicalVariable> sideLeftOfEqualities, List<LogicalVariable> sideRightOfEqualities,
            int memSizeInFrames, int maxInputSize0InFrames, int aveRecordsPerFrame, double fudgeFactor) {
        super(kind, partitioningType, sideLeftOfEqualities, sideRightOfEqualities);
        this.memSizeInFrames = memSizeInFrames;
        this.maxInputBuildSizeInFrames = maxInputSize0InFrames;
        this.aveRecordsPerFrame = aveRecordsPerFrame;
        this.fudgeFactor = fudgeFactor;
    }

    @Override
    public PhysicalOperatorTag getOperatorTag() {
        return PhysicalOperatorTag.HYBRID_HASH_JOIN;
    }

    @Override
    public boolean isMicroOperator() {
        // Runs as a full Hyracks operator, not inside a micro-op pipeline.
        return false;
    }

    public double getFudgeFactor() {
        return fudgeFactor;
    }

    public int getMemSizeInFrames() {
        return memSizeInFrames;
    }

    @Override
    public String toString() {
        return getOperatorTag().toString() + " " + keysLeftBranch + keysRightBranch;
    }

    /**
     * Builds the Hyracks join operator descriptor for this logical join and wires
     * it into the job graph (input 0 = build side, input 1 = probe side).
     */
    @Override
    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
            throws AlgebricksException {
        // Translate join key variables into field indexes of each input schema.
        int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
        int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
        IVariableTypeEnvironment env = context.getTypeEnvironment(op);
        IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper.variablesToBinaryHashFunctionFactories(
                keysLeftBranch, env, context);
        IBinaryHashFunctionFamily[] hashFunFamilies = JobGenHelper.variablesToBinaryHashFunctionFamilies(
                keysLeftBranch, env, context);
        // One ascending comparator per left-branch key (the right side must be type-compatible).
        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
        int i = 0;
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        for (LogicalVariable v : keysLeftBranch) {
            Object t = env.getVarType(v);
            comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
        }
        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
                propagatedSchema, context);
        IOperatorDescriptorRegistry spec = builder.getJobSpec();
        IOperatorDescriptor opDesc = null;

        // The optimized join requires a hash function family for every key; fall
        // back to the classic hybrid hash join if any key lacks one.
        boolean optimizedHashJoin = true;
        for (IBinaryHashFunctionFamily family : hashFunFamilies) {
            if (family == null) {
                optimizedHashJoin = false;
                break;
            }
        }

        if (!optimizedHashJoin) {
            try {
                switch (kind) {
                    case INNER: {
                        opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
                                maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
                                hashFunFactories, comparatorFactories, recDescriptor);
                        break;
                    }
                    case LEFT_OUTER: {
                        // Unmatched probe tuples are padded with nulls for every right-side field.
                        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
                        for (int j = 0; j < nullWriterFactories.length; j++) {
                            nullWriterFactories[j] = context.getNullWriterFactory();
                        }
                        opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
                                maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
                                hashFunFactories, comparatorFactories, recDescriptor, true, nullWriterFactories);
                        break;
                    }
                    default: {
                        throw new NotImplementedException();
                    }
                }
            } catch (HyracksDataException e) {
                throw new AlgebricksException(e);
            }
        } else {
            try {
                switch (kind) {
                    case INNER: {
                        // Two tuple-pair comparators are needed: build-vs-probe and probe-vs-build
                        // (key arrays swapped), since either side may be probed after role reversal.
                        opDesc = new OptimizedHybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
                                maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, hashFunFamilies,
                                comparatorFactories, recDescriptor, new JoinMultiComparatorFactory(comparatorFactories,
                                        keysLeft, keysRight), new JoinMultiComparatorFactory(comparatorFactories,
                                        keysRight, keysLeft));
                        break;
                    }
                    case LEFT_OUTER: {
                        // Unmatched probe tuples are padded with nulls for every right-side field.
                        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
                        for (int j = 0; j < nullWriterFactories.length; j++) {
                            nullWriterFactories[j] = context.getNullWriterFactory();
                        }
                        opDesc = new OptimizedHybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
                                maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, hashFunFamilies,
                                comparatorFactories, recDescriptor, new JoinMultiComparatorFactory(comparatorFactories,
                                        keysLeft, keysRight), new JoinMultiComparatorFactory(comparatorFactories,
                                        keysRight, keysLeft), true, nullWriterFactories);
                        break;
                    }
                    default: {
                        throw new NotImplementedException();
                    }
                }
            } catch (HyracksDataException e) {
                throw new AlgebricksException(e);
            }
        }
        contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);

        // Wire both logical inputs as graph edges into the new operator.
        ILogicalOperator src1 = op.getInputs().get(0).getValue();
        builder.contributeGraphEdge(src1, 0, op, 0);
        ILogicalOperator src2 = op.getInputs().get(1).getValue();
        builder.contributeGraphEdge(src2, 0, op, 1);
    }

    @Override
    protected List<ILocalStructuralProperty> deliveredLocalProperties(ILogicalOperator op, IOptimizationContext context)
            throws AlgebricksException {
        // A hash join delivers no local structural (ordering/grouping) properties.
        return new LinkedList<ILocalStructuralProperty>();
    }

}
+
+/**
+ * {@ ITuplePairComparatorFactory} implementation for optimized hybrid hash join.
+ */
+class JoinMultiComparatorFactory implements ITuplePairComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    private final IBinaryComparatorFactory[] binaryComparatorFactories;
+    private final int[] keysLeft;
+    private final int[] keysRight;
+
+    public JoinMultiComparatorFactory(IBinaryComparatorFactory[] binaryComparatorFactory, int[] keysLeft,
+            int[] keysRight) {
+        this.binaryComparatorFactories = binaryComparatorFactory;
+        this.keysLeft = keysLeft;
+        this.keysRight = keysRight;
+    }
+
+    @Override
+    public ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
+        IBinaryComparator[] binaryComparators = new IBinaryComparator[binaryComparatorFactories.length];
+        for (int i = 0; i < binaryComparators.length; i++) {
+            binaryComparators[i] = binaryComparatorFactories[i].createBinaryComparator();
+        }
+        return new JoinMultiComparator(binaryComparators, keysLeft, keysRight);
+    }
+}
+
+/**
+ * {@ ITuplePairComparator} implementation for optimized hybrid hash join.
+ * The comparator applies multiple binary comparators, one for each key pairs
+ */
+class JoinMultiComparator implements ITuplePairComparator {
+    private final IBinaryComparator[] binaryComparators;
+    private final int[] keysLeft;
+    private final int[] keysRight;
+
+    public JoinMultiComparator(IBinaryComparator[] bComparator, int[] keysLeft, int[] keysRight) {
+        this.binaryComparators = bComparator;
+        this.keysLeft = keysLeft;
+        this.keysRight = keysRight;
+    }
+
+    @Override
+    public int compare(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1) {
+        int tStart0 = accessor0.getTupleStartOffset(tIndex0);
+        int fStartOffset0 = accessor0.getFieldSlotsLength() + tStart0;
+
+        int tStart1 = accessor1.getTupleStartOffset(tIndex1);
+        int fStartOffset1 = accessor1.getFieldSlotsLength() + tStart1;
+
+        for (int i = 0; i < binaryComparators.length; i++) {
+            int fStart0 = accessor0.getFieldStartOffset(tIndex0, keysLeft[i]);
+            int fEnd0 = accessor0.getFieldEndOffset(tIndex0, keysLeft[i]);
+            int fLen0 = fEnd0 - fStart0;
+
+            int fStart1 = accessor1.getFieldStartOffset(tIndex1, keysRight[i]);
+            int fEnd1 = accessor1.getFieldEndOffset(tIndex1, keysRight[i]);
+            int fLen1 = fEnd1 - fStart1;
+
+            int c = binaryComparators[i].compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+                    accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+            if (c != 0) {
+                return c;
+            }
+        }
+        return 0;
+    }
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InsertDeletePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InsertDeletePOperator.java
new file mode 100644
index 0000000..477d257
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InsertDeletePOperator.java
@@ -0,0 +1,96 @@
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+@SuppressWarnings("rawtypes")
+public class InsertDeletePOperator extends AbstractPhysicalOperator {
+
+    private LogicalVariable payload;
+    private List<LogicalVariable> keys;
+    private IDataSource<?> dataSource;
+
+    public InsertDeletePOperator(LogicalVariable payload, List<LogicalVariable> keys, IDataSource dataSource) {
+        this.payload = payload;
+        this.keys = keys;
+        this.dataSource = dataSource;
+    }
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.INSERT_DELETE;
+    }
+
+    @Override
+    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        deliveredProperties = (StructuralPropertiesVector) op2.getDeliveredPhysicalProperties().clone();
+    }
+
+    @Override
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent) {
+        List<LogicalVariable> scanVariables = new ArrayList<LogicalVariable>();
+        scanVariables.addAll(keys);
+        scanVariables.add(new LogicalVariable(-1));
+        IPhysicalPropertiesVector r = dataSource.getPropertiesProvider().computePropertiesVector(scanVariables);
+        IPhysicalPropertiesVector[] requirements = new IPhysicalPropertiesVector[1];
+        requirements[0] = r;
+        return new PhysicalRequirements(requirements, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op;
+        IMetadataProvider mp = context.getMetadataProvider();
+        JobSpecification spec = builder.getJobSpec();
+        RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0],
+                context);
+
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = null;
+        if (insertDeleteOp.getOperation() == Kind.INSERT)
+            runtimeAndConstraints = mp.getInsertRuntime(dataSource, propagatedSchema, keys, payload, inputDesc,
+                    context, spec);
+        else
+            runtimeAndConstraints = mp.getDeleteRuntime(dataSource, propagatedSchema, keys, payload, inputDesc,
+                    context, spec);
+
+        builder.contributeHyracksOperator(insertDeleteOp, runtimeAndConstraints.first);
+        builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
+        ILogicalOperator src = insertDeleteOp.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(src, 0, insertDeleteOp, 0);
+    }
+
+    @Override
+    public boolean isMicroOperator() {
+        return false;
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java
new file mode 100644
index 0000000..d153f90
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator.JoinKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspector;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.data.std.api.IPointable;
+import edu.uci.ics.hyracks.data.std.primitive.VoidPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
+
+/**
+ * Left input is broadcast and preserves its local properties. Right input can
+ * be partitioned in any way.
+ */
+public class NLJoinPOperator extends AbstractJoinPOperator {
+
+    // Memory budget forwarded verbatim to NestedLoopJoinOperatorDescriptor;
+    // units are not visible here (presumably a frame count) -- TODO confirm
+    // against the descriptor's contract.
+    private final int memSize;
+
+    public NLJoinPOperator(JoinKind kind, JoinPartitioningType partitioningType, int memSize) {
+        super(kind, partitioningType);
+        this.memSize = memSize;
+    }
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.NESTED_LOOP;
+    }
+
+    // Always a full Hyracks operator, never a micro operator.
+    @Override
+    public boolean isMicroOperator() {
+        return false;
+    }
+
+    /**
+     * Delivers the partitioning of the right (index 1) input when this
+     * operator executes PARTITIONED, and UNPARTITIONED otherwise; no local
+     * structural properties are delivered. Only the BROADCAST partitioning
+     * type is implemented.
+     */
+    @Override
+    public void computeDeliveredProperties(ILogicalOperator iop, IOptimizationContext context) {
+        if (partitioningType != JoinPartitioningType.BROADCAST) {
+            throw new NotImplementedException(partitioningType + " nested loop joins are not implemented.");
+        }
+
+        IPartitioningProperty pp;
+
+        AbstractLogicalOperator op = (AbstractLogicalOperator) iop;
+
+        if (op.getExecutionMode() == AbstractLogicalOperator.ExecutionMode.PARTITIONED) {
+            // Inherit whatever partitioning the right-hand input delivers
+            // (may legitimately be null if it has not been computed yet).
+            AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(1).getValue();
+            IPhysicalPropertiesVector pv1 = op2.getPhysicalOperator().getDeliveredProperties();
+            if (pv1 == null) {
+                pp = null;
+            } else {
+                pp = pv1.getPartitioningProperty();
+            }
+        } else {
+            pp = IPartitioningProperty.UNPARTITIONED;
+        }
+
+        List<ILocalStructuralProperty> localProps = new LinkedList<ILocalStructuralProperty>();
+        this.deliveredProperties = new StructuralPropertiesVector(pp, localProps);
+    }
+
+    /**
+     * Requires the left (index 0) input to be broadcast and places no
+     * requirement on the right (index 1) input; no coordination between the
+     * two children's partitionings is requested.
+     */
+    @Override
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent) {
+        if (partitioningType != JoinPartitioningType.BROADCAST) {
+            throw new NotImplementedException(partitioningType + " nested loop joins are not implemented.");
+        }
+
+        StructuralPropertiesVector[] pv = new StructuralPropertiesVector[2];
+        pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
+        pv[1] = new StructuralPropertiesVector(null, null);
+        return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+    }
+
+    /**
+     * Translates the logical join into a Hyracks NestedLoopJoinOperatorDescriptor.
+     * The join condition is compiled into a scalar evaluator over the propagated
+     * (output) schema and wrapped in a tuple-pair comparator factory. LEFT_OUTER
+     * joins additionally supply one null writer per field of the right input's
+     * schema; any kind other than INNER/LEFT_OUTER is rejected. Finally both
+     * inputs are wired to this operator's ports 0 and 1 in the job graph.
+     */
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op;
+        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
+                propagatedSchema, context);
+        // The condition is evaluated against the single propagated schema,
+        // i.e. the join's output schema, rather than the two input schemas.
+        IOperatorSchema[] conditionInputSchemas = new IOperatorSchema[1];
+        conditionInputSchemas[0] = propagatedSchema;
+        IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
+        IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory(join.getCondition().getValue(),
+                context.getTypeEnvironment(op), conditionInputSchemas, context);
+        ITuplePairComparatorFactory comparatorFactory = new TuplePairEvaluatorFactory(cond,
+                context.getBinaryBooleanInspectorFactory());
+        IOperatorDescriptorRegistry spec = builder.getJobSpec();
+        IOperatorDescriptor opDesc = null;
+
+        switch (kind) {
+            case INNER: {
+                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, false,
+                        null);
+                break;
+            }
+            case LEFT_OUTER: {
+                // One null writer per right-input field, used by the runtime
+                // descriptor to pad tuples that find no match.
+                INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
+                for (int j = 0; j < nullWriterFactories.length; j++) {
+                    nullWriterFactories[j] = context.getNullWriterFactory();
+                }
+                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, true,
+                        nullWriterFactories);
+                break;
+            }
+            default: {
+                throw new NotImplementedException();
+            }
+        }
+        contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
+
+        ILogicalOperator src1 = op.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(src1, 0, op, 0);
+        ILogicalOperator src2 = op.getInputs().get(1).getValue();
+        builder.contributeGraphEdge(src2, 0, op, 1);
+    }
+
+    /**
+     * Serializable factory shipped to the cluster; builds a
+     * {@link TuplePairEvaluator} from the compiled join-condition evaluator
+     * factory and a binary-boolean inspector factory.
+     */
+    public static class TuplePairEvaluatorFactory implements ITuplePairComparatorFactory {
+
+        private static final long serialVersionUID = 1L;
+        private final IScalarEvaluatorFactory cond;
+        private final IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory;
+
+        public TuplePairEvaluatorFactory(IScalarEvaluatorFactory cond,
+                IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory) {
+            this.cond = cond;
+            this.binaryBooleanInspectorFactory = binaryBooleanInspectorFactory;
+        }
+
+        // NOTE(review): synchronized although no shared mutable state is
+        // touched in this method -- presumably serializes access to a
+        // non-thread-safe underlying factory; confirm before removing.
+        @Override
+        public synchronized ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
+            return new TuplePairEvaluator(ctx, cond, binaryBooleanInspectorFactory.createBinaryBooleanInspector(ctx));
+        }
+    }
+
+    /**
+     * Evaluates the join condition over an (outer, inner) tuple pair.
+     * Returns 0 when the condition evaluates to true and 1 otherwise, so this
+     * acts as a match predicate rather than an ordering comparator.
+     */
+    public static class TuplePairEvaluator implements ITuplePairComparator {
+        private final IHyracksTaskContext ctx;
+        // Created lazily on the first compare() call.
+        private IScalarEvaluator condEvaluator;
+        private final IScalarEvaluatorFactory condFactory;
+        // Scratch pointable that receives the condition's result bytes.
+        private final IPointable p;
+        private final CompositeFrameTupleReference compositeTupleRef;
+        private final FrameTupleReference leftRef;
+        private final FrameTupleReference rightRef;
+        private final IBinaryBooleanInspector binaryBooleanInspector;
+
+        public TuplePairEvaluator(IHyracksTaskContext ctx, IScalarEvaluatorFactory condFactory,
+                IBinaryBooleanInspector binaryBooleanInspector) {
+            this.ctx = ctx;
+            this.condFactory = condFactory;
+            this.binaryBooleanInspector = binaryBooleanInspector;
+            this.leftRef = new FrameTupleReference();
+            this.p = VoidPointable.FACTORY.createPointable();
+            this.rightRef = new FrameTupleReference();
+            this.compositeTupleRef = new CompositeFrameTupleReference(leftRef, rightRef);
+        }
+
+        @Override
+        public int compare(IFrameTupleAccessor outerAccessor, int outerIndex, IFrameTupleAccessor innerAccessor,
+                int innerIndex) throws HyracksDataException {
+            if (condEvaluator == null) {
+                try {
+                    this.condEvaluator = condFactory.createScalarEvaluator(ctx);
+                } catch (AlgebricksException ae) {
+                    throw new HyracksDataException(ae);
+                }
+            }
+            // Expose the pair as one concatenated tuple, then evaluate the
+            // boolean join condition over it.
+            compositeTupleRef.reset(outerAccessor, outerIndex, innerAccessor, innerIndex);
+            try {
+                condEvaluator.evaluate(compositeTupleRef, p);
+            } catch (AlgebricksException ae) {
+                throw new HyracksDataException(ae);
+            }
+            boolean result = binaryBooleanInspector
+                    .getBooleanValue(p.getByteArray(), p.getStartOffset(), p.getLength());
+            if (result)
+                return 0;
+            else
+                return 1;
+        }
+    }
+
+    /**
+     * Presents a (left, right) tuple pair as one logical tuple: field indices
+     * below the left tuple's field count address the left tuple, the remainder
+     * address the right tuple shifted by the left field count. Accessor and
+     * tuple-index queries are intentionally unsupported.
+     */
+    public static class CompositeFrameTupleReference implements IFrameTupleReference {
+
+        private final FrameTupleReference refLeft;
+        private final FrameTupleReference refRight;
+
+        public CompositeFrameTupleReference(FrameTupleReference refLeft, FrameTupleReference refRight) {
+            this.refLeft = refLeft;
+            this.refRight = refRight;
+        }
+
+        // Re-points the two halves at the given (outer, inner) tuples.
+        public void reset(IFrameTupleAccessor outerAccessor, int outerIndex, IFrameTupleAccessor innerAccessor,
+                int innerIndex) {
+            refLeft.reset(outerAccessor, outerIndex);
+            refRight.reset(innerAccessor, innerIndex);
+        }
+
+        @Override
+        public int getFieldCount() {
+            return refLeft.getFieldCount() + refRight.getFieldCount();
+        }
+
+        @Override
+        public byte[] getFieldData(int fIdx) {
+            int leftFieldCount = refLeft.getFieldCount();
+            if (fIdx < leftFieldCount)
+                return refLeft.getFieldData(fIdx);
+            else
+                return refRight.getFieldData(fIdx - leftFieldCount);
+        }
+
+        @Override
+        public int getFieldStart(int fIdx) {
+            int leftFieldCount = refLeft.getFieldCount();
+            if (fIdx < leftFieldCount)
+                return refLeft.getFieldStart(fIdx);
+            else
+                return refRight.getFieldStart(fIdx - leftFieldCount);
+        }
+
+        @Override
+        public int getFieldLength(int fIdx) {
+            int leftFieldCount = refLeft.getFieldCount();
+            if (fIdx < leftFieldCount)
+                return refLeft.getFieldLength(fIdx);
+            else
+                return refRight.getFieldLength(fIdx - leftFieldCount);
+        }
+
+        @Override
+        public IFrameTupleAccessor getFrameTupleAccessor() {
+            throw new NotImplementedException();
+        }
+
+        @Override
+        public int getTupleIndex() {
+            throw new NotImplementedException();
+        }
+
+    }
+}
\ No newline at end of file
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NestedTupleSourcePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NestedTupleSourcePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NestedTupleSourcePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NestedTupleSourcePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/OneToOneExchangePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/OneToOneExchangePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/OneToOneExchangePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/OneToOneExchangePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RandomMergeExchangePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamDiePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamDiePOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamDiePOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamDiePOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SubplanPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SubplanPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SubplanPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/SubplanPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnnestPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnnestPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnnestPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/UnnestPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/WriteResultPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/WriteResultPOperator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/WriteResultPOperator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/WriteResultPOperator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/plan/ALogicalPlanImpl.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/plan/ALogicalPlanImpl.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/plan/ALogicalPlanImpl.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/plan/ALogicalPlanImpl.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
new file mode 100644
index 0000000..a94c78e
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
@@ -0,0 +1,356 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint;
+
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+
+/**
+ * Pretty-prints a logical operator tree. Each visit method renders a single
+ * operator as one line of text; operators that carry nested plans (group-by,
+ * subplan) recurse through {@link PlanPrettyPrinter#printPlan}.
+ *
+ * The {@code Integer} argument of every visit method is the indentation
+ * level, rendered as one leading space per level by
+ * {@link #addIndent(StringBuilder, int)}.
+ */
+public class LogicalOperatorPrettyPrintVisitor implements ILogicalOperatorVisitor<String, Integer> {
+
+    public LogicalOperatorPrettyPrintVisitor() {
+    }
+
+    @Override
+    public String visitAggregateOperator(AggregateOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("aggregate ").append(op.getVariables()).append(" <- ");
+        pprintExprList(op.getExpressions(), buffer);
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitRunningAggregateOperator(RunningAggregateOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("running-aggregate ").append(op.getVariables()).append(" <- ");
+        pprintExprList(op.getExpressions(), buffer);
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("empty-tuple-source");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitGroupByOperator(GroupByOperator op, Integer indent) throws AlgebricksException {
+        StringBuilder buffer = new StringBuilder();
+        // The "{" opened here is closed inside printNestedPlans.
+        addIndent(buffer, indent).append("group by (").append(op.gByListToString()).append(") decor (")
+                .append(op.decorListToString()).append(") {");
+        printNestedPlans(op, indent, buffer);
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitDistinctOperator(DistinctOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("distinct " + "(");
+        pprintExprList(op.getExpressions(), buffer);
+        buffer.append(")");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitInnerJoinOperator(InnerJoinOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("join (").append(op.getCondition().getValue()).append(")");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("left outer join (").append(op.getCondition().getValue()).append(")");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("nested tuple source");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitOrderOperator(OrderOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("order ");
+        // Each order key is printed as "(ASC|DESC|<expr>, <key-expression>) ".
+        for (Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>> p : op.getOrderExpressions()) {
+            String fst;
+            switch (p.first.getKind()) {
+                case ASC: {
+                    fst = "ASC";
+                    break;
+                }
+                case DESC: {
+                    fst = "DESC";
+                    break;
+                }
+                default: {
+                    // Non-ASC/DESC orders (e.g. expression-defined) print the order expression itself.
+                    fst = p.first.getExpressionRef().toString();
+                }
+            }
+            buffer.append("(" + fst + ", " + p.second.getValue() + ") ");
+        }
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitAssignOperator(AssignOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("assign ").append(op.getVariables()).append(" <- ");
+        pprintExprList(op.getExpressions(), buffer);
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitWriteOperator(WriteOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("write ").append(op.getExpressions());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitWriteResultOperator(WriteResultOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("load ").append(op.getDataSource()).append(" from ")
+                .append(op.getPayloadExpression()).append(" partitioned by ").append(op.getKeyExpressions().toString());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitSelectOperator(SelectOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("select " + "(" + op.getCondition().getValue() + ")");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitProjectOperator(ProjectOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("project " + "(" + op.getVariables() + ")");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitPartitioningSplitOperator(PartitioningSplitOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("partitioning-split (" + op.getExpressions() + ")");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitSubplanOperator(SubplanOperator op, Integer indent) throws AlgebricksException {
+        StringBuilder buffer = new StringBuilder();
+        // The "{" opened here is closed inside printNestedPlans.
+        addIndent(buffer, indent).append("subplan {");
+        printNestedPlans(op, indent, buffer);
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitUnionOperator(UnionAllOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("union");
+        // Each mapping triple prints as " (leftVar, rightVar, outputVar)".
+        for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> v : op.getVariableMappings()) {
+            buffer.append(" (" + v.first + ", " + v.second + ", " + v.third + ")");
+        }
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitUnnestOperator(UnnestOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("unnest " + op.getVariable());
+        // The positional variable (position of each unnested item) is optional.
+        if (op.getPositionalVariable() != null) {
+            buffer.append(" at " + op.getPositionalVariable());
+        }
+        buffer.append(" <- " + op.getExpressionRef().getValue());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitUnnestMapOperator(UnnestMapOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("unnest-map " + op.getVariables() + " <- " + op.getExpressionRef().getValue());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitDataScanOperator(DataSourceScanOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append(
+                "data-scan " + op.getProjectVariables() + "<-" + op.getVariables() + " <- " + op.getDataSource());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitLimitOperator(LimitOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("limit " + op.getMaxObjects().getValue());
+        // The offset expression is optional; omitted from output when null.
+        ILogicalExpression offset = op.getOffset().getValue();
+        if (offset != null) {
+            buffer.append(", " + offset);
+        }
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitDieOperator(DieOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("die after " + op.getAfterObjects().getValue());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitExchangeOperator(ExchangeOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("exchange ");
+        return buffer.toString();
+    }
+
+    // Appends 'level' spaces to the buffer and returns it, so callers can chain
+    // append(...) onto the indented line.
+    protected static final StringBuilder addIndent(StringBuilder buffer, int level) {
+        for (int i = 0; i < level; ++i) {
+            buffer.append(' ');
+        }
+        return buffer;
+    }
+
+    // Prints each nested plan of 'op', indented 10 extra levels and wrapped in
+    // braces. NOTE(review): the opening "       {" line is intentionally skipped
+    // for the FIRST plan because the calling visit method (group-by / subplan)
+    // has already appended "{" on the operator's own line; every plan still gets
+    // a closing "       }". If there are no nested plans, only "}" is emitted.
+    private void printNestedPlans(AbstractOperatorWithNestedPlans op, Integer indent, StringBuilder buffer)
+            throws AlgebricksException {
+        boolean first = true;
+        if (op.getNestedPlans().isEmpty()) {
+            buffer.append("}");
+        } else {
+            for (ILogicalPlan p : op.getNestedPlans()) {
+                // PrettyPrintUtil.indent(buffer, level + 10).append("var " +
+                // p.first + ":\n");
+                buffer.append("\n");
+                if (first) {
+                    first = false;
+                } else {
+                    addIndent(buffer, indent).append("       {\n");
+                }
+                PlanPrettyPrinter.printPlan(p, buffer, this, indent + 10);
+                addIndent(buffer, indent).append("       }");
+            }
+        }
+    }
+
+    @Override
+    public String visitScriptOperator(ScriptOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append(
+                "script (in: " + op.getInputVariables() + ") (out: " + op.getOutputVariables() + ")");
+        return buffer.toString();
+    }
+
+    // Renders an expression list as "[e1, e2, ...]" into 'buffer'.
+    private void pprintExprList(List<Mutable<ILogicalExpression>> expressions, StringBuilder buffer) {
+        buffer.append("[");
+        boolean first = true;
+        for (Mutable<ILogicalExpression> exprRef : expressions) {
+            if (first) {
+                first = false;
+            } else {
+                buffer.append(", ");
+            }
+            buffer.append(exprRef.getValue());
+        }
+        buffer.append("]");
+    }
+
+    @Override
+    public String visitReplicateOperator(ReplicateOperator op, Integer indent) throws AlgebricksException {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("replicate ");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitInsertDeleteOperator(InsertDeleteOperator op, Integer indent) throws AlgebricksException {
+        StringBuilder buffer = new StringBuilder();
+        String header = op.getOperation() == Kind.INSERT ? "insert into " : "delete from ";
+        addIndent(buffer, indent).append(header).append(op.getDataSource()).append(" from ")
+                .append(op.getPayloadExpression()).append(" partitioned by ")
+                .append(op.getPrimaryKeyExpressions().toString());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Integer indent)
+            throws AlgebricksException {
+        StringBuilder buffer = new StringBuilder();
+        String header = op.getOperation() == Kind.INSERT ? "insert into " : "delete from ";
+        addIndent(buffer, indent).append(header).append(op.getDataSourceIndex()).append(" from ")
+                .append(op.getSecondaryKeyExpressions().toString()).append(" ")
+                .append(op.getPrimaryKeyExpressions().toString());
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitSinkOperator(SinkOperator op, Integer indent) throws AlgebricksException {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("sink");
+        return buffer.toString();
+    }
+
+    @Override
+    public String visitExtensionOperator(ExtensionOperator op, Integer indent) throws AlgebricksException {
+        StringBuilder buffer = new StringBuilder();
+        // NOTE(review): a generic ExtensionOperator is labelled "statistics
+        // collection" here — presumably a leftover from one specific extension;
+        // confirm whether this should print the operator itself instead.
+        addIndent(buffer, indent).append("statistics collection");
+        return buffer.toString();
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/PlanPrettyPrinter.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/PlanPrettyPrinter.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/PlanPrettyPrinter.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/PlanPrettyPrinter.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AbstractGroupingProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AbstractGroupingProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AbstractGroupingProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AbstractGroupingProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AsterixNodeGroupDomain.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AsterixNodeGroupDomain.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AsterixNodeGroupDomain.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/AsterixNodeGroupDomain.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/BroadcastPartitioningProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/BroadcastPartitioningProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/BroadcastPartitioningProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/BroadcastPartitioningProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FileSplitDomain.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FileSplitDomain.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FileSplitDomain.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FileSplitDomain.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FilteredVariablePropagationPolicy.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FilteredVariablePropagationPolicy.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FilteredVariablePropagationPolicy.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FilteredVariablePropagationPolicy.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FunctionalDependency.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FunctionalDependency.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FunctionalDependency.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/FunctionalDependency.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILocalStructuralProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILocalStructuralProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILocalStructuralProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILocalStructuralProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILogicalPropertiesVector.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILogicalPropertiesVector.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILogicalPropertiesVector.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ILogicalPropertiesVector.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/INodeDomain.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/INodeDomain.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/INodeDomain.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/INodeDomain.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPhysicalPropertiesVector.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPhysicalPropertiesVector.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPhysicalPropertiesVector.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPhysicalPropertiesVector.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPropertiesComputer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPropertiesComputer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPropertiesComputer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IPropertiesComputer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IStructuralProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IStructuralProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IStructuralProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/IStructuralProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalOrderProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalOrderProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalOrderProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LocalOrderProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LogicalPropertiesVectorImpl.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LogicalPropertiesVectorImpl.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LogicalPropertiesVectorImpl.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/LogicalPropertiesVectorImpl.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderColumn.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderColumn.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderColumn.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderColumn.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderedPartitionedProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderedPartitionedProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderedPartitionedProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/OrderedPartitionedProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PhysicalRequirements.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PhysicalRequirements.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PhysicalRequirements.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PhysicalRequirements.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PropertiesUtil.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PropertiesUtil.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PropertiesUtil.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/PropertiesUtil.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/RandomPartitioningProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/RandomPartitioningProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/RandomPartitioningProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/RandomPartitioningProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnpartitionedPropertyComputer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnpartitionedPropertyComputer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnpartitionedPropertyComputer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/UnpartitionedPropertyComputer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/VariablePropagationPolicy.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/VariablePropagationPolicy.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/VariablePropagationPolicy.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/VariablePropagationPolicy.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/IScriptDescription.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/IScriptDescription.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/IScriptDescription.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/IScriptDescription.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/StringStreamingScriptDescription.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/StringStreamingScriptDescription.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/StringStreamingScriptDescription.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/scripting/StringStreamingScriptDescription.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/AbstractTypeEnvironment.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/AbstractTypeEnvironment.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/AbstractTypeEnvironment.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/AbstractTypeEnvironment.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypeEnvPointer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypeEnvPointer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypeEnvPointer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypeEnvPointer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypingContext.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypingContext.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypingContext.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/ITypingContext.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/NonPropagatingTypeEnvironment.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/NonPropagatingTypeEnvironment.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/NonPropagatingTypeEnvironment.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/NonPropagatingTypeEnvironment.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/OpRefTypeEnvPointer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/OpRefTypeEnvPointer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/OpRefTypeEnvPointer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/OpRefTypeEnvPointer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagateOperatorInputsTypeEnvironment.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagateOperatorInputsTypeEnvironment.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagateOperatorInputsTypeEnvironment.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagateOperatorInputsTypeEnvironment.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagatingTypeEnvironment.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagatingTypeEnvironment.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagatingTypeEnvironment.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/typing/PropagatingTypeEnvironment.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionReferenceTransform.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionReferenceTransform.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionReferenceTransform.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionReferenceTransform.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalExpressionVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/AlgebricksConfig.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/AlgebricksConfig.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/AlgebricksConfig.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/AlgebricksConfig.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/SysoutFormatter.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/SysoutFormatter.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/SysoutFormatter.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/config/SysoutFormatter.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/ConnectorPolicyAssignmentPolicy.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/ConnectorPolicyAssignmentPolicy.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/ConnectorPolicyAssignmentPolicy.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/ConnectorPolicyAssignmentPolicy.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobBuilder.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java
new file mode 100644
index 0000000..365d1a5
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java
@@ -0,0 +1,209 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.jobgen.impl;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IPartialAggregationTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
+import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+
+public class JobGenContext {
+	private final IOperatorSchema outerFlowSchema;
+	private final Map<ILogicalOperator, IOperatorSchema> schemaMap = new HashMap<ILogicalOperator, IOperatorSchema>();
+	private final ISerializerDeserializerProvider serializerDeserializerProvider;
+	private final IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider;
+	private final IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider;
+	private final IBinaryComparatorFactoryProvider comparatorFactoryProvider;
+	private final IPrinterFactoryProvider printerFactoryProvider;
+	private final ITypeTraitProvider typeTraitProvider;
+	private final IMetadataProvider<?, ?> metadataProvider;
+	private final INullWriterFactory nullWriterFactory;
+	private final INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider;
+	private final Object appContext;
+	private final IBinaryBooleanInspectorFactory booleanInspectorFactory;
+	private final IBinaryIntegerInspectorFactory integerInspectorFactory;
+	private final IExpressionRuntimeProvider expressionRuntimeProvider;
+	private final IExpressionTypeComputer expressionTypeComputer;
+	private final IExpressionEvalSizeComputer expressionEvalSizeComputer;
+	private final IPartialAggregationTypeComputer partialAggregationTypeComputer;
+	private final int frameSize;
+	private AlgebricksPartitionConstraint clusterLocations;
+	private int varCounter;
+	private final ITypingContext typingContext;
+
+	public JobGenContext(
+			IOperatorSchema outerFlowSchema,
+			IMetadataProvider<?, ?> metadataProvider,
+			Object appContext,
+			ISerializerDeserializerProvider serializerDeserializerProvider,
+			IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider,
+			IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider,
+			IBinaryComparatorFactoryProvider comparatorFactoryProvider,
+			ITypeTraitProvider typeTraitProvider,
+			IBinaryBooleanInspectorFactory booleanInspectorFactory,
+			IBinaryIntegerInspectorFactory integerInspectorFactory,
+			IPrinterFactoryProvider printerFactoryProvider,
+			INullWriterFactory nullWriterFactory,
+			INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider,
+			IExpressionRuntimeProvider expressionRuntimeProvider,
+			IExpressionTypeComputer expressionTypeComputer,
+			INullableTypeComputer nullableTypeComputer,
+			ITypingContext typingContext,
+			IExpressionEvalSizeComputer expressionEvalSizeComputer,
+			IPartialAggregationTypeComputer partialAggregationTypeComputer,
+			int frameSize, AlgebricksPartitionConstraint clusterLocations) {
+		this.outerFlowSchema = outerFlowSchema;
+		this.metadataProvider = metadataProvider;
+		this.appContext = appContext;
+		this.serializerDeserializerProvider = serializerDeserializerProvider;
+		this.hashFunctionFactoryProvider = hashFunctionFactoryProvider;
+		this.hashFunctionFamilyProvider = hashFunctionFamilyProvider;
+		this.comparatorFactoryProvider = comparatorFactoryProvider;
+		this.typeTraitProvider = typeTraitProvider;
+		this.booleanInspectorFactory = booleanInspectorFactory;
+		this.integerInspectorFactory = integerInspectorFactory;
+		this.printerFactoryProvider = printerFactoryProvider;
+		this.clusterLocations = clusterLocations;
+		this.normalizedKeyComputerFactoryProvider = normalizedKeyComputerFactoryProvider;
+		this.nullWriterFactory = nullWriterFactory;
+		this.expressionRuntimeProvider = expressionRuntimeProvider;
+		this.expressionTypeComputer = expressionTypeComputer;
+		this.typingContext = typingContext;
+		this.expressionEvalSizeComputer = expressionEvalSizeComputer;
+		this.partialAggregationTypeComputer = partialAggregationTypeComputer;
+		this.frameSize = frameSize;
+		this.varCounter = 0;
+	}
+
+	public IOperatorSchema getOuterFlowSchema() {
+		return outerFlowSchema;
+	}
+
+	public AlgebricksPartitionConstraint getClusterLocations() {
+		return clusterLocations;
+	}
+
+	public IMetadataProvider<?, ?> getMetadataProvider() {
+		return metadataProvider;
+	}
+
+	public Object getAppContext() {
+		return appContext;
+	}
+
+	public ISerializerDeserializerProvider getSerializerDeserializerProvider() {
+		return serializerDeserializerProvider;
+	}
+
+	public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
+		return hashFunctionFactoryProvider;
+	}
+
+	public IBinaryHashFunctionFamilyProvider getBinaryHashFunctionFamilyProvider() {
+		return hashFunctionFamilyProvider;
+	}
+
+	public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
+		return comparatorFactoryProvider;
+	}
+
+	public ITypeTraitProvider getTypeTraitProvider() {
+		return typeTraitProvider;
+	}
+
+	public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
+		return booleanInspectorFactory;
+	}
+
+	public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
+		return integerInspectorFactory;
+	}
+
+	public IPrinterFactoryProvider getPrinterFactoryProvider() {
+		return printerFactoryProvider;
+	}
+
+	public IExpressionRuntimeProvider getExpressionRuntimeProvider() {
+		return expressionRuntimeProvider;
+	}
+
+	public IOperatorSchema getSchema(ILogicalOperator op) {
+		return schemaMap.get(op);
+	}
+
+	public void putSchema(ILogicalOperator op, IOperatorSchema schema) {
+		schemaMap.put(op, schema);
+	}
+
+	public LogicalVariable createNewVar() {
+		varCounter++;
+		LogicalVariable var = new LogicalVariable(-varCounter);
+		return var;
+	}
+
+	public Object getType(ILogicalExpression expr, IVariableTypeEnvironment env)
+			throws AlgebricksException {
+		return expressionTypeComputer.getType(expr,
+				typingContext.getMetadataProvider(), env);
+	}
+
+	public INullWriterFactory getNullWriterFactory() {
+		return nullWriterFactory;
+	}
+
+	public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
+		return normalizedKeyComputerFactoryProvider;
+	}
+
+	public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
+		return expressionEvalSizeComputer;
+	}
+
+	public int getFrameSize() {
+		return frameSize;
+	}
+
+	public IPartialAggregationTypeComputer getPartialAggregationTypeComputer() {
+		return partialAggregationTypeComputer;
+	}
+
+	public IVariableTypeEnvironment getTypeEnvironment(ILogicalOperator op) {
+		return typingContext.getOutputTypeEnvironment(op);
+	}
+
+}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
new file mode 100644
index 0000000..530d19c
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.jobgen.impl;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+
+public final class JobGenHelper {
+
+    private static final Logger LOGGER = Logger.getLogger(JobGenHelper.class.getName());
+
+    @SuppressWarnings("rawtypes")
+    public static RecordDescriptor mkRecordDescriptor(IVariableTypeEnvironment env, IOperatorSchema opSchema,
+            JobGenContext context) throws AlgebricksException {
+        ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema.getSize()];
+        ITypeTraits[] typeTraits = new ITypeTraits[opSchema.getSize()];
+        ISerializerDeserializerProvider sdp = context.getSerializerDeserializerProvider();
+        ITypeTraitProvider ttp = context.getTypeTraitProvider();
+        int i = 0;
+        for (LogicalVariable var : opSchema) {
+            Object t = env.getVarType(var);
+            if (t == null) {
+                LOGGER.warning("No type for variable " + var);
+            }
+            fields[i] = sdp.getSerializerDeserializer(t);
+            typeTraits[i] = ttp.getTypeTrait(t);
+            i++;
+        }
+        return new RecordDescriptor(fields, typeTraits);
+    }
+
+    public static IPrinterFactory[] mkPrinterFactories(IOperatorSchema opSchema, IVariableTypeEnvironment env,
+            JobGenContext context, int[] printColumns) throws AlgebricksException {
+        IPrinterFactory[] pf = new IPrinterFactory[printColumns.length];
+        IPrinterFactoryProvider pff = context.getPrinterFactoryProvider();
+        for (int i = 0; i < pf.length; i++) {
+            LogicalVariable v = opSchema.getVariable(printColumns[i]);
+            Object t = env.getVarType(v);
+            pf[i] = pff.getPrinterFactory(t);
+        }
+        return pf;
+    }
+
+    public static int[] variablesToFieldIndexes(Collection<LogicalVariable> varLogical, IOperatorSchema opSchema) {
+        int[] tuplePos = new int[varLogical.size()];
+        int i = 0;
+        for (LogicalVariable var : varLogical) {
+            tuplePos[i] = opSchema.findVariable(var);
+            i++;
+        }
+        return tuplePos;
+    }
+
+    public static IBinaryHashFunctionFactory[] variablesToBinaryHashFunctionFactories(
+            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
+            throws AlgebricksException {
+        IBinaryHashFunctionFactory[] funFactories = new IBinaryHashFunctionFactory[varLogical.size()];
+        int i = 0;
+        IBinaryHashFunctionFactoryProvider bhffProvider = context.getBinaryHashFunctionFactoryProvider();
+        for (LogicalVariable var : varLogical) {
+            Object type = env.getVarType(var);
+            funFactories[i++] = bhffProvider.getBinaryHashFunctionFactory(type);
+        }
+        return funFactories;
+    }
+
+    public static IBinaryHashFunctionFamily[] variablesToBinaryHashFunctionFamilies(
+            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
+            throws AlgebricksException {
+        IBinaryHashFunctionFamily[] funFamilies = new IBinaryHashFunctionFamily[varLogical.size()];
+        int i = 0;
+        IBinaryHashFunctionFamilyProvider bhffProvider = context.getBinaryHashFunctionFamilyProvider();
+        for (LogicalVariable var : varLogical) {
+            Object type = env.getVarType(var);
+            funFamilies[i++] = bhffProvider.getBinaryHashFunctionFamily(type);
+        }
+        return funFamilies;
+    }
+
+    public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(
+            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
+            throws AlgebricksException {
+        IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[varLogical.size()];
+        IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
+        int i = 0;
+        for (LogicalVariable v : varLogical) {
+            Object type = env.getVarType(v);
+            compFactories[i++] = bcfProvider.getBinaryComparatorFactory(type, true);
+        }
+        return compFactories;
+    }
+
+    public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(List<LogicalVariable> varLogical,
+            int start, int size, IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
+        IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[size];
+        IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
+        for (int i = 0; i < size; i++) {
+            Object type = env.getVarType(varLogical.get(start + i));
+            compFactories[i] = bcfProvider.getBinaryComparatorFactory(type, true);
+        }
+        return compFactories;
+    }
+
+    public static INormalizedKeyComputerFactory variablesToAscNormalizedKeyComputerFactory(
+            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
+            throws AlgebricksException {
+        INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
+        if (nkcfProvider == null)
+            return null;
+        for (LogicalVariable v : varLogical) {
+            Object type = env.getVarType(v);
+            return nkcfProvider.getNormalizedKeyComputerFactory(type, true);
+        }
+        return null;
+    }
+
+    public static ITypeTraits[] variablesToTypeTraits(Collection<LogicalVariable> varLogical,
+            IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
+        ITypeTraits[] typeTraits = new ITypeTraits[varLogical.size()];
+        ITypeTraitProvider typeTraitProvider = context.getTypeTraitProvider();
+        int i = 0;
+        for (LogicalVariable v : varLogical) {
+            Object type = env.getVarType(v);
+            typeTraits[i++] = typeTraitProvider.getTypeTrait(type);
+        }
+        return typeTraits;
+    }
+
+    public static ITypeTraits[] variablesToTypeTraits(List<LogicalVariable> varLogical, int start, int size,
+            IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
+        ITypeTraits[] typeTraits = new ITypeTraits[size];
+        ITypeTraitProvider typeTraitProvider = context.getTypeTraitProvider();
+        for (int i = 0; i < size; i++) {
+            Object type = env.getVarType(varLogical.get(start + i));
+            typeTraits[i] = typeTraitProvider.getTypeTrait(type);
+        }
+        return typeTraits;
+    }
+
+    public static int[] projectAllVariables(IOperatorSchema opSchema) {
+        int[] projectionList = new int[opSchema.getSize()];
+        int k = 0;
+        for (LogicalVariable v : opSchema) {
+            projectionList[k++] = opSchema.findVariable(v);
+        }
+        return projectionList;
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/OperatorSchemaImpl.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/OperatorSchemaImpl.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/OperatorSchemaImpl.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/OperatorSchemaImpl.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
new file mode 100644
index 0000000..63a6852
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.jobgen.impl;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class PlanCompiler { // translates an ILogicalPlan into a Hyracks JobSpecification
+    private JobGenContext context;
+    private Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> operatorVisitedToParents = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>(); // child op -> all parents seen during traversal; doubles as the visited set
+
+    public PlanCompiler(JobGenContext context) {
+        this.context = context;
+    }
+
+    public JobGenContext getContext() {
+        return context;
+    }
+
+    public JobSpecification compilePlan(ILogicalPlan plan, IOperatorSchema outerPlanSchema) throws AlgebricksException { // entry point: compile every root of the plan into one job spec
+        JobSpecification spec = new JobSpecification();
+        List<ILogicalOperator> rootOps = new ArrayList<ILogicalOperator>();
+        IHyracksJobBuilder builder = new JobBuilder(spec, context.getClusterLocations());
+        for (Mutable<ILogicalOperator> opRef : plan.getRoots()) {
+            compileOpRef(opRef, spec, builder, outerPlanSchema); // depth-first: children are compiled before their parents
+            rootOps.add(opRef.getValue());
+        }
+        reviseEdges(builder); // patch up edges for children that fan out to multiple parents
+        operatorVisitedToParents.clear(); // reset per-plan bookkeeping so the compiler instance can be reused
+        builder.buildSpec(rootOps);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        // Do not do activity cluster planning because it is slow on large clusters
+        spec.setUseConnectorPolicyForScheduling(false);
+        return spec;
+    }
+
+    private void compileOpRef(Mutable<ILogicalOperator> opRef, IOperatorDescriptorRegistry spec, IHyracksJobBuilder builder,
+            IOperatorSchema outerPlanSchema) throws AlgebricksException { // recursively compiles opRef's inputs, then contributes opRef's runtime operator
+        ILogicalOperator op = opRef.getValue();
+        int n = op.getInputs().size();
+        IOperatorSchema[] schemas = new IOperatorSchema[n]; // one input schema per child, in input order
+        int i = 0;
+        for (Mutable<ILogicalOperator> opRef2 : op.getInputs()) {
+            List<Mutable<ILogicalOperator>> parents = operatorVisitedToParents.get(opRef2);
+            if (parents == null) { // first visit of this child: register parent and recurse
+                parents = new ArrayList<Mutable<ILogicalOperator>>();
+                operatorVisitedToParents.put(opRef2, parents);
+                parents.add(opRef);
+                compileOpRef(opRef2, spec, builder, outerPlanSchema);
+                schemas[i++] = context.getSchema(opRef2.getValue());
+            } else { // child already compiled (shared subtree): only record the extra parent
+                if (!parents.contains(opRef))
+                    parents.add(opRef);
+                schemas[i++] = context.getSchema(opRef2.getValue());
+                continue; // no-op: loop iteration ends here anyway
+            }
+        }
+
+        IOperatorSchema opSchema = new OperatorSchemaImpl();
+        context.putSchema(op, opSchema); // publish this operator's output schema before contributing the runtime op
+        op.getVariablePropagationPolicy().propagateVariables(opSchema, schemas);
+        op.contributeRuntimeOperator(builder, context, opSchema, schemas, outerPlanSchema);
+    }
+
+    private void reviseEdges(IHyracksJobBuilder builder) {
+        /**
+         * revise the edges for the case of replicate operator
+         */
+        for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : operatorVisitedToParents
+                .entrySet()) {
+            Mutable<ILogicalOperator> child = entry.getKey();
+            List<Mutable<ILogicalOperator>> parents = entry.getValue();
+            if (parents.size() > 1) { // only multi-parent children (e.g. replicate fan-out) need extra edges
+                int i = 0;
+                for (Mutable<ILogicalOperator> parent : parents) {
+                    builder.contributeGraphEdge(child.getValue(), i, parent.getValue(), 0); // i-th output of child feeds input 0 of each parent
+                    i++;
+                }
+            }
+        }
+    }
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IAlgebraicRewriteRule.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IAlgebraicRewriteRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IAlgebraicRewriteRule.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IAlgebraicRewriteRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IOptimizationContextFactory.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IOptimizationContextFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IOptimizationContextFactory.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/IOptimizationContextFactory.java
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
new file mode 100644
index 0000000..fc6c198
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
@@ -0,0 +1,91 @@
+package edu.uci.ics.hyracks.algebricks.core.rewriter.base;
+
+import java.util.Properties;
+
+public class PhysicalOptimizationConfig { // tunable knobs for physical plan optimization, stored as string properties
+    private static final int MB = 1048576; // bytes per megabyte
+    private static final String FRAMESIZE = "FRAMESIZE";
+    private static final String MAX_FRAMES_EXTERNAL_SORT = "MAX_FRAMES_EXTERNAL_SORT";
+    private static final String MAX_FRAMES_EXTERNAL_GROUP_BY = "MAX_FRAMES_EXTERNAL_GROUP_BY";
+
+    private static final String DEFAULT_HASH_GROUP_TABLE_SIZE = "DEFAULT_HASH_GROUP_TABLE_SIZE";
+    private static final String DEFAULT_EXTERNAL_GROUP_TABLE_SIZE = "DEFAULT_EXTERNAL_GROUP_TABLE_SIZE";
+    private static final String DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE = "DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE";
+
+    private Properties properties = new Properties(); // every value round-trips through a string via setInt/getInt
+
+    public PhysicalOptimizationConfig() {
+        int frameSize = 32768; // 32K default frame size
+        setInt(FRAMESIZE, frameSize);
+        setInt(MAX_FRAMES_EXTERNAL_SORT, (int) (((long) 32 * MB) / frameSize)); // 32MB sort budget expressed as a frame count
+        setInt(MAX_FRAMES_EXTERNAL_GROUP_BY, (int) (((long) 32 * MB) / frameSize)); // 32MB group-by budget expressed as a frame count
+
+        // use http://www.rsok.com/~jrm/printprimes.html to find prime numbers
+        setInt(DEFAULT_HASH_GROUP_TABLE_SIZE, 10485767);
+        setInt(DEFAULT_EXTERNAL_GROUP_TABLE_SIZE, 10485767);
+        setInt(DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE, 10485767);
+    }
+
+    public int getFrameSize() {
+        return getInt(FRAMESIZE, 32768); // fallback matches the constructor's default
+    }
+
+    public void setFrameSize(int frameSize) {
+        setInt(FRAMESIZE, frameSize);
+    }
+
+    public int getMaxFramesExternalSort() {
+        int frameSize = getFrameSize();
+        return getInt(MAX_FRAMES_EXTERNAL_SORT, (int) (((long) 512 * MB) / frameSize)); // NOTE(review): 512MB fallback differs from the 32MB seeded in the constructor — confirm intended
+    }
+
+    public void setMaxFramesExternalSort(int frameLimit) {
+        setInt(MAX_FRAMES_EXTERNAL_SORT, frameLimit);
+    }
+
+    public int getMaxFramesExternalGroupBy() {
+        int frameSize = getFrameSize();
+        return getInt(MAX_FRAMES_EXTERNAL_GROUP_BY, (int) (((long) 256 * MB) / frameSize)); // NOTE(review): 256MB fallback differs from the 32MB seeded in the constructor — confirm intended
+    }
+
+    public void setMaxFramesExternalGroupBy(int frameLimit) {
+        setInt(MAX_FRAMES_EXTERNAL_GROUP_BY, frameLimit);
+    }
+
+    public int getHashGroupByTableSize() {
+        return getInt(DEFAULT_HASH_GROUP_TABLE_SIZE, 10485767);
+    }
+
+    public void setHashGroupByTableSize(int tableSize) {
+        setInt(DEFAULT_HASH_GROUP_TABLE_SIZE, tableSize);
+    }
+
+    public int getExternalGroupByTableSize() {
+        return getInt(DEFAULT_EXTERNAL_GROUP_TABLE_SIZE, 10485767);
+    }
+
+    public void setExternalGroupByTableSize(int tableSize) {
+        setInt(DEFAULT_EXTERNAL_GROUP_TABLE_SIZE, tableSize);
+    }
+
+    public int getInMemHashJoinTableSize() {
+        return getInt(DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE, 10485767);
+    }
+
+    public void setInMemHashJoinTableSize(int tableSize) {
+        setInt(DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE, tableSize);
+    }
+
+    private void setInt(String property, int value) {
+        properties.setProperty(property, Integer.toString(value)); // Properties only holds strings, so encode here
+    }
+
+    private int getInt(String property, int defaultValue) { // decode an int property, falling back when unset
+        String value = properties.getProperty(property);
+        if (value == null)
+            return defaultValue;
+        else
+            return Integer.parseInt(value);
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/utils/Substitution.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/utils/Substitution.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/utils/Substitution.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/utils/Substitution.java
diff --git a/algebricks/algebricks-data/pom.xml b/algebricks/algebricks-data/pom.xml
new file mode 100644
index 0000000..3f7592a
--- /dev/null
+++ b/algebricks/algebricks-data/pom.xml
@@ -0,0 +1,37 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-data</artifactId>
+  <name>algebricks-data</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>algebricks-common</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriter.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriter.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriter.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriter.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriterFactory.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriterFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriterFactory.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IAWriterFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspector.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspector.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspector.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspector.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspectorFactory.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspectorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspectorFactory.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryBooleanInspectorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryComparatorFactoryProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryComparatorFactoryProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryComparatorFactoryProvider.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryComparatorFactoryProvider.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFactoryProvider.java
diff --git a/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
new file mode 100644
index 0000000..8a992b3
--- /dev/null
+++ b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.algebricks.data;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+public interface IBinaryHashFunctionFamilyProvider { // maps a type descriptor object to its IBinaryHashFunctionFamily
+
+	public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) // declared to throw AlgebricksException on failure
+			throws AlgebricksException;
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspector.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspector.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspector.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspector.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspectorFactory.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspectorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspectorFactory.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryIntegerInspectorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/INormalizedKeyComputerFactoryProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/INormalizedKeyComputerFactoryProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/INormalizedKeyComputerFactoryProvider.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/INormalizedKeyComputerFactoryProvider.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinter.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinter.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinter.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinter.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactory.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactory.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactoryProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactoryProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactoryProvider.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IPrinterFactoryProvider.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ISerializerDeserializerProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ISerializerDeserializerProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ISerializerDeserializerProvider.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ISerializerDeserializerProvider.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ITypeTraitProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ITypeTraitProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ITypeTraitProvider.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/ITypeTraitProvider.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryBooleanInspectorImpl.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryBooleanInspectorImpl.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryBooleanInspectorImpl.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryBooleanInspectorImpl.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryIntegerInspectorImpl.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryIntegerInspectorImpl.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryIntegerInspectorImpl.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/BinaryIntegerInspectorImpl.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/IntegerPrinterFactory.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/IntegerPrinterFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/IntegerPrinterFactory.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/IntegerPrinterFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/NoopNullWriterFactory.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/NoopNullWriterFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/NoopNullWriterFactory.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/NoopNullWriterFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/UTF8StringPrinterFactory.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/UTF8StringPrinterFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/UTF8StringPrinterFactory.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/impl/UTF8StringPrinterFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/utils/WriteValueTools.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/utils/WriteValueTools.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/utils/WriteValueTools.java
rename to algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/utils/WriteValueTools.java
diff --git a/algebricks/algebricks-examples/piglet-example/pom.xml b/algebricks/algebricks-examples/piglet-example/pom.xml
new file mode 100644
index 0000000..ea36fb6
--- /dev/null
+++ b/algebricks/algebricks-examples/piglet-example/pom.xml
@@ -0,0 +1,66 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>piglet-example</artifactId>
+        <name>piglet-example</name>
+
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>algebricks-examples</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>javacc-maven-plugin</artifactId>
+				<version>2.6</version>
+				<executions>
+					<execution>
+						<id>javacc</id>
+						<goals>
+							<goal>javacc</goal>
+						</goals>
+						<configuration>
+							<isStatic>false</isStatic>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>algebricks-compiler</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.2</version>
+			<type>jar</type>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ASTNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ASTNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ASTNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ASTNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/AssignmentNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/AssignmentNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/AssignmentNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/AssignmentNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/DumpNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/DumpNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/DumpNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/DumpNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ExpressionNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ExpressionNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ExpressionNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ExpressionNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FieldAccessExpressionNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FieldAccessExpressionNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FieldAccessExpressionNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FieldAccessExpressionNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FilterNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FilterNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FilterNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FilterNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FunctionTag.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FunctionTag.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FunctionTag.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/FunctionTag.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LiteralExpressionNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LiteralExpressionNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LiteralExpressionNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LiteralExpressionNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LoadNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LoadNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LoadNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/LoadNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/RelationNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/RelationNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/RelationNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/RelationNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ScalarFunctionExpressionNode.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ScalarFunctionExpressionNode.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ScalarFunctionExpressionNode.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/ast/ScalarFunctionExpressionNode.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/ConstantValue.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/ConstantValue.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/ConstantValue.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/ConstantValue.java
diff --git a/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java
new file mode 100644
index 0000000..d105759
--- /dev/null
+++ b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java
@@ -0,0 +1,362 @@
+package edu.uci.ics.hyracks.algebricks.examples.piglet.compiler;
+
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompiler;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompilerFactory;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialFixpointRuleController;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialOnceRuleController;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.ASTNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.AssignmentNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.DumpNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.ExpressionNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.FieldAccessExpressionNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.FilterNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.FunctionTag;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.LiteralExpressionNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.LoadNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.RelationNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.ScalarFunctionExpressionNode;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.exceptions.PigletException;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.metadata.PigletFileDataSink;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.metadata.PigletFileDataSource;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.metadata.PigletMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.parser.ParseException;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.parser.PigletParser;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.rewriter.PigletRewriteRuleset;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.runtime.PigletExpressionJobGen;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.types.Schema;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.types.Type;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class PigletCompiler {
+    private static final Logger LOGGER = Logger.getLogger(PigletCompiler.class.getName());
+
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
+        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultLogicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+        SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
+        SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
+        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                PigletRewriteRuleset.buildTypeInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                PigletRewriteRuleset.buildNormalizationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                PigletRewriteRuleset.buildCondPushDownRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                PigletRewriteRuleset.buildJoinInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                PigletRewriteRuleset.buildOpPushDownRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                PigletRewriteRuleset.buildDataExchangeRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                PigletRewriteRuleset.buildConsolidationRuleCollection()));
+        return defaultLogicalRewrites;
+    }
+
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultPhysicalRewrites() {
+        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultPhysicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+        SequentialOnceRuleController seqOnceCtrlAllLevels = new SequentialOnceRuleController(true);
+        SequentialOnceRuleController seqOnceCtrlTopLevel = new SequentialOnceRuleController(false);
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrlAllLevels,
+                PigletRewriteRuleset.buildPhysicalRewritesAllLevelsRuleCollection()));
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrlTopLevel,
+                PigletRewriteRuleset.buildPhysicalRewritesTopLevelRuleCollection()));
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrlAllLevels,
+                PigletRewriteRuleset.prepareForJobGenRuleCollection()));
+        return defaultPhysicalRewrites;
+    }
+
+    private final ICompilerFactory cFactory;
+
+    private final PigletMetadataProvider metadataProvider;
+
+    private int varCounter;
+
+    private ILogicalOperator previousOp;
+
+    public PigletCompiler() {
+        HeuristicCompilerFactoryBuilder builder = new HeuristicCompilerFactoryBuilder();
+        builder.setLogicalRewrites(buildDefaultLogicalRewrites());
+        builder.setPhysicalRewrites(buildDefaultPhysicalRewrites());
+        builder.setSerializerDeserializerProvider(new ISerializerDeserializerProvider() {
+            @SuppressWarnings("unchecked")
+            @Override
+            public ISerializerDeserializer getSerializerDeserializer(Object type) throws AlgebricksException {
+                return null;
+            }
+        });
+        builder.setTypeTraitProvider(new ITypeTraitProvider() {
+            public ITypeTraits getTypeTrait(Object type) {
+                return null;
+            }
+        });
+        builder.setPrinterProvider(PigletPrinterFactoryProvider.INSTANCE);
+        builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
+                new PigletExpressionJobGen()));
+        builder.setExpressionTypeComputer(new IExpressionTypeComputer() {
+            @Override
+            public Object getType(ILogicalExpression expr, IMetadataProvider<?, ?> metadataProvider,
+                    IVariableTypeEnvironment env) throws AlgebricksException {
+                return null;
+            }
+        });
+        cFactory = builder.create();
+        metadataProvider = new PigletMetadataProvider();
+    }
+
+    public List<ASTNode> parse(Reader in) throws ParseException {
+        PigletParser parser = new PigletParser(in);
+        List<ASTNode> statements = parser.Statements();
+        return statements;
+    }
+
+    public JobSpecification compile(List<ASTNode> ast) throws AlgebricksException, PigletException {
+        ILogicalPlan plan = translate(ast);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Translated Plan:");
+            LOGGER.info(getPrettyPrintedPlan(plan));
+        }
+        ICompiler compiler = cFactory.createCompiler(plan, metadataProvider, varCounter);
+        compiler.optimize();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Optimized Plan:");
+            LOGGER.info(getPrettyPrintedPlan(plan));
+        }
+        return compiler.createJob(null);
+    }
+
+    private ILogicalPlan translate(List<ASTNode> ast) throws PigletException {
+        Map<String, Relation> symMap = new HashMap<String, Relation>();
+        List<Mutable<ILogicalOperator>> roots = new ArrayList<Mutable<ILogicalOperator>>();
+        previousOp = null;
+        for (ASTNode an : ast) {
+            switch (an.getTag()) {
+                case DUMP: {
+                    DumpNode dn = (DumpNode) an;
+                    Relation input = symMap.get(dn.getAlias());
+                    List<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();
+                    for (LogicalVariable v : input.schema.values()) {
+                        expressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
+                    }
+                    PigletFileDataSink dataSink = new PigletFileDataSink(dn.getFile());
+                    ILogicalOperator op = new WriteOperator(expressions, dataSink);
+                    op.getInputs().add(new MutableObject<ILogicalOperator>(input.op));
+                    roots.add(new MutableObject<ILogicalOperator>(op));
+                }
+                    break;
+
+                case ASSIGNMENT: {
+                    AssignmentNode asn = (AssignmentNode) an;
+                    String alias = asn.getAlias();
+                    RelationNode rn = asn.getRelation();
+                    Relation rel = translate(rn, symMap);
+                    previousOp = rel.op;
+                    rel.alias = alias;
+                    symMap.put(alias, rel);
+                }
+                    break;
+            }
+        }
+        return new ALogicalPlanImpl(roots);
+    }
+
+    private Relation translate(RelationNode rn, Map<String, Relation> symMap) throws PigletException {
+        switch (rn.getTag()) {
+            case LOAD: {
+                LoadNode ln = (LoadNode) rn;
+                String file = ln.getDataFile();
+                Schema schema = ln.getSchema();
+                List<Pair<String, Type>> fieldsSchema = schema.getSchema();
+                List<LogicalVariable> variables = new ArrayList<LogicalVariable>();
+                List<Object> types = new ArrayList<Object>();
+                Relation rel = new Relation();
+                for (Pair<String, Type> p : fieldsSchema) {
+                    LogicalVariable v = newVariable();
+                    rel.schema.put(p.first, v);
+                    variables.add(v);
+                    types.add(p.second);
+                }
+                PigletFileDataSource ds = new PigletFileDataSource(file, types.toArray());
+                rel.op = new DataSourceScanOperator(variables, ds);
+                rel.op.getInputs().add(
+                        new MutableObject<ILogicalOperator>(previousOp == null ? new EmptyTupleSourceOperator()
+                                : previousOp));
+                return rel;
+            }
+
+            case FILTER: {
+                FilterNode fn = (FilterNode) rn;
+                String alias = fn.getAlias();
+                ExpressionNode conditionNode = fn.getExpression();
+                Relation inputRel = findInputRelation(alias, symMap);
+                Pair<Relation, LogicalVariable> tempInput = translateScalarExpression(inputRel, conditionNode);
+                Relation rel = new Relation();
+                rel.op = new SelectOperator(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                        tempInput.second)));
+                rel.op.getInputs().add(new MutableObject<ILogicalOperator>(tempInput.first.op));
+                rel.schema.putAll(tempInput.first.schema);
+                return rel;
+            }
+        }
+        throw new IllegalArgumentException("Unknown node: " + rn.getTag() + " encountered");
+    }
+
+    private Pair<Relation, LogicalVariable> translateScalarExpression(Relation inputRel, ExpressionNode expressionNode)
+            throws PigletException {
+        switch (expressionNode.getTag()) {
+            case FIELD_ACCESS: {
+                FieldAccessExpressionNode faen = (FieldAccessExpressionNode) expressionNode;
+                String fieldName = faen.getFieldName();
+                LogicalVariable lVar = findField(fieldName, inputRel.schema);
+                return new Pair<Relation, LogicalVariable>(inputRel, lVar);
+            }
+
+            case LITERAL: {
+                LiteralExpressionNode len = (LiteralExpressionNode) expressionNode;
+                String image = len.getImage();
+                Type type = len.getType();
+                ConstantExpression ce = new ConstantExpression(new ConstantValue(type, image));
+                Relation rel = new Relation();
+                LogicalVariable var = newVariable();
+                List<LogicalVariable> vars = new ArrayList<LogicalVariable>();
+                vars.add(var);
+
+                List<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
+                exprs.add(new MutableObject<ILogicalExpression>(ce));
+
+                rel.op = new AssignOperator(vars, exprs);
+                rel.op.getInputs().add(new MutableObject<ILogicalOperator>(inputRel.op));
+                rel.schema.putAll(inputRel.schema);
+
+                return new Pair<Relation, LogicalVariable>(rel, var);
+            }
+
+            case SCALAR_FUNCTION: {
+                ScalarFunctionExpressionNode sfen = (ScalarFunctionExpressionNode) expressionNode;
+                List<Mutable<ILogicalExpression>> argExprs = new ArrayList<Mutable<ILogicalExpression>>();
+                List<ASTNode> arguments = sfen.getArguments();
+                Relation rel = inputRel;
+                for (ASTNode a : arguments) {
+                    Pair<Relation, LogicalVariable> argPair = translateScalarExpression(rel, (ExpressionNode) a);
+                    rel = argPair.first;
+                    argExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(argPair.second)));
+                }
+                Relation outRel = new Relation();
+                outRel.schema.putAll(rel.schema);
+                LogicalVariable var = newVariable();
+                List<LogicalVariable> vars = new ArrayList<LogicalVariable>();
+                vars.add(var);
+
+                IFunctionInfo fInfo = lookupFunction(sfen.getFunctionTag(), sfen.getFunctionName());
+
+                List<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
+                exprs.add(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(fInfo, argExprs)));
+                outRel.op = new AssignOperator(vars, exprs);
+                outRel.op.getInputs().add(new MutableObject<ILogicalOperator>(rel.op));
+                return new Pair<Relation, LogicalVariable>(outRel, var);
+            }
+        }
+        throw new IllegalArgumentException("Unknown expression: " + expressionNode.getTag() + " encountered");
+    }
+
+    private IFunctionInfo lookupFunction(FunctionTag functionTag, String functionName) throws PigletException {
+        switch (functionTag) {
+            case EQ:
+                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.EQ);
+
+            case NEQ:
+                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.NEQ);
+
+            case LT:
+                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.LT);
+
+            case LTE:
+                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.LE);
+
+            case GT:
+                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.GT);
+
+            case GTE:
+                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.GE);
+        }
+        throw new PigletException("Unsupported function: " + functionTag);
+    }
+
+    private LogicalVariable newVariable() {
+        return new LogicalVariable(varCounter++);
+    }
+
+    private LogicalVariable findField(String fieldName, Map<String, LogicalVariable> schema) throws PigletException {
+        LogicalVariable var = schema.get(fieldName);
+        if (var == null) {
+            throw new PigletException("Unable to find field named: " + fieldName);
+        }
+        return var;
+    }
+
+    private Relation findInputRelation(String alias, Map<String, Relation> symMap) throws PigletException {
+        Relation rel = symMap.get(alias);
+        if (rel == null) {
+            throw new PigletException("Unknown alias " + alias + " referenced");
+        }
+        return rel;
+    }
+
+    private static class Relation {
+        String alias;
+        ILogicalOperator op;
+        final Map<String, LogicalVariable> schema;
+
+        public Relation() {
+            schema = new LinkedHashMap<String, LogicalVariable>();
+        }
+    }
+
+    private String getPrettyPrintedPlan(ILogicalPlan plan) throws AlgebricksException {
+        LogicalOperatorPrettyPrintVisitor v = new LogicalOperatorPrettyPrintVisitor();
+        StringBuilder buffer = new StringBuilder();
+        PlanPrettyPrinter.printPlan(plan, buffer, v, 0);
+        return buffer.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletPrinterFactoryProvider.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletPrinterFactoryProvider.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletPrinterFactoryProvider.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletPrinterFactoryProvider.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/exceptions/PigletException.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/exceptions/PigletException.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/exceptions/PigletException.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/exceptions/PigletException.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSink.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSink.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSink.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSink.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSource.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSource.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSource.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFileDataSource.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFunction.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFunction.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFunction.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletFunction.java
diff --git a/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
new file mode 100644
index 0000000..d678803
--- /dev/null
+++ b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
@@ -0,0 +1,195 @@
+package edu.uci.ics.hyracks.algebricks.examples.piglet.metadata;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+import edu.uci.ics.hyracks.algebricks.examples.piglet.types.Type;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.FloatParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+public class PigletMetadataProvider implements IMetadataProvider<String, String> {
+    private static final Map<FunctionIdentifier, PigletFunction> FN_MAP;
+
+    static {
+        Map<FunctionIdentifier, PigletFunction> map = new HashMap<FunctionIdentifier, PigletFunction>();
+
+        map.put(AlgebricksBuiltinFunctions.EQ, new PigletFunction(AlgebricksBuiltinFunctions.EQ));
+
+        FN_MAP = Collections.unmodifiableMap(map);
+    }
+
+    @Override
+    public IDataSource<String> findDataSource(String id) throws AlgebricksException {
+        return null;
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(
+            IDataSource<String> dataSource,
+            List<LogicalVariable> scanVariables,
+            List<LogicalVariable> projectVariables, boolean projectPushed,
+            IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
+            JobGenContext context, JobSpecification jobSpec)
+            throws AlgebricksException {
+        PigletFileDataSource ds = (PigletFileDataSource) dataSource;
+
+        FileSplit[] fileSplits = ds.getFileSplits();
+        String[] locations = new String[fileSplits.length];
+        for (int i = 0; i < fileSplits.length; ++i) {
+            locations[i] = fileSplits[i].getNodeName();
+        }
+        IFileSplitProvider fsp = new ConstantFileSplitProvider(fileSplits);
+
+        Object[] colTypes = ds.getSchemaTypes();
+        IValueParserFactory[] vpfs = new IValueParserFactory[colTypes.length];
+        ISerializerDeserializer[] serDesers = new ISerializerDeserializer[colTypes.length];
+
+        for (int i = 0; i < colTypes.length; ++i) {
+            Type colType = (Type) colTypes[i];
+            IValueParserFactory vpf;
+            ISerializerDeserializer serDeser;
+            switch (colType.getTag()) {
+                case INTEGER:
+                    vpf = IntegerParserFactory.INSTANCE;
+                    serDeser = IntegerSerializerDeserializer.INSTANCE;
+                    break;
+
+                case CHAR_ARRAY:
+                    vpf = UTF8StringParserFactory.INSTANCE;
+                    serDeser = UTF8StringSerializerDeserializer.INSTANCE;
+                    break;
+
+                case FLOAT:
+                    vpf = FloatParserFactory.INSTANCE;
+                    serDeser = FloatSerializerDeserializer.INSTANCE;
+                    break;
+
+                default:
+                    throw new UnsupportedOperationException();
+            }
+            vpfs[i] = vpf;
+            serDesers[i] = serDeser;
+        }
+
+        ITupleParserFactory tpf = new DelimitedDataTupleParserFactory(vpfs, ',');
+        RecordDescriptor rDesc = new RecordDescriptor(serDesers);
+
+        IOperatorDescriptor scanner = new FileScanOperatorDescriptor(jobSpec, fsp, tpf, rDesc);
+        AlgebricksAbsolutePartitionConstraint constraint = new AlgebricksAbsolutePartitionConstraint(locations);
+        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(scanner, constraint);
+    }
+
+    @Override
+    public boolean scannerOperatorIsLeaf(IDataSource<String> dataSource) {
+        return true;
+    }
+
+    @Override
+    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,
+            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc)
+            throws AlgebricksException {
+        PigletFileDataSink ds = (PigletFileDataSink) sink;
+        FileSplit[] fileSplits = ds.getFileSplits();
+        String[] locations = new String[fileSplits.length];
+        for (int i = 0; i < fileSplits.length; ++i) {
+            locations[i] = fileSplits[i].getNodeName();
+        }
+        IPushRuntimeFactory prf = new SinkWriterRuntimeFactory(printColumns, printerFactories, fileSplits[0]
+                .getLocalFile().getFile(), PrinterBasedWriterFactory.INSTANCE, inputDesc);
+        AlgebricksAbsolutePartitionConstraint constraint = new AlgebricksAbsolutePartitionConstraint(locations);
+        return new Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint>(prf, constraint);
+    }
+
+    @Override
+    public IDataSourceIndex<String, String> findDataSourceIndex(String indexId, String dataSourceId)
+            throws AlgebricksException {
+        return null;
+    }
+
+    /**
+     * Write-result jobs are not supported by the Piglet example.
+     * NOTE(review): returning {@code null} will cause an NPE at the call site
+     * if the compiler ever routes a write-result through this provider —
+     * consider throwing {@link AlgebricksException} instead.
+     */
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(
+            IDataSource<String> dataSource, IOperatorSchema propagatedSchema, List<LogicalVariable> keys,
+            LogicalVariable payLoadVar, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    /**
+     * Insert jobs are not supported by the Piglet example; unimplemented stub
+     * that returns {@code null} (callers must not invoke this path).
+     */
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(IDataSource<String> dataSource,
+            IOperatorSchema propagatedSchema, List<LogicalVariable> keys, LogicalVariable payLoadVar,
+            RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    /**
+     * Delete jobs are not supported by the Piglet example; unimplemented stub
+     * that returns {@code null} (callers must not invoke this path).
+     */
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(IDataSource<String> dataSource,
+            IOperatorSchema propagatedSchema, List<LogicalVariable> keys, LogicalVariable payLoadVar,
+            RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    /**
+     * Index-insert jobs are not supported by the Piglet example (it defines no
+     * indexes — see {@code findDataSourceIndex}); unimplemented stub returning
+     * {@code null}.
+     */
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(
+            IDataSourceIndex<String, String> dataSource, IOperatorSchema propagatedSchema,
+            IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
+            List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr, RecordDescriptor recordDesc,
+            JobGenContext context, JobSpecification spec) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    /**
+     * Index-delete jobs are not supported by the Piglet example (it defines no
+     * indexes); unimplemented stub returning {@code null}.
+     */
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(
+            IDataSourceIndex<String, String> dataSource, IOperatorSchema propagatedSchema,
+            IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
+            List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr, RecordDescriptor recordDesc,
+            JobGenContext context, JobSpecification spec) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+    
+    /**
+     * Resolves a function identifier against the static function map; yields
+     * {@code null} when the identifier is not registered.
+     */
+    @Override
+    public IFunctionInfo lookupFunction(FunctionIdentifier fid) {
+        return FN_MAP.get(fid);
+    }
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/rewriter/PigletRewriteRuleset.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/rewriter/PigletRewriteRuleset.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/rewriter/PigletRewriteRuleset.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/rewriter/PigletRewriteRuleset.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/PigletExpressionJobGen.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/PigletExpressionJobGen.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/PigletExpressionJobGen.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/PigletExpressionJobGen.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IPigletFunctionEvaluatorFactoryBuilder.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IPigletFunctionEvaluatorFactoryBuilder.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IPigletFunctionEvaluatorFactoryBuilder.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IPigletFunctionEvaluatorFactoryBuilder.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IntegerEqFunctionEvaluatorFactory.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IntegerEqFunctionEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IntegerEqFunctionEvaluatorFactory.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/IntegerEqFunctionEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/PigletFunctionRegistry.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/PigletFunctionRegistry.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/PigletFunctionRegistry.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/runtime/functions/PigletFunctionRegistry.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/BagType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/BagType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/BagType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/BagType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/CharArrayType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/CharArrayType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/CharArrayType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/CharArrayType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/DoubleType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/DoubleType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/DoubleType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/DoubleType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/FloatType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/FloatType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/FloatType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/FloatType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/IntegerType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/IntegerType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/IntegerType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/IntegerType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/LongType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/LongType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/LongType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/LongType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/MapType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/MapType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/MapType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/MapType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Schema.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Schema.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Schema.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Schema.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/TupleType.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/TupleType.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/TupleType.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/TupleType.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Type.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Type.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Type.java
rename to algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/types/Type.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/javacc/PigletParser.jj b/algebricks/algebricks-examples/piglet-example/src/main/javacc/PigletParser.jj
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/javacc/PigletParser.jj
rename to algebricks/algebricks-examples/piglet-example/src/main/javacc/PigletParser.jj
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTest.java b/algebricks/algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTest.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTest.java
rename to algebricks/algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTest.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTestCase.java b/algebricks/algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTestCase.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTestCase.java
rename to algebricks/algebricks-examples/piglet-example/src/test/java/edu/uci/ics/algebricks/examples/piglet/test/PigletTestCase.java
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/testcases/q1.piglet b/algebricks/algebricks-examples/piglet-example/testcases/q1.piglet
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/testcases/q1.piglet
rename to algebricks/algebricks-examples/piglet-example/testcases/q1.piglet
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/testcases/q2.piglet b/algebricks/algebricks-examples/piglet-example/testcases/q2.piglet
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-examples/piglet-example/testcases/q2.piglet
rename to algebricks/algebricks-examples/piglet-example/testcases/q2.piglet
diff --git a/algebricks/algebricks-examples/pom.xml b/algebricks/algebricks-examples/pom.xml
new file mode 100644
index 0000000..e538712
--- /dev/null
+++ b/algebricks/algebricks-examples/pom.xml
@@ -0,0 +1,16 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-examples</artifactId>
+  <packaging>pom</packaging>
+  <name>algebricks-examples</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>piglet-example</module>
+  </modules>
+</project>
diff --git a/algebricks/algebricks-rewriter/pom.xml b/algebricks/algebricks-rewriter/pom.xml
new file mode 100644
index 0000000..448221d
--- /dev/null
+++ b/algebricks/algebricks-rewriter/pom.xml
@@ -0,0 +1,32 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-rewriter</artifactId>
+  <name>algebricks-rewriter</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>algebricks-core</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractDecorrelationRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractDecorrelationRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractDecorrelationRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractDecorrelationRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractExtractExprRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractExtractExprRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractExtractExprRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractExtractExprRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateAssignsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateAssignsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateAssignsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateAssignsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateSelectsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateSelectsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateSelectsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ConsolidateSelectsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EliminateSubplanRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EliminateSubplanRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EliminateSubplanRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EliminateSubplanRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
new file mode 100644
index 0000000..f017e0f
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
@@ -0,0 +1,439 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Factors out common sub-expressions by assigning them to a variables, and replacing the common sub-expressions with references to those variables.
+ *
+ * Preconditions/Assumptions:
+ * Assumes no projects are in the plan. This rule ignores variable reference expressions and constants (other rules deal with those separately).
+ * 
+ * Postconditions/Examples:
+ * Plan with extracted sub-expressions. Generates one assign operator per extracted expression.
+ * 
+ * Example 1 - Simple Arithmetic Example (simplified)
+ * 
+ * Before plan:
+ * assign [$$1] <- [5 + 6 - 10]
+ *   assign [$$0] <- [5 + 6 + 30]
+ * 
+ * After plan:
+ * assign [$$1] <- [$$5 - 10]
+ *   assign [$$0] <- [$$5 + 30]
+ *     assign [$$5] <- [5 + 6]
+ * 
+ * Example 2 - Cleaning up 'Distinct By' (simplified)
+ * 
+ * Before plan: (notice how $$0 is not live after the distinct)
+ * assign [$$3] <- [field-access($$0, 1)]
+ *   distinct ([%0->$$5])
+ *     assign [$$5] <- [field-access($$0, 1)]
+ *       unnest $$0 <- [scan-dataset]
+ * 
+ * After plan: (notice how the issue of $$0 is fixed)
+ * assign [$$3] <- [$$5]
+ *   distinct ([$$5])
+ *     assign [$$5] <- [field-access($$0, 1)]
+ *       unnest $$0 <- [scan-dataset]
+ * 
+ * Example 3 - Pulling Common Expressions Above Joins (simplified)
+ * 
+ * Before plan:
+ * assign [$$9] <- funcZ(funcY($$8))
+ *   join (funcX(funcY($$8)))
+ * 
+ * After plan:
+ * assign [$$9] <- funcZ($$10))
+ *   select (funcX($$10))
+ *     assign [$$10] <- [funcY($$8)]
+ *       join (TRUE)
+ */
+public class ExtractCommonExpressionsRule implements IAlgebraicRewriteRule {
+
+    // Scratch buffer holding clones of an ASSIGN's expressions taken before
+    // substitution, so the pre-substitution forms can also be registered in the
+    // equivalence-class map (see removeCommonExpressions).
+    private final List<ILogicalExpression> originalAssignExprs = new ArrayList<ILogicalExpression>();
+    
+    // Visitor that replaces a recognized common sub-expression with a variable
+    // reference; reused across operators (state is reset via setOperator).
+    private final CommonExpressionSubstitutionVisitor substVisitor = new CommonExpressionSubstitutionVisitor();
+    // Maps an expression to its equivalence class (first defining operator,
+    // expression reference, and — once assigned — the substituting variable).
+    // NOTE(review): rule instance is stateful, so it must not be shared across
+    // concurrent compilations — confirm rules are per-compilation instances.
+    private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap = new HashMap<ILogicalExpression, ExprEquivalenceClass>();
+    
+    // Set of operators for which common subexpression elimination should not be performed.
+    private static final Set<LogicalOperatorTag> ignoreOps = new HashSet<LogicalOperatorTag>();
+    static {
+        ignoreOps.add(LogicalOperatorTag.UNNEST);
+        ignoreOps.add(LogicalOperatorTag.UNNEST_MAP);
+        ignoreOps.add(LogicalOperatorTag.ORDER);
+        ignoreOps.add(LogicalOperatorTag.PROJECT);
+        ignoreOps.add(LogicalOperatorTag.AGGREGATE);
+        ignoreOps.add(LogicalOperatorTag.RUNNINGAGGREGATE);
+    }
+    
+    /**
+     * No-op: all rewriting for this rule happens in the pre-order pass
+     * ({@code rewritePre}), which itself recurses over the subtree.
+     */
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    /**
+     * Entry point of the rule: resets per-invocation state, then runs common
+     * sub-expression removal over the subtree rooted at {@code opRef}.
+     * Recomputes the root's type environment if anything changed.
+     *
+     * @return true iff the plan was modified
+     */
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        // Equivalence classes must not leak between separate rule invocations.
+        exprEqClassMap.clear();
+        substVisitor.setContext(context);
+        boolean modified = removeCommonExpressions(opRef, context);
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(opRef.getValue());
+        }
+        return modified;
+    }
+
+    /**
+     * Records that variable {@code lhs} carries the value of {@code rhsExpr}:
+     * creates the expression's equivalence class on first sight (remembering the
+     * defining operator and expression reference), then (re)binds the class's
+     * substituting variable to {@code lhs}.
+     */
+    private void updateEquivalenceClassMap(LogicalVariable lhs, Mutable<ILogicalExpression> rhsExprRef, ILogicalExpression rhsExpr, ILogicalOperator op) {
+        ExprEquivalenceClass exprEqClass = exprEqClassMap.get(rhsExpr);
+        if (exprEqClass == null) {
+            exprEqClass = new ExprEquivalenceClass(op, rhsExprRef);
+            exprEqClassMap.put(rhsExpr, exprEqClass);
+        }
+        exprEqClass.setVariable(lhs);
+    }
+
+    /**
+     * Bottom-up worker: first recurses into the operator's inputs, then applies
+     * common sub-expression substitution to this operator's own expressions and
+     * registers any ASSIGN right-hand sides as new equivalence classes.
+     *
+     * @return true iff this operator or any operator below it was modified
+     */
+    private boolean removeCommonExpressions(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        // Skip operators this rule has already processed in a previous pass.
+        if (context.checkIfInDontApplySet(this, opRef.getValue())) {
+            return false;
+        }
+        
+        boolean modified = false;
+        // Recurse into children.
+        for (Mutable<ILogicalOperator> inputOpRef : op.getInputs()) {
+            if (removeCommonExpressions(inputOpRef, context)) {
+                modified = true;
+            }
+        }
+        
+        // TODO: Deal with replicate properly. Currently, we just clear the expr equivalence map, since we want to avoid incorrect expression replacement
+        // (the resulting new variables should be assigned live below a replicate).
+        if (op.getOperatorTag() == LogicalOperatorTag.REPLICATE) {
+            exprEqClassMap.clear();
+            return modified;
+        }
+        // Exclude these operators.
+        if (ignoreOps.contains(op.getOperatorTag())) {
+            return modified;
+        }
+        
+        // Remember a copy of the original assign expressions, so we can add them to the equivalence class map
+        // after replacing expressions within the assign operator itself.
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            originalAssignExprs.clear();
+            int numVars = assignOp.getVariables().size();
+            for (int i = 0; i < numVars; i++) {
+                Mutable<ILogicalExpression> exprRef = assignOp.getExpressions().get(i);
+                ILogicalExpression expr = exprRef.getValue();
+                // Clone: the in-place substitution below would otherwise mutate
+                // the expression we want to register in its original form.
+                originalAssignExprs.add(expr.cloneExpression());
+            }
+        }
+        
+        // Perform common subexpression elimination.
+        substVisitor.setOperator(op);
+        if (op.acceptExpressionTransform(substVisitor)) {
+            modified = true;
+        }
+        
+        // Update equivalence class map.
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            int numVars = assignOp.getVariables().size();
+            for (int i = 0; i < numVars; i++) {
+                Mutable<ILogicalExpression> exprRef = assignOp.getExpressions().get(i);
+                ILogicalExpression expr = exprRef.getValue();
+                // Bare variable references and constants are handled by other
+                // rules; tracking them here would add no benefit.
+                if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE
+                        || expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+                    continue;
+                }
+                // Update equivalence class map.
+                LogicalVariable lhs = assignOp.getVariables().get(i);
+                updateEquivalenceClassMap(lhs, exprRef, exprRef.getValue(), op);
+                
+                // Update equivalence class map with original assign expression.
+                updateEquivalenceClassMap(lhs, exprRef, originalAssignExprs.get(i), op);
+            }
+        }
+
+        // TODO: For now do not perform replacement in nested plans
+        // due to the complication of figuring out whether the firstOp in an equivalence class is within a subplan, 
+        // and the resulting variable will not be visible to the outside.
+        // Since subplans should be eliminated in most cases, this behavior is acceptable for now.
+        /*
+        if (op.hasNestedPlans()) {
+            AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) op;
+            for (ILogicalPlan nestedPlan : opWithNestedPlan.getNestedPlans()) {
+                for (Mutable<ILogicalOperator> rootRef : nestedPlan.getRoots()) {
+                    if (removeCommonExpressions(rootRef, context)) {
+                        modified = true;
+                    }
+                }
+            }
+        }
+        */
+
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(op);
+            // Mark processed so a later pass does not re-apply this rule here.
+            context.addToDontApplySet(this, op);
+        }
+        return modified;
+    }
+
+    /**
+     * Expression-reference transform that performs the per-operator common-subexpression
+     * substitution. It consults the enclosing rule's exprEqClassMap: a hit is replaced by the
+     * equivalence class's variable (materializing an Assign below the first use if needed),
+     * and a miss registers the expression as a new equivalence class.
+     */
+    private class CommonExpressionSubstitutionVisitor implements ILogicalExpressionReferenceTransform {
+                
+        // Variables live at the current operator; computed lazily on first transform() call.
+        private final Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
+        // Variables used by the current operator; computed lazily together with liveVars.
+        private final List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+        private IOptimizationContext context;
+        private ILogicalOperator op;        
+        
+        public void setContext(IOptimizationContext context) {
+            this.context = context;
+        }
+        
+        // Switches the visitor to a new operator and invalidates the cached live/used
+        // variable sets (they are recomputed lazily by the next transform() call).
+        public void setOperator(ILogicalOperator op) throws AlgebricksException {
+            this.op = op;
+            liveVars.clear();
+            usedVars.clear();
+        }
+        
+        /**
+         * Attempts to replace the expression in exprRef (or one of its function-call
+         * arguments, recursively) with a variable reference to an equivalent, previously
+         * seen expression. Returns true iff a replacement was made.
+         */
+        @Override
+        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
+            // Lazy (re)computation: both sets were cleared by setOperator().
+            if (liveVars.isEmpty() && usedVars.isEmpty()) {
+                VariableUtilities.getLiveVariables(op, liveVars);
+                VariableUtilities.getUsedVariables(op, usedVars);
+            }
+            
+            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
+            boolean modified = false;
+            ExprEquivalenceClass exprEqClass = exprEqClassMap.get(expr);
+            if (exprEqClass != null) {
+                // Replace common subexpression with existing variable. 
+                if (exprEqClass.variableIsSet()) {
+                    // Check if the replacing variable is live at this op.
+                    // However, if the op is already using variables that are not live, then a replacement may enable fixing the plan.
+                    // This behavior is necessary to, e.g., properly deal with distinct by.
+                    // Also just replace the expr if we are replacing common exprs from within the same operator.
+                    if (liveVars.contains(exprEqClass.getVariable()) || !liveVars.containsAll(usedVars)
+                            || op == exprEqClass.getFirstOperator()) {
+                        exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
+                        // Do not descend into children since this expr has been completely replaced.
+                        return true;
+                    }
+                } else {
+                    // No variable yet: try to materialize an Assign for the first occurrence,
+                    // then reference its variable here.
+                    if (assignCommonExpression(exprEqClass, expr)) {
+                        exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
+                        // Do not descend into children since this expr has been completely replaced.
+                        return true;
+                    }
+                }
+            } else {
+                // First sighting: only non-trivial expressions (not bare variables/constants)
+                // become candidates for elimination.
+                if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE
+                        && expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                    exprEqClass = new ExprEquivalenceClass(op, exprRef);
+                    exprEqClassMap.put(expr, exprEqClass);
+                }
+            }
+            
+            // Descend into function arguments.
+            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+                for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
+                    if (transform(arg)) {
+                        modified = true;
+                    }
+                }
+            }
+            return modified;
+        }
+        
+        /**
+         * Materializes the equivalence class's first expression occurrence as a new Assign
+         * operator placed below the operator that first used it, and records the new
+         * variable in the class. For join operators the enclosing boolean conjunct is first
+         * pulled out into a Select below the current op. Returns false if the expression
+         * cannot safely be pulled out (same-join occurrence, no viable enclosing conjunct,
+         * or a non-join multi-input first operator).
+         */
+        private boolean assignCommonExpression(ExprEquivalenceClass exprEqClass, ILogicalExpression expr) throws AlgebricksException {
+            AbstractLogicalOperator firstOp = (AbstractLogicalOperator) exprEqClass.getFirstOperator();
+            Mutable<ILogicalExpression> firstExprRef = exprEqClass.getFirstExpression();
+            if (firstOp.getOperatorTag() == LogicalOperatorTag.INNERJOIN || firstOp.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
+                // Do not extract common expressions from within the same join operator.
+                if (firstOp == op) {
+                    return false;
+                }
+                AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) firstOp;
+                Mutable<ILogicalExpression> joinCond = joinOp.getCondition();                
+                ILogicalExpression enclosingExpr = getEnclosingExpression(joinCond, firstExprRef.getValue());
+                if (enclosingExpr == null) {
+                    // No viable enclosing expression that we can pull out from the join.
+                    return false;
+                }
+                // Place a Select operator beneath op that contains the enclosing expression.
+                SelectOperator selectOp = new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr));
+                selectOp.getInputs().add(new MutableObject<ILogicalOperator>(op.getInputs().get(0).getValue()));
+                op.getInputs().get(0).setValue(selectOp);
+                // Set firstOp to be the select below op, since we want to assign the common subexpr there.
+                firstOp = (AbstractLogicalOperator) selectOp;
+            } else if (firstOp.getInputs().size() > 1) { 
+                // Bail for any non-join operator with multiple inputs.
+                return false;
+            }                        
+            LogicalVariable newVar = context.newVar();
+            AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(firstExprRef.getValue().cloneExpression()));            
+            // Place assign below firstOp.
+            newAssign.getInputs().add(new MutableObject<ILogicalOperator>(firstOp.getInputs().get(0).getValue()));
+            newAssign.setExecutionMode(firstOp.getExecutionMode());
+            firstOp.getInputs().get(0).setValue(newAssign);
+            // Replace original expr with variable reference, and set var in expression equivalence class.
+            firstExprRef.setValue(new VariableReferenceExpression(newVar));
+            exprEqClass.setVariable(newVar);
+            context.computeAndSetTypeEnvironmentForOperator(newAssign);
+            context.computeAndSetTypeEnvironmentForOperator(firstOp);
+            return true;
+        }
+
+        /**
+         * Finds the boolean conjunct/disjunct of a join condition that encloses
+         * commonSubExpr, removes it from the condition (replacing the whole condition
+         * with TRUE if it was the condition itself), and returns it so the caller can
+         * pull it out into a Select. Returns null when nothing can be pulled out, in
+         * particular when the common subexpression looks like an equi-join predicate.
+         * NOTE(review): as the comment below says, arbitrarily nested and/or trees are
+         * not handled — only the top-level and/or argument list is inspected.
+         */
+        private ILogicalExpression getEnclosingExpression(Mutable<ILogicalExpression> conditionExprRef, ILogicalExpression commonSubExpr) {
+            ILogicalExpression conditionExpr = conditionExprRef.getValue();
+            if (conditionExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                return null;
+            }
+            if (isEqJoinCondition(commonSubExpr)) {
+                // Do not eliminate the common expression if we could use it for an equi-join.
+                return null;
+            }
+            AbstractFunctionCallExpression conditionFuncExpr = (AbstractFunctionCallExpression) conditionExpr;
+            // Boolean expression that encloses the common subexpression.
+            ILogicalExpression enclosingBoolExpr = null;
+            // We are not dealing with arbitrarily nested and/or expressions here.
+            FunctionIdentifier funcIdent = conditionFuncExpr.getFunctionIdentifier();
+            if (funcIdent.equals(AlgebricksBuiltinFunctions.AND) || funcIdent.equals(AlgebricksBuiltinFunctions.OR)) {
+                Iterator<Mutable<ILogicalExpression>> argIter = conditionFuncExpr.getArguments().iterator();
+                while (argIter.hasNext()) {
+                    Mutable<ILogicalExpression> argRef = argIter.next();
+                    if (containsExpr(argRef.getValue(), commonSubExpr)) {
+                        enclosingBoolExpr = argRef.getValue();
+                        // Remove the enclosing expression from the argument list.
+                        // We are going to pull it out into a new select operator.
+                        argIter.remove();
+                        break;
+                    }
+                }
+                // If and/or only has a single argument left, pull it out and remove the and/or function.
+                if (conditionFuncExpr.getArguments().size() == 1) {
+                    conditionExprRef.setValue(conditionFuncExpr.getArguments().get(0).getValue());
+                }
+            } else {
+                if (!containsExpr(conditionExprRef.getValue(), commonSubExpr)) {
+                    return null;
+                }
+                enclosingBoolExpr = conditionFuncExpr;
+                // Replace the enclosing expression with TRUE.
+                conditionExprRef.setValue(ConstantExpression.TRUE);
+            }
+            return enclosingBoolExpr;
+        }
+    }
+    
+    /**
+     * Recursively determines whether searchExpr occurs inside expr. Matching is by
+     * reference identity (==), not structural equality, since the search target is a
+     * specific expression occurrence tracked in an equivalence class.
+     */
+    private boolean containsExpr(ILogicalExpression expr, ILogicalExpression searchExpr) {
+        if (expr == searchExpr) {
+            return true;
+        }
+        // Only function calls have children to descend into.
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            List<Mutable<ILogicalExpression>> args = ((AbstractFunctionCallExpression) expr).getArguments();
+            int numArgs = args.size();
+            for (int i = 0; i < numArgs; i++) {
+                if (containsExpr(args.get(i).getValue(), searchExpr)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+    
+    /**
+     * Returns true iff expr has the shape EQ(var1, var2), i.e., it could serve as an
+     * equi-join predicate and should therefore not be eliminated from a join condition.
+     *
+     * Fix: guard the cast with an expression-tag check. The original cast every input to
+     * AbstractFunctionCallExpression unconditionally, but callers (getEnclosingExpression)
+     * pass arbitrary common subexpressions, so a non-function-call expression would have
+     * caused a ClassCastException.
+     */
+    private boolean isEqJoinCondition(ILogicalExpression expr) {
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            // Not a function call, hence not an EQ(var, var) predicate.
+            return false;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        if (funcExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ)) {
+            ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+            ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+            if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
+                    && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                return true;
+            }
+        }
+        return false;
+    }
+    
+    /**
+     * Bookkeeping for one canonical expression: remembers the operator and expression
+     * reference where it was first encountered and, once materialized, the variable that
+     * carries its value.
+     */
+    private final class ExprEquivalenceClass {
+        // Operator in which the expression was first seen.
+        private final ILogicalOperator originOp;
+
+        // Reference to the expression occurrence inside that first operator.
+        private final Mutable<ILogicalExpression> originExprRef;
+
+        // Variable carrying the expression's value; null until one is assigned.
+        private LogicalVariable assignedVar;
+
+        public ExprEquivalenceClass(ILogicalOperator firstOp, Mutable<ILogicalExpression> firstExprRef) {
+            originOp = firstOp;
+            originExprRef = firstExprRef;
+        }
+
+        // True once setVariable() has recorded a materialized variable.
+        public boolean variableIsSet() {
+            return assignedVar != null;
+        }
+
+        public LogicalVariable getVariable() {
+            return assignedVar;
+        }
+
+        public void setVariable(LogicalVariable var) {
+            assignedVar = var;
+        }
+
+        public Mutable<ILogicalExpression> getFirstExpression() {
+            return originExprRef;
+        }
+
+        public ILogicalOperator getFirstOperator() {
+            return originOp;
+        }
+    }
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractGbyExpressionsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractGbyExpressionsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractGbyExpressionsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractGbyExpressionsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InferTypesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InferTypesRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InferTypesRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InferTypesRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineAssignIntoAggregateRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineAssignIntoAggregateRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineAssignIntoAggregateRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineAssignIntoAggregateRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertOuterJoinRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertOuterJoinRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertOuterJoinRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertOuterJoinRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroHashPartitionMergeExchange.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroHashPartitionMergeExchange.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroHashPartitionMergeExchange.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroHashPartitionMergeExchange.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java
new file mode 100644
index 0000000..a8864fe
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java
@@ -0,0 +1,218 @@
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+
+/**
+ * Rewrites a PARTITIONED group-by into a two-level aggregation by pushing a LOCAL
+ * (combiner) group-by below it. The combiner pre-aggregates per partition; the original
+ * group-by then aggregates the combined results. Aggregate splitting itself is delegated
+ * to the base class (tryToPushAgg / replaceOriginalAggFuncs — not visible here).
+ */
+public class IntroduceGroupByCombinerRule extends AbstractIntroduceCombinerRule {
+
+    /**
+     * Fires on a PARTITIONED GroupByOperator not yet in the don't-apply set. Builds the
+     * combiner group-by via opToPush(); if every nested plan could be pushed, rewires
+     * decor expressions and free subplan variables through the combiner, copies the
+     * hash/external group-by annotations, inserts the combiner below the original, and
+     * recomputes both type environments. Returns true iff the plan was changed.
+     */
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        GroupByOperator gbyOp = (GroupByOperator) op;
+        if (gbyOp.getExecutionMode() != ExecutionMode.PARTITIONED) {
+            // A combiner only pays off below a partitioned (global) group-by.
+            return false;
+        }
+
+        BookkeepingInfo bi = new BookkeepingInfo();
+        GroupByOperator newGbyOp = opToPush(gbyOp, bi, context);
+        if (newGbyOp == null) {
+            // Could not push all nested plans; leave the plan untouched.
+            return false;
+        }
+
+        // Swap the original aggregate calls for their global (merge) counterparts.
+        replaceOriginalAggFuncs(bi.toReplaceMap);
+
+        // Route each decor expression through a fresh variable produced by the combiner.
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyOp.getDecorList()) {
+            LogicalVariable newDecorVar = context.newVar();
+            newGbyOp.addDecorExpression(newDecorVar, p.second.getValue());
+            p.second.setValue(new VariableReferenceExpression(newDecorVar));
+        }
+        newGbyOp.setExecutionMode(ExecutionMode.LOCAL);
+        // Propagate the hash/external group-by hints to the combiner (values may be null).
+        Object v = gbyOp.getAnnotations().get(OperatorAnnotations.USE_HASH_GROUP_BY);
+        newGbyOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, v);
+
+        Object v2 = gbyOp.getAnnotations().get(OperatorAnnotations.USE_EXTERNAL_GROUP_BY);
+        newGbyOp.getAnnotations().put(OperatorAnnotations.USE_EXTERNAL_GROUP_BY, v2);
+
+        List<LogicalVariable> propagatedVars = new LinkedList<LogicalVariable>();
+        VariableUtilities.getProducedVariables(newGbyOp, propagatedVars);
+
+        Set<LogicalVariable> freeVars = new HashSet<LogicalVariable>();
+        OperatorPropertiesUtil.getFreeVariablesInSubplans(gbyOp, freeVars);
+
+        // Any subplan free variable the combiner does not already produce must be
+        // decorated through it, and the subplan rewritten to use the decor variable.
+        for (LogicalVariable var : freeVars) {
+            if (!propagatedVars.contains(var)) {
+                LogicalVariable newDecorVar = context.newVar();
+                newGbyOp.addDecorExpression(newDecorVar, new VariableReferenceExpression(var));
+                VariableUtilities.substituteVariables(gbyOp.getNestedPlans().get(0).getRoots().get(0).getValue(), var,
+                        newDecorVar, context);
+            }
+        }
+
+        // Splice the combiner between the original group-by and its input.
+        Mutable<ILogicalOperator> opRef3 = gbyOp.getInputs().get(0);
+        opRef3.setValue(newGbyOp);
+        typeGby(newGbyOp, context);
+        typeGby(gbyOp, context);
+    	context.addToDontApplySet(this, op);
+        return true;
+    }
+
+    // Recomputes the type environment of a group-by and all of its nested plans.
+    private void typeGby(AbstractOperatorWithNestedPlans op, IOptimizationContext context) throws AlgebricksException {
+        for (ILogicalPlan p : op.getNestedPlans()) {
+            OperatorPropertiesUtil.typePlan(p, context);
+        }
+        context.computeAndSetTypeEnvironmentForOperator(op);
+    }
+
+    /**
+     * Constructs the combiner GroupByOperator: hooks it to the original's input, copies
+     * annotations, pushes each nested plan, then linearizes the group-by variable lists
+     * collected per (possibly nested) group-by into one maximal prefix-compatible
+     * sequence. Returns null if any subplan cannot be pushed or the variable lists
+     * cannot be linearized (all-or-nothing).
+     */
+    private GroupByOperator opToPush(GroupByOperator gbyOp, BookkeepingInfo bi, IOptimizationContext context)
+            throws AlgebricksException {
+        // Hook up input to new group-by.
+        Mutable<ILogicalOperator> opRef3 = gbyOp.getInputs().get(0);
+        ILogicalOperator op3 = opRef3.getValue();
+        GroupByOperator newGbyOp = new GroupByOperator();
+        newGbyOp.getInputs().add(new MutableObject<ILogicalOperator>(op3));
+        // Copy annotations.        
+        Map<String, Object> annotations = newGbyOp.getAnnotations();
+        annotations.putAll(gbyOp.getAnnotations());
+
+        List<LogicalVariable> gbyVars = gbyOp.getGbyVarList();
+        for (ILogicalPlan p : gbyOp.getNestedPlans()) {
+            Pair<Boolean, ILogicalPlan> bip = tryToPushSubplan(p, gbyOp, newGbyOp, bi, gbyVars, context);
+            if (!bip.first) {
+                // For now, if we cannot push everything, give up.
+                return null;
+            }
+            ILogicalPlan pushedSubplan = bip.second;
+            if (pushedSubplan != null) {
+                newGbyOp.getNestedPlans().add(pushedSubplan);
+            }
+        }
+
+        ArrayList<LogicalVariable> newOpGbyList = new ArrayList<LogicalVariable>();
+        ArrayList<LogicalVariable> replGbyList = new ArrayList<LogicalVariable>();
+        // Find maximal sequence of variable.
+        // Each entry's variable list must be a prefix of (or extend) the sequence built
+        // so far; otherwise the lists cannot be merged into one combiner key list.
+        for (Map.Entry<GroupByOperator, List<LogicalVariable>> e : bi.modifyGbyMap.entrySet()) {
+            List<LogicalVariable> varList = e.getValue();
+            boolean see1 = true;
+            int sz1 = newOpGbyList.size();
+            int i = 0;
+            for (LogicalVariable v : varList) {
+                if (see1) {
+                    if (i < sz1) {
+                        LogicalVariable v2 = newOpGbyList.get(i);
+                        if (v != v2) {
+                            // cannot linearize
+                            return null;
+                        }
+                    } else {
+                        see1 = false;
+                        newOpGbyList.add(v);
+                        replGbyList.add(context.newVar());
+                    }
+                    i++;
+                } else {
+                    newOpGbyList.add(v);
+                    replGbyList.add(context.newVar());
+                }
+            }
+        }
+        // set the vars in the new op
+        int n = newOpGbyList.size();
+        for (int i = 0; i < n; i++) {
+            newGbyOp.addGbyExpression(replGbyList.get(i), new VariableReferenceExpression(newOpGbyList.get(i)));
+            VariableUtilities.substituteVariables(gbyOp, newOpGbyList.get(i), replGbyList.get(i), false, context);
+        }
+        return newGbyOp;
+    }
+
+    /**
+     * Tries to push every root of one nested plan. Returns (false, null) if any root
+     * fails; (true, null) if all roots were absorbed with nothing left to add; or
+     * (true, plan) wrapping the roots to install in the combiner.
+     */
+    private Pair<Boolean, ILogicalPlan> tryToPushSubplan(ILogicalPlan nestedPlan, GroupByOperator oldGbyOp,
+            GroupByOperator newGbyOp, BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context)
+            throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> pushedRoots = new ArrayList<Mutable<ILogicalOperator>>();
+        for (Mutable<ILogicalOperator> r : nestedPlan.getRoots()) {
+            if (!tryToPushRoot(r, oldGbyOp, newGbyOp, bi, gbyVars, context, pushedRoots)) {
+                // For now, if we cannot push everything, give up.
+                return new Pair<Boolean, ILogicalPlan>(false, null);
+            }
+        }
+        if (pushedRoots.isEmpty()) {
+            return new Pair<Boolean, ILogicalPlan>(true, null);
+        } else {
+            return new Pair<Boolean, ILogicalPlan>(true, new ALogicalPlanImpl(pushedRoots));
+        }
+    }
+
+    /**
+     * Pushes a single nested-plan root. The root must be an AGGREGATE. If it sits
+     * directly on a NESTEDTUPLESOURCE, the aggregate is split via tryToPushAgg and the
+     * group-by variable list is recorded for later linearization. Otherwise the chain
+     * below is followed to a nested GROUP, whose own roots are pushed recursively with
+     * the concatenated (outer + nested) group-by variable list.
+     */
+    private boolean tryToPushRoot(Mutable<ILogicalOperator> root, GroupByOperator oldGbyOp, GroupByOperator newGbyOp,
+            BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context,
+            List<Mutable<ILogicalOperator>> toPushAccumulate) throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) root.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
+        if (op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
+            AggregateOperator initAgg = (AggregateOperator) op1;
+            Pair<Boolean, Mutable<ILogicalOperator>> pOpRef = tryToPushAgg(initAgg, newGbyOp, bi.toReplaceMap, context);
+            if (!pOpRef.first) {
+                return false;
+            }
+            Mutable<ILogicalOperator> opRef = pOpRef.second;
+            if (opRef != null) {
+                toPushAccumulate.add(opRef);
+            }
+            bi.modifyGbyMap.put(oldGbyOp, gbyVars);
+            return true;
+        } else {
+            // Walk down single-input operators looking for a nested group-by.
+            while (op2.getOperatorTag() != LogicalOperatorTag.GROUP && op2.getInputs().size() == 1) {
+                op2 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
+            }
+            if (op2.getOperatorTag() != LogicalOperatorTag.GROUP) {
+                return false;
+            }
+            GroupByOperator nestedGby = (GroupByOperator) op2;
+            List<LogicalVariable> gbyVars2 = nestedGby.getGbyVarList();
+            List<LogicalVariable> concatGbyVars = new ArrayList<LogicalVariable>(gbyVars);
+            concatGbyVars.addAll(gbyVars2);
+            for (ILogicalPlan p : nestedGby.getNestedPlans()) {
+                for (Mutable<ILogicalOperator> r2 : p.getRoots()) {
+                    if (!tryToPushRoot(r2, nestedGby, newGbyOp, bi, concatGbyVars, context, toPushAccumulate)) {
+                        return false;
+                    }
+                }
+            }
+            return true;
+        }
+    }
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceProjectsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceProjectsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceProjectsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceProjectsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IsolateHyracksOperatorsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IsolateHyracksOperatorsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IsolateHyracksOperatorsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IsolateHyracksOperatorsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushDieUpRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushDieUpRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushDieUpRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushDieUpRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectDownRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectDownRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectDownRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectDownRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectDownRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectDownRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectDownRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectDownRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantGroupByDecorVars.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantGroupByDecorVars.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantGroupByDecorVars.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantGroupByDecorVars.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetExecutionModeRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetExecutionModeRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetExecutionModeRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetExecutionModeRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SimpleUnnestToProductRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SimpleUnnestToProductRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SimpleUnnestToProductRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SimpleUnnestToProductRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SubplanOutOfGroupRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SubplanOutOfGroupRule.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SubplanOutOfGroupRule.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SubplanOutOfGroupRule.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/JoinUtils.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/JoinUtils.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/JoinUtils.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/JoinUtils.java
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
rename to algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
diff --git a/algebricks/algebricks-runtime/pom.xml b/algebricks/algebricks-runtime/pom.xml
new file mode 100644
index 0000000..70243d2
--- /dev/null
+++ b/algebricks/algebricks-runtime/pom.xml
@@ -0,0 +1,52 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-runtime</artifactId>
+  <name>algebricks-runtime</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-storage-am-btree</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-storage-am-rtree</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-dataflow-std</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>algebricks-common</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>algebricks-data</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountAggregateFunctionFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountAggregateFunctionFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountAggregateFunctionFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountAggregateFunctionFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountRunningAggregateFunctionFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountRunningAggregateFunctionFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountRunningAggregateFunctionFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/aggregators/TupleCountRunningAggregateFunctionFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/AlgebricksPipeline.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/AlgebricksPipeline.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/AlgebricksPipeline.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/AlgebricksPipeline.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluator.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluator.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IAggregateEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunction.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunction.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunction.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunction.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunctionFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunctionFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunctionFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyAggregateFunctionFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluator.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluator.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunction.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunction.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunction.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunction.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunctionFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunctionFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunctionFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyRunningAggregateFunctionFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunction.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunction.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunction.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunction.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunctionFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunctionFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunctionFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopySerializableAggregateFunctionFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunction.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunction.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunction.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunction.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunctionFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunctionFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunctionFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/ICopyUnnestingFunctionFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntime.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntime.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntime.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IPushRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluator.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluator.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IRunningAggregateEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluator.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluator.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IScalarEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluator.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluator.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluator.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluator.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/base/IUnnestingEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/context/AsterixBTreeRegistry.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/context/AsterixBTreeRegistry.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/context/AsterixBTreeRegistry.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/context/AsterixBTreeRegistry.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ColumnAccessEvalFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ColumnAccessEvalFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ColumnAccessEvalFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ColumnAccessEvalFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvalFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvalFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvalFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvalFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvaluatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvaluatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/ConstantEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/TupleFieldEvaluatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/TupleFieldEvaluatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/TupleFieldEvaluatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/evaluators/TupleFieldEvaluatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/AggregateRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/AggregateRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/AggregateRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/AggregateRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/NestedPlansAccumulatingAggregatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/NestedPlansAccumulatingAggregatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/NestedPlansAccumulatingAggregatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/NestedPlansAccumulatingAggregatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputPushRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputPushRuntime.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputPushRuntime.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputPushRuntime.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputPushRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputPushRuntime.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputPushRuntime.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputPushRuntime.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSinkPushRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSinkPushRuntime.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSinkPushRuntime.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSinkPushRuntime.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSourcePushRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSourcePushRuntime.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSourcePushRuntime.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputSourcePushRuntime.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/SinkRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/SinkRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/SinkRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/SinkRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/group/MicroPreClusteredGroupRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/group/MicroPreClusteredGroupRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/group/MicroPreClusteredGroupRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/group/MicroPreClusteredGroupRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/PipelineAssembler.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/PipelineAssembler.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/PipelineAssembler.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/PipelineAssembler.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/AssignRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/AssignRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/AssignRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/AssignRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/EmptyTupleSourceRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/EmptyTupleSourceRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/EmptyTupleSourceRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/EmptyTupleSourceRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/NestedTupleSourceRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/NestedTupleSourceRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/NestedTupleSourceRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/NestedTupleSourceRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PartitioningSplitOperatorDescriptor.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PartitioningSplitOperatorDescriptor.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PartitioningSplitOperatorDescriptor.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PartitioningSplitOperatorDescriptor.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PrinterRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PrinterRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PrinterRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/PrinterRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/RunningAggregateRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/RunningAggregateRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/RunningAggregateRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/RunningAggregateRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntime.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntime.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntime.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/SinkWriterRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamDieRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamDieRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamDieRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamDieRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamLimitRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamLimitRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamLimitRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamLimitRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamSelectRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamSelectRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamSelectRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamSelectRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StringStreamingRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StringStreamingRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StringStreamingRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StringStreamingRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/UnnestRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/UnnestRuntimeFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/UnnestRuntimeFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/UnnestRuntimeFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/PrinterBasedWriterFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/PrinterBasedWriterFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/PrinterBasedWriterFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/PrinterBasedWriterFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/SerializedDataWriterFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/SerializedDataWriterFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/SerializedDataWriterFactory.java
rename to algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/writers/SerializedDataWriterFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/build-script.xml b/algebricks/algebricks-tests/build-script.xml
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/build-script.xml
rename to algebricks/algebricks-tests/build-script.xml
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/simple/int-part1.tbl b/algebricks/algebricks-tests/data/simple/int-part1.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/simple/int-part1.tbl
rename to algebricks/algebricks-tests/data/simple/int-part1.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/simple/int-part2.tbl b/algebricks/algebricks-tests/data/simple/int-part2.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/simple/int-part2.tbl
rename to algebricks/algebricks-tests/data/simple/int-part2.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part1.tbl b/algebricks/algebricks-tests/data/tpch0.001/customer-part1.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part1.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/customer-part1.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part2.tbl b/algebricks/algebricks-tests/data/tpch0.001/customer-part2.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part2.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/customer-part2.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl b/algebricks/algebricks-tests/data/tpch0.001/customer.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/customer.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/lineitem.tbl b/algebricks/algebricks-tests/data/tpch0.001/lineitem.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/lineitem.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/lineitem.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/nation.tbl b/algebricks/algebricks-tests/data/tpch0.001/nation.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/nation.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/nation.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part1.tbl b/algebricks/algebricks-tests/data/tpch0.001/orders-part1.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part1.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/orders-part1.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part2.tbl b/algebricks/algebricks-tests/data/tpch0.001/orders-part2.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part2.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/orders-part2.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders.tbl b/algebricks/algebricks-tests/data/tpch0.001/orders.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/orders.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/part.tbl b/algebricks/algebricks-tests/data/tpch0.001/part.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/part.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/part.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/partsupp.tbl b/algebricks/algebricks-tests/data/tpch0.001/partsupp.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/partsupp.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/partsupp.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/region.tbl b/algebricks/algebricks-tests/data/tpch0.001/region.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/region.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/region.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/supplier.tbl b/algebricks/algebricks-tests/data/tpch0.001/supplier.tbl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/supplier.tbl
rename to algebricks/algebricks-tests/data/tpch0.001/supplier.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/tpch.ddl b/algebricks/algebricks-tests/data/tpch0.001/tpch.ddl
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/tpch.ddl
rename to algebricks/algebricks-tests/data/tpch0.001/tpch.ddl
diff --git a/algebricks/algebricks-tests/pom.xml b/algebricks/algebricks-tests/pom.xml
new file mode 100644
index 0000000..228baa9
--- /dev/null
+++ b/algebricks/algebricks-tests/pom.xml
@@ -0,0 +1,110 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>algebricks-tests</artifactId>
+  <name>algebricks-tests</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>algebricks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>generate-sources</phase>
+            <configuration>
+              <tasks>
+                <ant antfile="build-script.xml" target="build">
+                  <property name="main.class" value="edu.uci.ics.hyracks.algebricks.tests.script.IdentityStreamingScript" />
+                  <property name="script.classpath" refid="maven.compile.classpath" />
+                  <property name="jvm.params" value="" />
+                  <property name="program.params" value="" />
+                  <property name="source" value="${basedir}/src/main/scripts/run" />
+                  <property name="target.dir" value="${basedir}/target/testscripts" />
+                  <property name="target" value="idscript" />
+                </ant>
+              </tasks>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+    <pluginManagement>
+    	<plugins>
+    		<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+    		<plugin>
+    			<groupId>org.eclipse.m2e</groupId>
+    			<artifactId>lifecycle-mapping</artifactId>
+    			<version>1.0.0</version>
+    			<configuration>
+    				<lifecycleMappingMetadata>
+    					<pluginExecutions>
+    						<pluginExecution>
+    							<pluginExecutionFilter>
+    								<groupId>
+    									org.apache.maven.plugins
+    								</groupId>
+    								<artifactId>
+    									maven-antrun-plugin
+    								</artifactId>
+    								<versionRange>[1.3,)</versionRange>
+    								<goals>
+    									<goal>run</goal>
+    								</goals>
+    							</pluginExecutionFilter>
+    							<action>
+    								<ignore />
+    							</action>
+    						</pluginExecution>
+    					</pluginExecutions>
+    				</lifecycleMappingMetadata>
+    			</configuration>
+    		</plugin>
+    	</plugins>
+    </pluginManagement>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>algebricks-compiler</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>junit</groupId>
+  	<artifactId>junit</artifactId>
+  	<version>4.8.1</version>
+  	<scope>test</scope>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-control-cc</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-control-nc</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-data-std</artifactId>
+  	<version>0.2.3-SNAPSHOT</version>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntArrayUnnester.java b/algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntArrayUnnester.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntArrayUnnester.java
rename to algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntArrayUnnester.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerAddEvalFactory.java b/algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerAddEvalFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerAddEvalFactory.java
rename to algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerAddEvalFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java b/algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
rename to algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerEqualsEvalFactory.java b/algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerEqualsEvalFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerEqualsEvalFactory.java
rename to algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerEqualsEvalFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerGreaterThanEvalFactory.java b/algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerGreaterThanEvalFactory.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerGreaterThanEvalFactory.java
rename to algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/IntegerGreaterThanEvalFactory.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/script/IdentityStreamingScript.java b/algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/script/IdentityStreamingScript.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/script/IdentityStreamingScript.java
rename to algebricks/algebricks-tests/src/main/java/edu/uci/ics/hyracks/algebricks/tests/script/IdentityStreamingScript.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.cmd b/algebricks/algebricks-tests/src/main/scripts/run.cmd
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.cmd
rename to algebricks/algebricks-tests/src/main/scripts/run.cmd
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.sh b/algebricks/algebricks-tests/src/main/scripts/run.sh
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.sh
rename to algebricks/algebricks-tests/src/main/scripts/run.sh
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java b/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
rename to algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/tools/WriteValueTest.java b/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/tools/WriteValueTest.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/tools/WriteValueTest.java
rename to algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/tools/WriteValueTest.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java b/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
rename to algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/test/resources/results/scanMicroSortWrite.out b/algebricks/algebricks-tests/src/test/resources/results/scanMicroSortWrite.out
similarity index 100%
rename from hyracks-algebricks/hyracks-algebricks-tests/src/test/resources/results/scanMicroSortWrite.out
rename to algebricks/algebricks-tests/src/test/resources/results/scanMicroSortWrite.out
diff --git a/algebricks/pom.xml b/algebricks/pom.xml
new file mode 100644
index 0000000..ebb8332
--- /dev/null
+++ b/algebricks/pom.xml
@@ -0,0 +1,30 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>algebricks</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>algebricks</name>
+
+  <distributionManagement>
+    <repository>
+      <id>hyracks-releases</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>hyracks-snapshots</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-snapshots/</url>
+    </snapshotRepository>
+  </distributionManagement>
+
+  <modules>
+    <module>algebricks-compiler</module>
+    <module>algebricks-common</module>
+    <module>algebricks-data</module>
+    <module>algebricks-core</module>
+    <module>algebricks-runtime</module>
+    <module>algebricks-rewriter</module>
+    <module>algebricks-tests</module>
+    <module>algebricks-examples</module>
+  </modules>
+</project>
diff --git a/hivesterix/HyracksCodeFormatProfile.xml b/hivesterix/HyracksCodeFormatProfile.xml
new file mode 100755
index 0000000..2cde66d
--- /dev/null
+++ b/hivesterix/HyracksCodeFormatProfile.xml
@@ -0,0 +1,279 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<profiles version="11">
+<profile kind="CodeFormatterProfile" name="HyracksCodeFormatProfile" version="11">
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
+<setting id="org.eclipse.jdt.core.compiler.source" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
+<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="48"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="9999"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="49"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
+</profile>
+</profiles>
diff --git a/hivesterix/conf/cluster b/hivesterix/conf/cluster
new file mode 100644
index 0000000..6cc8cca
--- /dev/null
+++ b/hivesterix/conf/cluster
@@ -0,0 +1,11 @@
+4
+10.0.0.1 asterix-001
+10.0.0.2 asterix-002
+10.0.0.3 asterix-003
+10.0.0.4 asterix-004
+10.0.0.5 asterix-005
+10.0.0.6 asterix-006
+10.0.0.7 asterix-007
+10.0.0.8 asterix-008
+10.0.0.9 asterix-009
+10.0.0.10 asterix-010
diff --git a/hivesterix/conf/configuration.xsl b/hivesterix/conf/configuration.xsl
new file mode 100644
index 0000000..377cdbe
--- /dev/null
+++ b/hivesterix/conf/configuration.xsl
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+<xsl:output method="html"/>
+<xsl:template match="configuration">
+<html>
+<body>
+<table border="1">
+<tr>
+ <td>name</td>
+ <td>value</td>
+ <td>description</td>
+</tr>
+<xsl:for-each select="property">
+<tr>
+  <td><a name="{name}"><xsl:value-of select="name"/></a></td>
+  <td><xsl:value-of select="value"/></td>
+  <td><xsl:value-of select="description"/></td>
+</tr>
+</xsl:for-each>
+</table>
+</body>
+</html>
+</xsl:template>
+</xsl:stylesheet>
diff --git a/hivesterix/conf/hive-default.xml b/hivesterix/conf/hive-default.xml
new file mode 100644
index 0000000..034ea61
--- /dev/null
+++ b/hivesterix/conf/hive-default.xml
@@ -0,0 +1,769 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+	<!-- Hive Configuration can either be stored in this file or in the hadoop 
+		configuration files -->
+	<!-- that are implied by Hadoop setup variables. -->
+	<!-- Aside from Hadoop setup variables - this file is provided as a convenience 
+		so that Hive -->
+	<!-- users do not have to edit hadoop configuration files (that may be managed 
+		as a centralized -->
+	<!-- resource). -->
+
+	<!-- Hive Execution Parameters -->
+	<property>
+		<name>mapred.reduce.tasks</name>
+		<value>-1</value>
+		<description>The default number of reduce tasks per job. Typically set
+			to a prime close to the number of available hosts. Ignored when
+			mapred.job.tracker is "local". Hadoop set this to 1 by default,
+			whereas hive uses -1 as its default value.
+			By setting this property to -1, Hive will automatically figure out what
+			should be the number of reducers.
+  </description>
+	</property>
+
+	<property>
+		<name>hive.hyracks.host</name>
+		<value>128.195.14.4</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.port</name>
+		<value>3099</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.app</name>
+		<value>hivesterix</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.parrallelism</name>
+		<value>4</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external</name>
+		<value>true</value>
+	</property>
+	
+	<property>
+		<name>hive.algebricks.groupby.external.memory</name>
+		<value>536870912</value>
+	</property>
+	
+	<property>
+		<name>hive.algebricks.sort.memory</name>
+		<value>536870912</value>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.bytes.per.reducer</name>
+		<value>1000000000</value>
+		<description>size per reducer. The default is 1G, i.e. if the input size
+			is 10G, it will use 10 reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.max</name>
+		<value>999</value>
+		<description>max number of reducers will be used. If the one
+			specified in the configuration parameter mapred.reduce.tasks is
+			negative, hive will use this one as the max number of reducers when
+			automatically determining the number of reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.scratchdir</name>
+		<value>/hive-${user.name}</value>
+		<description>Scratch space for Hive jobs</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode</name>
+		<value>false</value>
+		<description>whether hive is running in test mode. If yes, it turns on
+			sampling and prefixes the output tablename</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.prefix</name>
+		<value>test_</value>
+		<description>if hive is running in test mode, prefixes the output
+			table by this string</description>
+	</property>
+
+	<!-- If the input table is not bucketed, the denominator of the tablesample 
+		is determined by the parameter below -->
+	<!-- For example, the following query: -->
+	<!-- INSERT OVERWRITE TABLE dest -->
+	<!-- SELECT col1 from src -->
+	<!-- would be converted to -->
+	<!-- INSERT OVERWRITE TABLE test_dest -->
+	<!-- SELECT col1 from src TABLESAMPLE (BUCKET 1 out of 32 on rand(1)) -->
+	<property>
+		<name>hive.test.mode.samplefreq</name>
+		<value>32</value>
+		<description>if hive is running in test mode and table is not
+			bucketed, sampling frequency</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.nosamplelist</name>
+		<value></value>
+		<description>if hive is running in test mode, don't sample the above
+			comma separated list of tables</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.local</name>
+		<value>true</value>
+		<description>controls whether to connect to remote metastore server or
+			open a new metastore server in Hive Client JVM</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionURL</name>
+		<value>jdbc:derby:;databaseName=metastore_db;create=true</value>
+		<description>JDBC connect string for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionDriverName</name>
+		<value>org.apache.derby.jdbc.EmbeddedDriver</value>
+		<description>Driver class name for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.PersistenceManagerFactoryClass</name>
+		<value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+		<description>class implementing the jdo persistence</description>
+	</property>
+
+	<property>
+		<name>datanucleus.connectionPoolingType</name>
+		<value>DBCP</value>
+		<description>Uses a DBCP connection pool for JDBC metastore
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.DetachAllOnCommit</name>
+		<value>true</value>
+		<description>detaches all objects from session so that they can be
+			used after transaction is committed</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.NonTransactionalRead</name>
+		<value>true</value>
+		<description>reads outside of transactions</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionUserName</name>
+		<value>APP</value>
+		<description>username to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionPassword</name>
+		<value>mine</value>
+		<description>password to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateTables</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateColumns</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateConstraints</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.storeManagerType</name>
+		<value>rdbms</value>
+		<description>metadata store type</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoCreateSchema</name>
+		<value>true</value>
+		<description>creates necessary schema on a startup if one doesn't
+			exist. set this to false, after creating it once</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoStartMechanismMode</name>
+		<value>checked</value>
+		<description>throw exception if metadata tables are incorrect
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.transactionIsolation</name>
+		<value>read-committed</value>
+		<description>Default transaction isolation level for identity
+			generation. </description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2</name>
+		<value>false</value>
+		<description>Use a level 2 cache. Turn this off if metadata is changed
+			independently of hive metastore server</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2.type</name>
+		<value>SOFT</value>
+		<description>SOFT=soft reference based cache, WEAK=weak reference
+			based cache.</description>
+	</property>
+
+	<property>
+		<name>datanucleus.identifierFactory</name>
+		<value>datanucleus</value>
+		<description>Name of the identifier factory to use when generating
+			table/column names etc. 'datanucleus' is used for backward
+			compatibility</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.warehouse.dir</name>
+		<value>/user/hivesterix</value>
+		<description>location of default database for the warehouse
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.connect.retries</name>
+		<value>5</value>
+		<description>Number of retries while opening a connection to metastore
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.rawstore.impl</name>
+		<value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+		<description>Name of the class that implements
+			org.apache.hadoop.hive.metastore.rawstore interface. This class is
+			used to store and retrieval of raw metadata objects such as table,
+			database</description>
+	</property>
+
+	<property>
+		<name>hive.default.fileformat</name>
+		<value>TextFile</value>
+		<description>Default file format for CREATE TABLE statement. Options
+			are TextFile and SequenceFile. Users can explicitly say CREATE TABLE
+			... STORED AS &lt;TEXTFILE|SEQUENCEFILE&gt; to override</description>
+	</property>
+
+	<property>
+		<name>hive.fileformat.check</name>
+		<value>true</value>
+		<description>Whether to check file format or not when loading data
+			files</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr</name>
+		<value>true</value>
+		<description>Whether to use map-side aggregation in Hive Group By
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.skewindata</name>
+		<value>false</value>
+		<description>Whether there is skew in data to optimize group by
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.mapaggr.checkinterval</name>
+		<value>100000</value>
+		<description>Number of rows after which size of the grouping
+			keys/aggregation classes is performed</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.local.mem</name>
+		<value>0</value>
+		<description>For local mode, memory of the mappers/reducers
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.percentmemory</name>
+		<value>0.5</value>
+		<description>Portion of total memory to be used by map-side group
+			aggregation hash table</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.min.reduction</name>
+		<value>0.5</value>
+		<description>Hash aggregation will be turned off if the ratio between
+			hash
+			table size and input rows is bigger than this number. Set to 1 to make
+			sure
+			hash aggregation is never turned off.</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.cp</name>
+		<value>true</value>
+		<description>Whether to enable column pruner</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.ppd</name>
+		<value>true</value>
+		<description>Whether to enable predicate pushdown</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.pruner</name>
+		<value>true</value>
+		<description>Whether to enable the new partition pruner which depends
+			on predicate pushdown. If this is disabled,
+			the old partition pruner which is based on AST will be enabled.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.groupby</name>
+		<value>true</value>
+		<description>Whether to enable the bucketed group by from bucketed
+			partitions/tables.</description>
+	</property>
+
+	<property>
+		<name>hive.join.emit.interval</name>
+		<value>1000</value>
+		<description>How many rows in the right-most join operand Hive should
+			buffer before emitting the join result. </description>
+	</property>
+
+	<property>
+		<name>hive.join.cache.size</name>
+		<value>25000</value>
+		<description>How many rows in the joining tables (except the streaming
+			table) should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.bucket.cache.size</name>
+		<value>100</value>
+		<description>How many values in each keys in the map-joined table
+			should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.maxsize</name>
+		<value>100000</value>
+		<description>Maximum # of rows of the small table that can be handled
+			by map-side join. If the size is reached and hive.task.progress is
+			set, a fatal error counter is set and the job will be killed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.cache.numrows</name>
+		<value>25000</value>
+		<description>How many rows should be cached by jdbm for map join.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.skewjoin</name>
+		<value>false</value>
+		<description>Whether to enable skew join optimization. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.key</name>
+		<value>100000</value>
+		<description>Determine if we get a skew key in join. If we see more
+			than the specified number of rows with the same key in join operator,
+			we think the key as a skew join key. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.map.tasks</name>
+		<value>10000</value>
+		<description> Determine the number of map tasks used in the follow up
+			map join job
+			for a skew join. It should be used together with
+			hive.skewjoin.mapjoin.min.split
+			to perform a fine grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.min.split</name>
+		<value>33554432</value>
+		<description> Determine the maximum number of map tasks used in the
+			follow up map join job
+			for a skew join by specifying the minimum split size. It should be used
+			together with
+			hive.skewjoin.mapjoin.map.tasks to perform a fine grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.mode</name>
+		<value>nonstrict</value>
+		<description>The mode in which the hive operations are being
+			performed. In strict mode, some risky queries are not allowed to run
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.maxerrsize</name>
+		<value>100000</value>
+		<description>Maximum number of bytes a script is allowed to emit to
+			standard error (per map-reduce task). This prevents runaway scripts
+			from filling logs partitions to capacity </description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.allow.partial.consumption</name>
+		<value>false</value>
+		<description> When enabled, this option allows a user script to exit
+			successfully without consuming all the data from the standard input.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.operator.id.env.var</name>
+		<value>HIVE_SCRIPT_OPERATOR_ID</value>
+		<description> Name of the environment variable that holds the unique
+			script operator ID in the user's transform function (the custom
+			mapper/reducer that the user has specified in the query)
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.output</name>
+		<value>false</value>
+		<description> This controls whether the final outputs of a query (to a
+			local/hdfs file or a hive table) is compressed. The compression codec
+			and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.intermediate</name>
+		<value>false</value>
+		<description> This controls whether intermediate files produced by
+			hive between multiple map-reduce jobs are compressed. The compression
+			codec and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel</name>
+		<value>false</value>
+		<description>Whether to execute jobs in parallel</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel.thread.number</name>
+		<value>8</value>
+		<description>How many jobs at most can be executed in parallel
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.war.file</name>
+		<value>lib\hive-hwi-0.7.0.war</value>
+		<description>This sets the path to the HWI war file, relative to
+			${HIVE_HOME}. </description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.host</name>
+		<value>0.0.0.0</value>
+		<description>This is the host address the Hive Web Interface will
+			listen on</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.port</name>
+		<value>9999</value>
+		<description>This is the port the Hive Web Interface will listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.pre.hooks</name>
+		<value></value>
+		<description>Pre Execute Hook for Tests</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapfiles</name>
+		<value>true</value>
+		<description>Merge small files at the end of a map-only job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapredfiles</name>
+		<value>false</value>
+		<description>Merge small files at the end of a map-reduce job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.heartbeat.interval</name>
+		<value>1000</value>
+		<description>Send a heartbeat after this interval - used by mapjoin
+			and filter operators</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.per.task</name>
+		<value>256000000</value>
+		<description>Size of merged files at the end of the job</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.smallfiles.avgsize</name>
+		<value>16000000</value>
+		<description>When the average output file size of a job is less than
+			this number, Hive will start an additional map-reduce job to merge
+			the output files into bigger files. This is only done for map-only
+			jobs if hive.merge.mapfiles is true, and for map-reduce jobs if
+			hive.merge.mapredfiles is true.</description>
+	</property>
+
+	<property>
+		<name>hive.script.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive Transform/Map/Reduce Clause should
+			automatically send progress information to TaskTracker to avoid the
+			task getting killed because of inactivity. Hive sends progress
+			information when the script is outputting to stderr. This option
+			removes the need to periodically produce stderr messages, but users
+			should be cautious because this may prevent infinite loops in the
+			scripts from being killed by TaskTracker.  </description>
+	</property>
+
+	<property>
+		<name>hive.script.serde</name>
+		<value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+		<description>The default serde for transmitting input data to and
+			reading output data from the user scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordreader</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+		<description>The default record reader for reading data from the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordwriter</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
+		<description>The default record writer for writing data to the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.input.format</name>
+		<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
+		<description>The default input format, if it is not specified, the
+			system assigns it. It is set to HiveInputFormat for hadoop versions
+			17, 18 and 19, whereas it is set to CombinedHiveInputFormat for
+			hadoop 20. The user can always overwrite it - if there is a bug in
+			CombinedHiveInputFormat, it can always be manually set to
+			HiveInputFormat. </description>
+	</property>
+
+	<property>
+		<name>hive.udtf.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive should automatically send progress
+			information to TaskTracker when using UDTF's to prevent the task
+			getting killed because of inactivity. Users should be cautious
+			because this may prevent TaskTracker from killing tasks with infinite
+			loops.  </description>
+	</property>
+
+	<property>
+		<name>hive.mapred.reduce.tasks.speculative.execution</name>
+		<value>true</value>
+		<description>Whether speculative execution for reducers should be
+			turned on. </description>
+	</property>
+
+	<property>
+		<name>hive.exec.counters.pull.interval</name>
+		<value>1000</value>
+		<description>The interval with which to poll the JobTracker for the
+			counters of the running job. The smaller it is the more load there
+			will be on the jobtracker, the higher it is the less granular the
+			updates will be.</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.bucketing</name>
+		<value>false</value>
+		<description>Whether bucketing is enforced. If true, while inserting
+			into the table, bucketing is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.enforce.sorting</name>
+		<value>false</value>
+		<description>Whether sorting is enforced. If true, while inserting
+			into the table, sorting is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.connection.url.hook</name>
+		<value></value>
+		<description>Name of the hook to use for retrieving the JDO connection
+			URL. If empty, the value in javax.jdo.option.ConnectionURL is used
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.attempts</name>
+		<value>1</value>
+		<description>The number of times to retry a metastore call if there
+			were a connection error</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.interval</name>
+		<value>1000</value>
+		<description>The number of milliseconds between metastore retry
+			attempts</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.min.threads</name>
+		<value>200</value>
+		<description>Minimum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.max.threads</name>
+		<value>100000</value>
+		<description>Maximum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.tcp.keepalive</name>
+		<value>true</value>
+		<description>Whether to enable TCP keepalive for the metastore server.
+			Keepalive will prevent accumulation of half-open connections.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.reducededuplication</name>
+		<value>true</value>
+		<description>Remove extra map-reduce jobs if the data is already
+			clustered by the same key which needs to be used again. This should
+			always be set to true. Since it is a new feature, it has been made
+			configurable.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition</name>
+		<value>false</value>
+		<description>Whether or not to allow dynamic partitions in DML/DDL.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition.mode</name>
+		<value>strict</value>
+		<description>In strict mode, the user must specify at least one static
+			partition in case the user accidentally overwrites all partitions.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions</name>
+		<value>1000</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in total.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions.pernode</name>
+		<value>100</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in each mapper/reducer node.</description>
+	</property>
+
+	<property>
+		<name>hive.default.partition.name</name>
+		<value>__HIVE_DEFAULT_PARTITION__</value>
+		<description>The default partition name in case the dynamic partition
+			column value is null/empty string or any other values that cannot be
+			escaped. This value must not contain any special character used in
+			HDFS URI (e.g., ':', '%', '/' etc). The user has to be aware that the
+			dynamic partition value should not contain this value to avoid
+			confusions.</description>
+	</property>
+
+	<property>
+		<name>fs.har.impl</name>
+		<value>org.apache.hadoop.hive.shims.HiveHarFileSystem</value>
+		<description>The implementation for accessing Hadoop Archives. Note
+			that this won't be applicable to Hadoop vers less than 0.20
+		</description>
+	</property>
+
+	<property>
+		<name>hive.archive.enabled</name>
+		<value>false</value>
+		<description>Whether archiving operations are permitted</description>
+	</property>
+
+	<property>
+		<name>hive.archive.har.parentdir.settable</name>
+		<value>false</value>
+		<description>In new Hadoop versions, the parent directory must be set
+			while
+			creating a HAR. Because this functionality is hard to detect with just
+			version
+			numbers, this conf var needs to be set manually.</description>
+	</property>
+
+	<!-- HBase Storage Handler Parameters -->
+
+	<property>
+		<name>hive.hbase.wal.enabled</name>
+		<value>true</value>
+		<description>Whether writes to HBase should be forced to the
+			write-ahead log. Disabling this improves HBase write performance at
+			the risk of lost writes in case of a crash.</description>
+	</property>
+
+</configuration>
diff --git a/hivesterix/conf/hive-log4j.properties b/hivesterix/conf/hive-log4j.properties
new file mode 100644
index 0000000..784a274
--- /dev/null
+++ b/hivesterix/conf/hive-log4j.properties
@@ -0,0 +1,58 @@
+#------------------------------------------------------------------------------
+#
+#  The following properties set the logging levels and log appender.  The
+#  log4j.rootCategory variable defines the default log level and one or more
+#  appenders.  For the console, use 'S'.  For the daily rolling file, use 'R'.
+#  For an HTML formatted log, use 'H'.
+#
+#  To override the default (rootCategory) log level, define a property of the
+#  form (see below for available values):
+#
+#        log4j.logger. =
+#
+#    Available logger names:
+#      TODO
+#
+#    Possible Log Levels:
+#      FATAL, ERROR, WARN, INFO, DEBUG
+#
+#------------------------------------------------------------------------------
+log4j.rootCategory=INFO, S
+
+log4j.logger.com.dappit.Dapper.parser=ERROR
+log4j.logger.org.w3c.tidy=FATAL
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the console (stdout) appender.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.S = org.apache.log4j.ConsoleAppender
+log4j.appender.S.layout = org.apache.log4j.PatternLayout
+log4j.appender.S.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the Daily Rolling File appender.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.R = org.apache.log4j.DailyRollingFileAppender
+log4j.appender.R.File = logs/bensApps.log
+log4j.appender.R.Append = true
+log4j.appender.R.DatePattern = '.'yyyy-MM-dd
+log4j.appender.R.layout = org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the Rolling File appender in HTML.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.H = org.apache.log4j.RollingFileAppender
+log4j.appender.H.File = logs/bensApps.html
+log4j.appender.H.MaxFileSize = 100KB
+log4j.appender.H.Append = false
+log4j.appender.H.layout = org.apache.log4j.HTMLLayout
diff --git a/hivesterix/pom.xml b/hivesterix/pom.xml
new file mode 100644
index 0000000..715790c
--- /dev/null
+++ b/hivesterix/pom.xml
@@ -0,0 +1,557 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>edu.uci.ics.hivesterix</groupId>
+	<artifactId>hivesterix</artifactId>
+	<version>0.2.3-SNAPSHOT</version>
+	<name>hivesterix</name>
+	<dependencies>
+		<dependency>
+			<groupId>javax.servlet</groupId>
+			<artifactId>servlet-api</artifactId>
+			<version>2.5</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>args4j</groupId>
+			<artifactId>args4j</artifactId>
+			<version>2.0.12</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.json</groupId>
+			<artifactId>json</artifactId>
+			<version>20090211</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-server</artifactId>
+			<version>8.0.0.M1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-servlet</artifactId>
+			<version>8.0.0.M1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-core</artifactId>
+			<version>0.20.2</version>
+		</dependency>
+		<dependency>
+			<groupId>jline</groupId>
+			<artifactId>jline</artifactId>
+			<version>0.9.94</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-core</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-connectionpool</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-enhancer</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-rdbms</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-dbcp</groupId>
+			<artifactId>commons-dbcp</artifactId>
+			<version>1.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-pool</groupId>
+			<artifactId>commons-pool</artifactId>
+			<version>1.5.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-collections</groupId>
+			<artifactId>commons-collections</artifactId>
+			<version>3.2.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-lang</groupId>
+			<artifactId>commons-lang</artifactId>
+			<version>2.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>javax</groupId>
+			<artifactId>jdo2-api</artifactId>
+			<version>2.3-ec</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.facebook</groupId>
+			<artifactId>libfb303</artifactId>
+			<version>0.5.0</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.thrift</groupId>
+			<artifactId>libthrift</artifactId>
+			<version>0.5.0</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>cli</artifactId>
+			<version>1.2</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache</groupId>
+			<artifactId>log4j</artifactId>
+			<version>1.2.15</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.antlr</groupId>
+			<artifactId>antlr-runtime</artifactId>
+			<version>3.0.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-cli</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-common</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-exec</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-hwi</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-jdbc</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-metastore</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-service</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-shims</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-serde</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>1.6.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-cli</groupId>
+			<artifactId>commons-cli</artifactId>
+			<version>1.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<version>1.6.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-test</artifactId>
+			<version>0.20.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-logging</groupId>
+			<artifactId>commons-logging</artifactId>
+			<version>1.1.1</version>
+			<type>jar</type>
+			<classifier>api</classifier>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.google.guava</groupId>
+			<artifactId>guava</artifactId>
+			<version>r06</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.antlr</groupId>
+			<artifactId>stringtemplate</artifactId>
+			<version>3.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.derby</groupId>
+			<artifactId>derby</artifactId>
+			<version>10.8.1.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase</artifactId>
+			<version>0.90.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>algebricks-compiler</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>appassembler-maven-plugin</artifactId>
+				<version>1.3</version>
+				<executions>
+					<execution>
+						<configuration>
+							<programs>
+								<program>
+									<mainClass>edu.uci.ics.asterix.hive.cli.CliDriver</mainClass>
+									<name>algebricks-hivesterix-cmd</name>
+								</program>
+							</programs>
+							<repositoryLayout>flat</repositoryLayout>
+							<repositoryName>lib</repositoryName>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>assemble</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<version>2.2-beta-5</version>
+				<executions>
+					<execution>
+						<configuration>
+							<descriptors>
+								<descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+							</descriptors>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>attached</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.13</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx2047m -Dfile.encoding=UTF-8 
+                        -Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/test/optimizer/*TestSuite.java</include>
+						<include>**/test/optimizer/*Test.java</include>
+						<include>**/test/runtimefunction/*TestSuite.java</include>
+						<include>**/test/runtimefunction/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-resources-plugin</artifactId>
+				<version>2.5</version>
+				<executions>
+					<execution>
+						<id>copy-resources</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/bin</outputDirectory>
+							<resources>
+								<resource>
+									<directory>resource/bin</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-conf</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/conf</outputDirectory>
+							<resources>
+								<resource>
+									<directory>conf</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-asterix</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/asterix</outputDirectory>
+							<resources>
+								<resource>
+									<directory>resource/asterix</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-asterix-dbg</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/asterix_dbg</outputDirectory>
+							<resources>
+								<resource>
+									<directory>resource/asterix_dbg</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-hivesterix</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/hivesterix</outputDirectory>
+							<resources>
+								<resource>
+									<directory>resource/hivesterix</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-conf2</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/hivesterix/conf</outputDirectory>
+							<resources>
+								<resource>
+									<directory>conf</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-data</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>data</outputDirectory>
+							<resources>
+								<resource>
+									<directory>resource/data</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>metastore*</include>
+								<include>hadoop*</include>
+								<include>edu*</include>
+								<include>tmp*</include>
+								<include>build*</include>
+								<include>target*</include>
+								<include>log*</include>
+								<include>derby.log</include>
+								<include>ClusterController*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+	<repositories>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>third-party</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/third-party</url>
+		</repository>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>hyracks-public-release</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-public-releases</url>
+		</repository>
+	</repositories>
+	<distributionManagement>
+		<!-- use the following if you're not using a snapshot version. -->
+		<repository>
+			<id>hivesterix</id>
+			<name>hivesterix</name>
+			<url>scp://obelix.ics.uci.edu/nexus/content/groups/hivesterix-public</url>
+		</repository>
+		<!-- use the following if you ARE using a snapshot version. -->
+		<snapshotRepository>
+			<id>hivesterix</id>
+			<name>Repository Name</name>
+			<url>scp://obelix.ics.uci.edu/nexus/content/groups/hivesterix-public</url>
+		</snapshotRepository>
+	</distributionManagement>
+</project>
diff --git a/hivesterix/resource/asterix/destroy.sh b/hivesterix/resource/asterix/destroy.sh
new file mode 100644
index 0000000..9ece8d6
--- /dev/null
+++ b/hivesterix/resource/asterix/destroy.sh
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivex;
diff --git a/hivesterix/resource/asterix/hivedeploy.hcli b/hivesterix/resource/asterix/hivedeploy.hcli
new file mode 100644
index 0000000..f588ac6
--- /dev/null
+++ b/hivesterix/resource/asterix/hivedeploy.hcli
@@ -0,0 +1,2 @@
+connect to "128.195.14.4:3099";
+create application hivesterix "/home/yingyib/hivesterix/target/algebricks-hivesterix-0.0.1-SNAPSHOT-binary-assembly.zip";
diff --git a/hivesterix/resource/asterix/hivedestroy.hcli b/hivesterix/resource/asterix/hivedestroy.hcli
new file mode 100644
index 0000000..c53fa81
--- /dev/null
+++ b/hivesterix/resource/asterix/hivedestroy.hcli
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivesterix;
diff --git a/hivesterix/resource/asterix/startall.sh b/hivesterix/resource/asterix/startall.sh
new file mode 100755
index 0000000..e28ae27
--- /dev/null
+++ b/hivesterix/resource/asterix/startall.sh
@@ -0,0 +1,16 @@
+ssh asterix-master './hivesterix/target/appassembler/asterix/startcc.sh'&
+sleep 20
+ssh asterix-001 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-002 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-003 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-004 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-005 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-006 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-007 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-008 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-009 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-010 './hivesterix/target/appassembler/asterix/startnc.sh'&
+
+sleep 30
+export HYRACKS_HOME=/home/yingyib/hyracks_asterix_stabilization
+$HYRACKS_HOME/hyracks-cli/target/appassembler/bin/hyrackscli < ~/hivesterix/target/appassembler/asterix/hivedeploy.hcli
diff --git a/hivesterix/resource/asterix/startcc.sh b/hivesterix/resource/asterix/startcc.sh
new file mode 100755
index 0000000..f313a9f
--- /dev/null
+++ b/hivesterix/resource/asterix/startcc.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks_asterix_stabilization
+
+export JAVA_OPTS="-Xmx2g  -Djava.rmi.server.hostname=128.195.14.4"
+
+cd $LOGSDIR
+echo $HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address 128.195.14.4 -cluster-net-ip-address 10.1.0.1 -client-net-port 3099 -cluster-net-port 1099 -max-heartbeat-lapse-periods 999999 &> $LOGSDIR/cc-asterix.log&
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address 128.195.14.4 -cluster-net-ip-address 10.1.0.1 -client-net-port 3099 -cluster-net-port 1099 -max-heartbeat-lapse-periods 999999 &> $LOGSDIR/cc-asterix.log&
diff --git a/hivesterix/resource/asterix/startnc.sh b/hivesterix/resource/asterix/startnc.sh
new file mode 100755
index 0000000..6cbd5e9
--- /dev/null
+++ b/hivesterix/resource/asterix/startnc.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+export JAVA_HOME=/usr/local/java/vms/java
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks_asterix_stabilization
+
+IPADDR=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | cut -f 2 -d ':'`
+NODEID=`ypcat hosts | grep asterix | grep "$IPADDR " | awk '{print $2}'`
+
+rm -rf /mnt/data/sda/space/yingyi/tmp/*
+rm -rf /mnt/data/sdb/space/yingyi/tmp/*
+rm -rf /mnt/data/sdc/space/yingyi/tmp/*
+rm -rf /mnt/data/sdd/space/yingyi/tmp/*
+
+
+export JAVA_OPTS="-Xmx10G"
+
+cd $LOGSDIR
+echo $HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 128.195.14.4 -cc-port 3099 -data-ip-address $IPADDR -node-id $NODEID
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 10.1.0.1 -cc-port 1099 -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -node-id $NODEID -iodevices "/mnt/data/sda/space/yingyi/tmp/,/mnt/data/sdb/space/yingyi/tmp/,/mnt/data/sdc/space/yingyi/tmp/,/mnt/data/sdd/space/yingyi/tmp/" -frame-size 32768&> $LOGSDIR/$NODEID.log &
diff --git a/hivesterix/resource/asterix/stopall.sh b/hivesterix/resource/asterix/stopall.sh
new file mode 100755
index 0000000..7cd5a5a
--- /dev/null
+++ b/hivesterix/resource/asterix/stopall.sh
@@ -0,0 +1,11 @@
+ssh asterix-master './hivesterix/target/appassembler/asterix/stopcc.sh'&
+ssh asterix-001 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-002 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-003 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-004 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-005 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-006 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-007 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-008 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-009 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-010 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
diff --git a/hivesterix/resource/asterix/stopcc.sh b/hivesterix/resource/asterix/stopcc.sh
new file mode 100755
index 0000000..51a1066
--- /dev/null
+++ b/hivesterix/resource/asterix/stopcc.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+PID=`/usr/local/java/vms/java/bin/jps | grep CCDriver | awk '{print $1}'`
+
+echo $PID
+kill -9 $PID
diff --git a/hivesterix/resource/asterix/stopnc.sh b/hivesterix/resource/asterix/stopnc.sh
new file mode 100755
index 0000000..77cecfc
--- /dev/null
+++ b/hivesterix/resource/asterix/stopnc.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+#PID=`/usr/local/java/vms/java/bin/jps | grep NCDriver | awk '{print $1}'`
+
+PID=`ps -ef|grep yingyib|grep java|grep hyracks|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+#PID=`ps -ef|grep yingyib|grep java|grep datanode|awk '{print $2}'`
+
+#echo $PID
+#kill -9 $PID
+
+
+#PID=`ps -ef|grep yingyib|grep java|grep tasktracker|awk '{print $2}'`
+
+#echo $PID
+#kill -9 $PID
+
+rm -rf /mnt/data/sda/space/yingyi/tmp/*
+rm -rf /mnt/data/sdb/space/yingyi/tmp/*
+rm -rf /mnt/data/sdc/space/yingyi/tmp/*
+rm -rf /mnt/data/sdd/space/yingyi/tmp/*
diff --git a/hivesterix/resource/asterix_dbg/destroy.sh b/hivesterix/resource/asterix_dbg/destroy.sh
new file mode 100644
index 0000000..9ece8d6
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/destroy.sh
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivex;
diff --git a/hivesterix/resource/asterix_dbg/hivedeploy.hcli b/hivesterix/resource/asterix_dbg/hivedeploy.hcli
new file mode 100644
index 0000000..6012493
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/hivedeploy.hcli
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+create application hivesterix "/home/yingyib/hivesterix/target/algebricks-hivesterix-0.0.1-SNAPSHOT-binary-assembly.zip";
diff --git a/hivesterix/resource/asterix_dbg/hivedestroy.hcli b/hivesterix/resource/asterix_dbg/hivedestroy.hcli
new file mode 100644
index 0000000..c53fa81
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/hivedestroy.hcli
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivesterix;
diff --git a/hivesterix/resource/asterix_dbg/startall.sh b/hivesterix/resource/asterix_dbg/startall.sh
new file mode 100755
index 0000000..24e57c4
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/startall.sh
@@ -0,0 +1,17 @@
+HYRACKS_HOME=/home/yingyib/hyracks-0.1.5
+
+ssh asterix-master ./hivesterix/target/appassembler/asterix_dbg/startcc.sh
+sleep 20
+ssh asterix-001 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-002 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-003 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-004 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-005 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-006 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-007 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-008 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-009 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-010 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+
+sleep 10
+$HYRACKS_HOME/hyracks-cli/target/appassembler/bin/hyrackscli < ~/hivesterix/target/appassembler/asterix_dbg/hivedeploy.hcli
diff --git a/hivesterix/resource/asterix_dbg/startcc.sh b/hivesterix/resource/asterix_dbg/startcc.sh
new file mode 100755
index 0000000..5a0043e
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/startcc.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks-0.1.5
+
+export JAVA_OPTS="-Djava.rmi.server.hostname=128.195.14.4 -Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n"
+
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -port 3099  &> $LOGSDIR/cc-asterix.log&
diff --git a/hivesterix/resource/asterix_dbg/startnc.sh b/hivesterix/resource/asterix_dbg/startnc.sh
new file mode 100755
index 0000000..5a6024b
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/startnc.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+export JAVA_HOME=/usr/local/java/vms/java
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks-0.1.5
+
+IPADDR=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | cut -f 2 -d ':'`
+NODEID=`ypcat hosts | grep asterix | grep "$IPADDR " | awk '{print $2}'`
+
+export JAVA_OPTS="-Xmx10G -agentpath:/home/yingyib/yjp-9.5.6/bin/linux-x86-64/libyjpagent.so=listen=28001"
+
+echo $HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 128.195.14.4 -cc-port 3099 -data-ip-address $IPADDR -node-id $NODEID
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 128.195.14.4 -cc-port 3099  -data-ip-address $IPADDR -node-id $NODEID -iodevices "/mnt/data/sda/space/yingyi/tmp/,/mnt/data/sdb/space/yingyi/tmp/,/mnt/data/sdc/space/yingyi/tmp/,/mnt/data/sdd/space/yingyi/tmp/" &> $LOGSDIR/$NODEID.log &
diff --git a/hivesterix/resource/asterix_dbg/stopall.sh b/hivesterix/resource/asterix_dbg/stopall.sh
new file mode 100755
index 0000000..e2295f1
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/stopall.sh
@@ -0,0 +1,11 @@
+ssh asterix-master ./hivesterix/target/appassembler/asterix_dbg/stopcc.sh
+ssh asterix-001 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-002 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-003 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-004 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-005 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-006 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-007 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-008 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-009 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-010 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
diff --git a/hivesterix/resource/asterix_dbg/stopcc.sh b/hivesterix/resource/asterix_dbg/stopcc.sh
new file mode 100755
index 0000000..51a1066
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/stopcc.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+PID=`/usr/local/java/vms/java/bin/jps | grep CCDriver | awk '{print $1}'`
+
+echo $PID
+kill -9 $PID
diff --git a/hivesterix/resource/asterix_dbg/stopnc.sh b/hivesterix/resource/asterix_dbg/stopnc.sh
new file mode 100755
index 0000000..6bbbb3b
--- /dev/null
+++ b/hivesterix/resource/asterix_dbg/stopnc.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+#PID=`/usr/local/java/vms/java/bin/jps | grep NCDriver | awk '{print $1}'`
+
+PID=`ps -ef|grep yingyib|grep java|grep hyracks|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+PID=`ps -ef|grep yingyib|grep java|grep datanode|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+
+PID=`ps -ef|grep yingyib|grep java|grep tasktracker|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+rm -rf /mnt/data/sda/space/yingyi/tmp/*
+rm -rf /mnt/data/sdb/space/yingyi/tmp/*
+rm -rf /mnt/data/sdc/space/yingyi/tmp/*
+rm -rf /mnt/data/sdd/space/yingyi/tmp/*
diff --git a/hivesterix/resource/bin/ext/cli.sh b/hivesterix/resource/bin/ext/cli.sh
new file mode 100644
index 0000000..914aae3
--- /dev/null
+++ b/hivesterix/resource/bin/ext/cli.sh
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=cli
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+cli () {
+  CLASS=org.apache.hadoop.hive.cli.CliDriver
+  execHiveCmd $CLASS "$@"
+}
+
+cli_help () {
+  CLASS=org.apache.hadoop.hive.cli.CliDriver
+  execHiveCmd $CLASS "--help"
+}
+
diff --git a/hivesterix/resource/bin/ext/help.sh b/hivesterix/resource/bin/ext/help.sh
new file mode 100644
index 0000000..432859a
--- /dev/null
+++ b/hivesterix/resource/bin/ext/help.sh
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=help
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+help() {
+  echo "Usage ./hive <parameters> --service serviceName <service parameters>"
+  echo "Service List: $SERVICE_LIST"
+  echo "Parameters parsed:"
+  echo "  --auxpath : Auxillary jars "
+  echo "  --config : Hive configuration directory"
+  echo "  --service : Starts specific service/component. cli is default"
+  echo "Parameters used:"
+  echo "  HADOOP_HOME : Hadoop install directory"
+  echo "  HIVE_OPT : Hive options"
+  echo "For help on a particular service:"
+  echo "  ./hive --service serviceName --help"
+}
+
+help_help(){
+  help
+}
+
diff --git a/hivesterix/resource/bin/ext/hiveserver.sh b/hivesterix/resource/bin/ext/hiveserver.sh
new file mode 100644
index 0000000..b5edce4
--- /dev/null
+++ b/hivesterix/resource/bin/ext/hiveserver.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hiveserver
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+# hiveserver: launch the Hive Thrift server via "hadoop jar".
+# Reads HIVE_PORT from the environment; extra args are passed through.
+hiveserver() {
+  echo "Starting Hive Thrift Server"
+  CLASS=org.apache.hadoop.hive.service.HiveServer
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+  # NOTE(review): unquoted glob — assumes exactly one hive-service-*.jar in $HIVE_LIB.
+  JAR=${HIVE_LIB}/hive-service-*.jar
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $JAR $CLASS $HIVE_PORT "$@"
+}
+
+# hiveserver_help: usage text shown for "./hive --service hiveserver --help".
+hiveserver_help() {
+  echo "usage HIVE_PORT=xxxx ./hive --service hiveserver" 
+  echo "  HIVE_PORT : Specify the server port"
+}
+
diff --git a/hivesterix/resource/bin/ext/hwi.sh b/hivesterix/resource/bin/ext/hwi.sh
new file mode 100644
index 0000000..f9cd8ec
--- /dev/null
+++ b/hivesterix/resource/bin/ext/hwi.sh
@@ -0,0 +1,50 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hwi
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+# hwi: launch the Hive Web Interface server.  Requires the hwi jar/war in
+# $HIVE_LIB and ant jars (ANT_LIB, default /opt/ant/lib) on the classpath.
+hwi() {
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  CLASS=org.apache.hadoop.hive.hwi.HWIServer
+  # The ls hack forces the * to be expanded which is required because 
+  # System.getenv doesn't do globbing
+  export HWI_JAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.jar)
+  export HWI_WAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.war)
+
+  #hwi requires ant jars
+  if [ "$ANT_LIB" = "" ] ; then
+    ANT_LIB=/opt/ant/lib
+  fi
+  for f in ${ANT_LIB}/*.jar; do
+    if [[ ! -f $f ]]; then
+      continue;
+    fi
+    HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f
+  done
+
+  export HADOOP_CLASSPATH
+  
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar ${HWI_JAR_FILE} $CLASS $HIVE_OPTS "$@"
+}
+
+# hwi_help: usage text shown for "./hive --service hwi --help".
+hwi_help(){
+  echo "Usage ANT_LIB=XXXX hive --service hwi"	
+}
diff --git a/hivesterix/resource/bin/ext/jar.sh b/hivesterix/resource/bin/ext/jar.sh
new file mode 100644
index 0000000..b52f9a7
--- /dev/null
+++ b/hivesterix/resource/bin/ext/jar.sh
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=jar
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+# jar: run an arbitrary user jar/class with the Hadoop + Hive classpath
+# and environment.  First arg is the jar, second the main class; the rest
+# are forwarded to the application.
+jar () {
+  RUNJAR=$1
+  shift
+
+  RUNCLASS=$1
+  shift
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  if [ -z "$RUNJAR" ] ; then
+    echo "RUNJAR not specified"
+    exit 3
+  fi
+
+  if [ -z "$RUNCLASS" ] ; then
+    echo "RUNCLASS not specified"
+    exit 3
+  fi
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $RUNJAR $RUNCLASS $HIVE_OPTS "$@"
+}
+
+# jar_help: usage text shown for "./hive --service jar --help".
+jar_help () {
+  echo "Used for applications that require Hadoop and Hive classpath and environment."
+  echo "./hive --service jar <yourjar> <yourclass> HIVE_OPTS <your_args>"
+}
diff --git a/hivesterix/resource/bin/ext/lineage.sh b/hivesterix/resource/bin/ext/lineage.sh
new file mode 100644
index 0000000..993bc8d
--- /dev/null
+++ b/hivesterix/resource/bin/ext/lineage.sh
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=lineage
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+# lineage: run the LineageInfo tool from the hive-exec jar on the given HQL.
+lineage () {
+  CLASS=org.apache.hadoop.hive.ql.tools.LineageInfo
+
+  # cli specific code
+  # NOTE(review): "[ ! -f glob ]" breaks with "too many arguments" if more
+  # than one hive-exec-*.jar is present — assumes a single match.
+  if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ]; then
+    echo "Missing Hive exec Jar"
+    exit 3;
+  fi
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  exec $HADOOP jar ${HIVE_LIB}/hive-exec-*.jar $CLASS  "$@"
+}
+
+# lineage_help: usage text shown for "./hive --service lineage --help".
+lineage_help () {
+  echo "usage ./hive 'hql' "
+} 
+
diff --git a/hivesterix/resource/bin/ext/metastore.sh b/hivesterix/resource/bin/ext/metastore.sh
new file mode 100644
index 0000000..db15f6e
--- /dev/null
+++ b/hivesterix/resource/bin/ext/metastore.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=metastore
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+# metastore: launch the standalone Hive metastore server via "hadoop jar".
+# Reads METASTORE_PORT from the environment; extra args are passed through.
+metastore() {
+  echo "Starting Hive Metastore Server"
+  CLASS=org.apache.hadoop.hive.metastore.HiveMetaStore
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+  # NOTE(review): unquoted glob — assumes exactly one hive-service-*.jar in $HIVE_LIB.
+  JAR=${HIVE_LIB}/hive-service-*.jar
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $JAR $CLASS $METASTORE_PORT "$@"
+}
+
+# metastore_help: usage text shown for "./hive --service metastore --help".
+metastore_help() {
+  echo "usage METASTORE_PORT=xxxx ./hive --service metastore"
+  echo "  METASTORE_PORT : Specify the metastore server port"
+}
+
diff --git a/hivesterix/resource/bin/ext/rcfilecat.sh b/hivesterix/resource/bin/ext/rcfilecat.sh
new file mode 100644
index 0000000..3a9264b
--- /dev/null
+++ b/hivesterix/resource/bin/ext/rcfilecat.sh
@@ -0,0 +1,27 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=rcfilecat
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+# rcfilecat: dump RCFile contents via the shared execHiveCmd helper.
+# HIVE_OPTS is cleared so only the user-supplied flags reach RCFileCat.
+rcfilecat () {
+  CLASS=org.apache.hadoop.hive.cli.RCFileCat
+  HIVE_OPTS=''
+  execHiveCmd $CLASS "$@"
+}
+
+# rcfilecat_help: usage text shown for "./hive --service rcfilecat --help".
+rcfilecat_help () {
+  echo "usage ./hive rcfilecat [--start='startoffset'] [--length='len'] "
+} 
diff --git a/hivesterix/resource/bin/ext/util/execHiveCmd.sh b/hivesterix/resource/bin/ext/util/execHiveCmd.sh
new file mode 100644
index 0000000..167cc40
--- /dev/null
+++ b/hivesterix/resource/bin/ext/util/execHiveCmd.sh
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# execHiveCmd: shared helper — exec a main class ($1) from the hive-cli jar
+# under "hadoop jar", appending HIVE_OPTS and the remaining arguments.
+# Relies on $HIVE_LIB, $HADOOP, $HIVE_OPTS and $cygwin set by bin/hive.
+execHiveCmd () {
+  CLASS=$1;
+  shift;
+
+  # cli specific code
+  if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then
+    echo "Missing Hive CLI Jar"
+    exit 3;
+  fi
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  # hadoop 20 or newer - skip the aux_jars option. picked up from hiveconf
+  exec $HADOOP jar ${HIVE_LIB}/hive-cli-*.jar $CLASS $HIVE_OPTS "$@"
+}
diff --git a/hivesterix/resource/bin/hive b/hivesterix/resource/bin/hive
new file mode 100755
index 0000000..8a83bde
--- /dev/null
+++ b/hivesterix/resource/bin/hive
@@ -0,0 +1,213 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# --- platform detection and script location ---------------------------------
+cygwin=false
+case "`uname`" in
+   CYGWIN*) cygwin=true;;
+esac
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hive-config.sh
+
+# --- parse --service/--rcfilecat/--help; remaining args go to the service ---
+SERVICE=""
+HELP=""
+while [ $# -gt 0 ]; do
+  case "$1" in
+    --service)
+      shift
+      SERVICE=$1
+      shift
+      ;;
+    --rcfilecat)
+      SERVICE=rcfilecat
+      shift
+      ;;
+    --help)
+      HELP=_help
+      shift
+      ;;
+    *)
+      break
+      ;;
+  esac
+done
+
+# Default service is "cli", or "help" when only --help was given.
+if [ "$SERVICE" = "" ] ; then
+  if [ "$HELP" = "_help" ] ; then
+    SERVICE="help"
+  else
+    SERVICE="cli"
+  fi
+fi
+
+if [ -f "${HIVE_CONF_DIR}/hive-env.sh" ]; then
+  . "${HIVE_CONF_DIR}/hive-env.sh"
+fi
+
+CLASSPATH="${HIVE_CONF_DIR}"
+
+HIVE_LIB=${HIVE_HOME}/lib
+
+# needed for execution
+if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ]; then
+  echo "Missing Hive Execution Jar: ${HIVE_LIB}/hive-exec-*.jar"
+  exit 1;
+fi
+
+if [ ! -f ${HIVE_LIB}/hive-metastore-*.jar ]; then
+  echo "Missing Hive MetaStore Jar"
+  exit 2;
+fi
+
+# cli specific code
+if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then
+  echo "Missing Hive CLI Jar"
+  exit 3;
+fi
+
+# NOTE(review): hard-coded hivesterix jar version — must be kept in sync
+# with the build, or it silently drops off the classpath.
+CLASSPATH=${CLASSPATH}:${HIVE_LIB}/algebricks-hivesterix-0.0.1-SNAPSHOT.jar
+
+for f in ${HIVE_LIB}/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add the auxiliary jars such as serdes; AUX_PARAM collects file:// URIs
+# for hive.aux.jars.path while AUX_CLASSPATH collects the local classpath
+if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
+  for f in ${HIVE_AUX_JARS_PATH}/*.jar; do
+    if [[ ! -f $f ]]; then
+        continue;
+    fi
+    if $cygwin; then
+	f=`cygpath -w "$f"`
+    fi
+    AUX_CLASSPATH=${AUX_CLASSPATH}:$f
+    if [ "${AUX_PARAM}" == "" ]; then
+        AUX_PARAM=file://$f
+    else
+        AUX_PARAM=${AUX_PARAM},file://$f;
+    fi
+  done
+elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then 
+  if $cygwin; then
+      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/,/:/g'`
+      HIVE_AUX_JARS_PATH=`cygpath -p -w "$HIVE_AUX_JARS_PATH"`
+      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/;/,/g'`
+  fi
+  AUX_CLASSPATH=${HIVE_AUX_JARS_PATH}
+  AUX_PARAM=file://${HIVE_AUX_JARS_PATH}
+  AUX_PARAM=`echo $AUX_PARAM | sed 's/,/,file:\/\//g'`
+fi
+
+# adding jars from auxlib directory
+for f in ${HIVE_HOME}/auxlib/*.jar; do
+  if [[ ! -f $f ]]; then
+      continue;
+  fi
+  if $cygwin; then
+      f=`cygpath -w "$f"`
+  fi
+  AUX_CLASSPATH=${AUX_CLASSPATH}:$f
+  if [ "${AUX_PARAM}" == "" ]; then
+    AUX_PARAM=file://$f
+  else
+    AUX_PARAM=${AUX_PARAM},file://$f;
+  fi
+done
+if $cygwin; then
+    CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+    CLASSPATH=${CLASSPATH};${AUX_CLASSPATH}
+else
+    CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH}
+fi
+
+# pass classpath to hadoop
+export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${CLASSPATH}"
+
+# check for hadoop in the path
+HADOOP_IN_PATH=`which hadoop 2>/dev/null`
+# NOTE(review): unquoted test — with an empty HADOOP_IN_PATH this becomes
+# "[ -f ]", which is true, so HADOOP_DIR is set to "./.."; confirm intended.
+if [ -f ${HADOOP_IN_PATH} ]; then
+  HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
+fi
+# HADOOP_HOME env variable overrides hadoop in the path
+HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+if [ "$HADOOP_HOME" == "" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+HADOOP=$HADOOP_HOME/bin/hadoop
+if [ ! -f ${HADOOP} ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+# Make sure we're using a compatible version of Hadoop
+hadoop_version=$($HADOOP version | awk '{if (NR == 1) {print $2;}}');
+
+# Save the regex to a var to workaround quoting incompatabilities
+# between Bash 3.1 and 3.2
+hadoop_version_re="^([[:digit:]]+)\.([[:digit:]]+)(\.([[:digit:]]+))?.*$"
+
+if [[ "$hadoop_version" =~ $hadoop_version_re ]]; then
+    hadoop_major_ver=${BASH_REMATCH[1]}
+    hadoop_minor_ver=${BASH_REMATCH[2]}
+    hadoop_patch_ver=${BASH_REMATCH[4]}
+else
+    echo "Unable to determine Hadoop version information."
+    echo "'hadoop version' returned:"
+    echo `$HADOOP version`
+    exit 5
+fi
+
+# Only Hadoop 0.20.x with x >= 1 is accepted.
+if [ $hadoop_minor_ver -ne 20 -o $hadoop_patch_ver -eq 0 ]; then
+    echo "Hive requires Hadoop 0.20.x (x >= 1)."
+    echo "'hadoop version' returned:"
+    echo `$HADOOP version`
+    exit 6
+fi
+
+if [ "${AUX_PARAM}" != "" ]; then
+  HIVE_OPTS="$HIVE_OPTS -hiveconf hive.aux.jars.path=${AUX_PARAM}"
+  AUX_JARS_CMD_LINE="-libjars ${AUX_PARAM}"
+fi
+
+# Source every service script (each ext/*.sh appends itself to SERVICE_LIST)
+# and the shared helpers under ext/util/.
+SERVICE_LIST=""
+
+for i in "$bin"/ext/*.sh ; do
+  . $i
+done
+
+for i in "$bin"/ext/util/*.sh ; do
+  . $i
+done
+
+# Dispatch: run <service> or, when --help was given, <service>_help.
+TORUN=""
+for j in $SERVICE_LIST ; do
+  if [ "$j" = "$SERVICE" ] ; then
+    TORUN=${j}$HELP
+  fi
+done
+
+if [ "$TORUN" = "" ] ; then
+  echo "Service $SERVICE not found"
+  echo "Available Services: $SERVICE_LIST"
+  exit 7
+else
+  $TORUN "$@"
+fi
diff --git a/hivesterix/resource/bin/hive-config.sh b/hivesterix/resource/bin/hive-config.sh
new file mode 100755
index 0000000..2524bbc
--- /dev/null
+++ b/hivesterix/resource/bin/hive-config.sh
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# processes --config option from command line
+#
+
+# Resolve symlinks so $this points at the real script file.
+this="$0"
+while [ -h "$this" ]; do
+  ls=`ls -ld "$this"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    this="$link"
+  else
+    this=`dirname "$this"`/"$link"
+  fi
+done
+
+# convert relative path to absolute path
+bin=`dirname "$this"`
+script=`basename "$this"`
+bin=`cd "$bin"; pwd`
+this="$bin/$script"
+
+# the root of the Hive installation (the parent of the bin directory)
+export HIVE_HOME=`dirname "$bin"`
+
+#check to see if the conf dir is given as an optional argument
+while [ $# -gt 0 ]; do    # Until you run out of parameters . . .
+  case "$1" in
+    --config)
+        shift
+        confdir=$1
+        shift
+        HIVE_CONF_DIR=$confdir
+        ;;
+    --auxpath)
+        shift
+        HIVE_AUX_JARS_PATH=$1
+        shift
+        ;;
+    *)
+        break;
+        ;;
+  esac
+done
+
+
+# Allow alternate conf dir location.
+HIVE_CONF_DIR="${HIVE_CONF_DIR:-$HIVE_HOME/conf}"
+
+export HIVE_CONF_DIR=$HIVE_CONF_DIR
+export HIVE_AUX_JARS_PATH=$HIVE_AUX_JARS_PATH
+
+# Default to use 256MB 
+export HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-256}
diff --git a/hivesterix/resource/bin/init-hive-dfs.sh b/hivesterix/resource/bin/init-hive-dfs.sh
new file mode 100755
index 0000000..ec3997a
--- /dev/null
+++ b/hivesterix/resource/bin/init-hive-dfs.sh
@@ -0,0 +1,107 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The purpose of this script is to set warehouse's directories on HDFS
+
+DEFAULT_WAREHOUSE_DIR="/user/hive/warehouse"
+DEFAULT_TMP_DIR="/tmp"
+
+WAREHOUSE_DIR=${DEFAULT_WAREHOUSE_DIR}
+TMP_DIR=${DEFAULT_TMP_DIR}
+HELP=""
+# Parse command-line options; any unrecognized parameter falls through to help.
+while [ $# -gt 0 ]; do
+  case "$1" in
+    --warehouse-dir)
+      shift
+      WAREHOUSE_DIR=$1
+      shift
+      ;;
+    --tmp-dir)
+      shift
+      TMP_DIR=$1
+      shift
+      ;;
+    --help)
+      HELP=_help
+      shift
+      ;;
+    *)
+      echo "Invalid parameter: $1"
+      HELP=_help
+      break
+      ;;
+  esac
+done
+
+if [ "$HELP" = "_help" ] ; then
+  echo "Usage $0 [--warehouse-dir <Hive user>] [--tmp-dir <Tmp dir>]"
+  echo "Default value of warehouse directory is: [$DEFAULT_WAREHOUSE_DIR]"
+  echo "Default value of the temporary directory is: [$DEFAULT_TMP_DIR]"
+  # exit status must be 0-255; "exit -1" is non-portable
+  exit 1
+fi
+
+
+# check for hadoop in the path
+HADOOP_IN_PATH=`which hadoop 2>/dev/null`
+# Quoted on purpose: the unquoted test degenerates to "[ -f ]" (always true)
+# when hadoop is not on PATH, which would set a bogus HADOOP_DIR.
+if [ -f "${HADOOP_IN_PATH}" ]; then
+  HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
+fi
+# HADOOP_HOME env variable overrides hadoop in the path
+HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+if [ "$HADOOP_HOME" == "" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+HADOOP_EXEC=$HADOOP_HOME/bin/hadoop
+# Fix: test the variable actually assigned above (HADOOP_EXEC, not the
+# never-set HADOOP), otherwise a missing hadoop binary goes undetected.
+if [ ! -f "${HADOOP_EXEC}" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+
+# Ensure the temporary directory exists and is group-writable
+$HADOOP_EXEC fs -test -d ${TMP_DIR} > /dev/null 2>&1
+if [ $? -ne 0 ] 
+then
+  echo "Creating directory [${TMP_DIR}]"
+  $HADOOP_EXEC fs -mkdir ${TMP_DIR}
+fi
+
+echo "Setting writeable group rights for directory [${TMP_DIR}]"
+$HADOOP_EXEC fs -chmod g+w ${TMP_DIR}
+
+
+# Ensure the warehouse directory exists and is group-writable
+$HADOOP_EXEC fs -test -d ${WAREHOUSE_DIR} > /dev/null 2>&1
+if [ $? -ne 0 ] 
+then
+  echo "Creating directory [${WAREHOUSE_DIR}]"
+  $HADOOP_EXEC fs -mkdir ${WAREHOUSE_DIR}
+fi
+
+echo "Setting writeable group rights for directory [${WAREHOUSE_DIR}]"
+$HADOOP_EXEC fs -chmod g+w ${WAREHOUSE_DIR}
+
+echo "Initialization done."
+echo
+echo "Please, do not forget to set the following configuration properties in hive-site.xml:"
+echo "hive.metastore.warehouse.dir=${WAREHOUSE_DIR}"
+echo "hive.exec.scratchdir=${TMP_DIR}"
+
+exit 0
diff --git a/hivesterix/resource/deploy/balance.jar b/hivesterix/resource/deploy/balance.jar
new file mode 100644
index 0000000..d1bfbee
--- /dev/null
+++ b/hivesterix/resource/deploy/balance.jar
Binary files differ
diff --git a/hivesterix/resource/deploy/deploy.sh b/hivesterix/resource/deploy/deploy.sh
new file mode 100755
index 0000000..27cfd39
--- /dev/null
+++ b/hivesterix/resource/deploy/deploy.sh
@@ -0,0 +1,16 @@
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-anttasks-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-anttasks -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-cli-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-cli -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-common-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-common -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-exec-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-exec -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-hwi-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-hwi -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-jdbc-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-jdbc -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-metastore-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-metastore -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-serde-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-serde -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-service-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-service -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-shims-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-shims -Dversion=0.7.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/libthrift.jar -DgroupId=org.apache.thrift -DartifactId=libthrift -Dversion=0.5.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/libfb303.jar -DgroupId=com.facebook -DartifactId=libfb303 -Dversion=0.5.0 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/commons-cli-1.2.jar -DgroupId=org.apache.commons -DartifactId=cli -Dversion=1.2 -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/jdo2-api-2.3-ec.jar -DgroupId=javax -DartifactId=jdo2-api -Dversion=2.3-ec -Dpackaging=jar

+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/log4j-1.2.15.jar -DgroupId=org.apache -DartifactId=log4j -Dversion=1.2.15 -Dpackaging=jar

+

diff --git a/hivesterix/resource/deploy/jar.sh b/hivesterix/resource/deploy/jar.sh
new file mode 100755
index 0000000..a19b1b8
--- /dev/null
+++ b/hivesterix/resource/deploy/jar.sh
@@ -0,0 +1 @@
+jar cmf manifest.txt .
diff --git a/hivesterix/resource/hivesterix/execute.sh b/hivesterix/resource/hivesterix/execute.sh
new file mode 100755
index 0000000..e9f7ace
--- /dev/null
+++ b/hivesterix/resource/hivesterix/execute.sh
@@ -0,0 +1,13 @@
+LOG=perflog/result.log
+echo "">$LOG
+
+for file in $1/*.hive
+do
+   sleep 10
+   START=$(date +%s)
+   echo $file  
+   ../bin/hive -f $file > perflog/$file
+   END=$(date +%s)
+   DIFF=$(( $END - $START ))
+   echo $file       $DIFF>>$LOG$2
+done
diff --git a/hivesterix/resource/hivesterix/loop.sh b/hivesterix/resource/hivesterix/loop.sh
new file mode 100755
index 0000000..f67896c
--- /dev/null
+++ b/hivesterix/resource/hivesterix/loop.sh
@@ -0,0 +1,4 @@
+# Run the tpch_sample workload ten times; the iteration number becomes the
+# log-file suffix inside execute.sh.
+for((i=1; i<=10; i++))
+do
+	./execute.sh tpch_sample $i
+done
diff --git a/hivesterix/resource/hivesterix/perf.sh b/hivesterix/resource/hivesterix/perf.sh
new file mode 100755
index 0000000..c1d5c0a
--- /dev/null
+++ b/hivesterix/resource/hivesterix/perf.sh
@@ -0,0 +1,28 @@
+#asterix/stopall.sh
+
+#$HADOOP_HOME/bin/stop-all.sh
+#$HADOOP_HOME/bin/start-all.sh
+#sleep 10
+
+#asterix/startall.sh
+
+LOG=perflog/result.log
+echo "">$LOG
+
+for file in $1/*.hive 
+do
+    ../asterix/stopall.sh
+    $HADOOP_HOME/bin/stop-all.sh
+	sleep 10
+	../asterix/startall.sh
+	$HADOOP_HOME/bin/start-dfs.sh
+	sleep 10
+	$HADOOP_HOME/bin/hadoop dfsadmin -safemode leave
+
+	START=$(date +%s)
+ 	echo $file  
+ 	../bin/hive -f $file > perflog/$file
+ 	END=$(date +%s)
+	DIFF=$(( $END - $START ))
+	echo $file	 $DIFF>>$LOG
+done
diff --git a/hivesterix/resource/hivesterix/perflog/tpch100/result.log b/hivesterix/resource/hivesterix/perflog/tpch100/result.log
new file mode 100644
index 0000000..fbf828d
--- /dev/null
+++ b/hivesterix/resource/hivesterix/perflog/tpch100/result.log
@@ -0,0 +1 @@
+log
\ No newline at end of file
diff --git a/hivesterix/resource/hivesterix/perflog/tpch_sample/result.log b/hivesterix/resource/hivesterix/perflog/tpch_sample/result.log
new file mode 100644
index 0000000..fbf828d
--- /dev/null
+++ b/hivesterix/resource/hivesterix/perflog/tpch_sample/result.log
@@ -0,0 +1 @@
+log
\ No newline at end of file
diff --git a/hivesterix/resource/hivesterix/startcluster.sh b/hivesterix/resource/hivesterix/startcluster.sh
new file mode 100755
index 0000000..e7ad708
--- /dev/null
+++ b/hivesterix/resource/hivesterix/startcluster.sh
@@ -0,0 +1,9 @@
+../asterix/stopall.sh
+$HADOOP_HOME/bin/stop-all.sh
+sleep 10
+../asterix/startall.sh
+$HADOOP_HOME/bin/start-dfs.sh
+sleep 10
+$HADOOP_HOME/bin/hadoop dfsadmin -safemode leave
+rm -rf metastore*
+
diff --git a/hivesterix/resource/hivesterix/stress.sh b/hivesterix/resource/hivesterix/stress.sh
new file mode 100755
index 0000000..8dec4c3
--- /dev/null
+++ b/hivesterix/resource/hivesterix/stress.sh
@@ -0,0 +1,5 @@
+# Run the tpch100 workload three times, restarting the cluster before each
+# iteration; the iteration number becomes the log-file suffix.
+for((i=1; i<=3; i++))
+do
+	./startcluster.sh
+	./execute.sh tpch100 $i
+done
diff --git a/hivesterix/resource/hivesterix/tpch100/q10_returned_item.hive b/hivesterix/resource/hivesterix/tpch100/q10_returned_item.hive
new file mode 100644
index 0000000..3fbc0df
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q10_returned_item.hive
@@ -0,0 +1,33 @@
+-- TPC-H Q10 (returned item reporting) over external tables that point at
+-- the scale-100 data set under /tpch/100/ on HDFS.
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q10_returned_item;
+
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+-- the query: revenue lost to returned items ('R' flag) per customer,
+-- for orders placed in 1993-Q4, top 20 by revenue
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
diff --git a/hivesterix/resource/hivesterix/tpch100/q11_important_stock.hive b/hivesterix/resource/hivesterix/tpch100/q11_important_stock.hive
new file mode 100644
index 0000000..4f2f340
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q11_important_stock.hive
@@ -0,0 +1,46 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q11_important_stock;
+DROP TABLE IF EXISTS q11_part_tmp;
+DROP TABLE IF EXISTS q11_sum_tmp;
+
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
diff --git a/hivesterix/resource/hivesterix/tpch100/q12_shipping.hive b/hivesterix/resource/hivesterix/tpch100/q12_shipping.hive
new file mode 100644
index 0000000..92f2bcb
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q12_shipping.hive
@@ -0,0 +1,39 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority ='1-URGENT'
+         or o_orderpriority ='2-HIGH'
+    then 1
+    else 0
+end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/resource/hivesterix/tpch100/q13_customer_distribution.hive b/hivesterix/resource/hivesterix/tpch100/q13_customer_distribution.hive
new file mode 100644
index 0000000..983a42a
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q13_customer_distribution.hive
@@ -0,0 +1,26 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
diff --git a/hivesterix/resource/hivesterix/tpch100/q14_promotion_effect.hive b/hivesterix/resource/hivesterix/tpch100/q14_promotion_effect.hive
new file mode 100644
index 0000000..5966255
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q14_promotion_effect.hive
@@ -0,0 +1,24 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
diff --git a/hivesterix/resource/hivesterix/tpch100/q15_top_supplier.hive b/hivesterix/resource/hivesterix/tpch100/q15_top_supplier.hive
new file mode 100644
index 0000000..57d72e5
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q15_top_supplier.hive
@@ -0,0 +1,41 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS revenue;
+DROP TABLE IF EXISTS max_revenue;
+DROP TABLE IF EXISTS q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
diff --git a/hivesterix/resource/hivesterix/tpch100/q16_parts_supplier_relationship.hive b/hivesterix/resource/hivesterix/tpch100/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..ef5d4a1
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q16_parts_supplier_relationship.hive
@@ -0,0 +1,52 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS q16_parts_supplier_relationship;
+DROP TABLE IF EXISTS q16_tmp;
+DROP TABLE IF EXISTS supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  part p join partsupp ps 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
diff --git a/hivesterix/resource/hivesterix/tpch100/q17_small_quantity_order_revenue.hive b/hivesterix/resource/hivesterix/tpch100/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..201b89a
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,38 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q17_small_quantity_order_revenue;
+DROP TABLE IF EXISTS lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
diff --git a/hivesterix/resource/hivesterix/tpch100/q18_large_volume_customer.hive b/hivesterix/resource/hivesterix/tpch100/q18_large_volume_customer.hive
new file mode 100644
index 0000000..0daf7cf
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q18_large_volume_customer.hive
@@ -0,0 +1,39 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q18_tmp;
+DROP TABLE IF EXISTS q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
diff --git a/hivesterix/resource/hivesterix/tpch100/q19_discounted_revenue.hive b/hivesterix/resource/hivesterix/tpch100/q19_discounted_revenue.hive
new file mode 100644
index 0000000..1c89a36
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q19_discounted_revenue.hive
@@ -0,0 +1,46 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  part p join lineitem l
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+	and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+	and l_quantity >= 1 and l_quantity <= 11
+	and p_size >= 1 and p_size <= 5
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  ) 
+  or 
+  (
+    p_brand = 'Brand#23'
+	and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+	and l_quantity >= 10 and l_quantity <= 20
+	and p_size >= 1 and p_size <= 10
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+	p_brand = 'Brand#34'
+	and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+	and l_quantity >= 20 and l_quantity <= 30
+	and p_size >= 1 and p_size <= 15
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/resource/hivesterix/tpch100/q1_pricing_summary_report.hive b/hivesterix/resource/hivesterix/tpch100/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..f59a074
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q1_pricing_summary_report.hive
@@ -0,0 +1,19 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q1_pricing_summary_report;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/resource/hivesterix/tpch100/q20_potential_part_promotion.hive b/hivesterix/resource/hivesterix/tpch100/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..2a9fd4a
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q20_potential_part_promotion.hive
@@ -0,0 +1,76 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q20_tmp1;
+DROP TABLE IF EXISTS q20_tmp2;
+DROP TABLE IF EXISTS q20_tmp3;
+DROP TABLE IF EXISTS q20_tmp4;
+DROP TABLE IF EXISTS q20_potential_part_promotion;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the target table
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  nation n join supplier s
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
diff --git a/hivesterix/resource/hivesterix/tpch100/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/resource/hivesterix/tpch100/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..9381ef3
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,73 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q21_tmp1;
+DROP TABLE IF EXISTS q21_tmp2;
+DROP TABLE IF EXISTS q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+(select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+ from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 join orders o on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 join q21_tmp1 t1 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  )c
+group by s_name
+order by numwait desc, s_name
+limit 100;
+
diff --git a/hivesterix/resource/hivesterix/tpch100/q22_global_sales_opportunity.hive b/hivesterix/resource/hivesterix/tpch100/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..0b418fa
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q22_global_sales_opportunity.hive
@@ -0,0 +1,70 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q22_customer_tmp;
+DROP TABLE IF EXISTS q22_customer_tmp1;
+DROP TABLE IF EXISTS q22_orders_tmp;
+DROP TABLE IF EXISTS q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/resource/hivesterix/tpch100/q2_minimum_cost_supplier.hive b/hivesterix/resource/hivesterix/tpch100/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..7d4a41a
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q2_minimum_cost_supplier.hive
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp1;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on  
+s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS' ;
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/resource/hivesterix/tpch100/q3_shipping_priority.hive b/hivesterix/resource/hivesterix/tpch100/q3_shipping_priority.hive
new file mode 100644
index 0000000..2758210
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q3_shipping_priority.hive
@@ -0,0 +1,27 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q3_shipping_priority;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+-- the query
+Insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
diff --git a/hivesterix/resource/hivesterix/tpch100/q4_order_priority.hive b/hivesterix/resource/hivesterix/tpch100/q4_order_priority.hive
new file mode 100644
index 0000000..c8da39b
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q4_order_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q4_order_priority_tmp;
+DROP TABLE IF EXISTS q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
+
diff --git a/hivesterix/resource/hivesterix/tpch100/q5_local_supplier_volume.hive b/hivesterix/resource/hivesterix/tpch100/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..87a18df
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q5_local_supplier_volume.hive
@@ -0,0 +1,39 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from 
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from
+        ( select n_name, s_suppkey, s_nationkey from
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 join supplier s on s.s_nationkey = n1.n_nationkey
+        ) s1  join lineitem l on l.l_suppkey = s1.s_suppkey
+      ) l1 join orders o on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
diff --git a/hivesterix/resource/hivesterix/tpch100/q6_forecast_revenue_change.hive b/hivesterix/resource/hivesterix/tpch100/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..e4e5d79
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q6_forecast_revenue_change.hive
@@ -0,0 +1,20 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
diff --git a/hivesterix/resource/hivesterix/tpch100/q7_volume_shipping.hive b/hivesterix/resource/hivesterix/tpch100/q7_volume_shipping.hive
new file mode 100644
index 0000000..c0dd1de
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q7_volume_shipping.hive
@@ -0,0 +1,67 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q7_volume_shipping;
+DROP TABLE IF EXISTS q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    UNION ALL
+select 
+  n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+  n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
diff --git a/hivesterix/resource/hivesterix/tpch100/q8_national_market_share.hive b/hivesterix/resource/hivesterix/tpch100/q8_national_market_share.hive
new file mode 100644
index 0000000..d98871f
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q8_national_market_share.hive
@@ -0,0 +1,55 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+select 
+  year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+  n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from 
+                (select o_orderdate, o_orderkey 
+                 from 
+                   (select c.c_custkey 
+                    from 
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 join customer c on c.c_nationkey = n11.n_nationkey
+                    ) c1 join orders o on c1.c_custkey = o.o_custkey
+                 ) o1 join lineitem l on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate <= '1996-12-31' -- inclusive upper bound: TPC-H Q8 uses BETWEEN '1995-01-01' AND '1996-12-31' (cf. q7 in this commit)
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
diff --git a/hivesterix/resource/hivesterix/tpch100/q9_product_type_profit.hive b/hivesterix/resource/hivesterix/tpch100/q9_product_type_profit.hive
new file mode 100644
index 0000000..1d1a19d
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch100/q9_product_type_profit.hive
@@ -0,0 +1,46 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+select 
+  n_name as nation, year(o_orderdate) as o_year, 
+  l_extendedprice * (1 - l_discount) -  ps_supplycost * l_quantity as amount
+    from
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 join orders o on o.o_orderkey = l3.l_orderkey
+  )profit
+group by nation, o_year
+order by nation, o_year desc;
diff --git a/hivesterix/resource/hivesterix/tpch_sample/q1_pricing_summary_report.hive b/hivesterix/resource/hivesterix/tpch_sample/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..1785b48
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch_sample/q1_pricing_summary_report.hive
@@ -0,0 +1,19 @@
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
+
+DROP TABLE lineitem;
+DROP TABLE q1_pricing_summary_report;
diff --git a/hivesterix/resource/hivesterix/tpch_sample/q3_shipping_priority.hive b/hivesterix/resource/hivesterix/tpch_sample/q3_shipping_priority.hive
new file mode 100644
index 0000000..1dc68a2
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch_sample/q3_shipping_priority.hive
@@ -0,0 +1,27 @@
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+-- the query
+Insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE customer;
+DROP TABLE q3_shipping_priority;
diff --git a/hivesterix/resource/hivesterix/tpch_sample/q5_local_supplier_volume.hive b/hivesterix/resource/hivesterix/tpch_sample/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..be91a25
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch_sample/q5_local_supplier_volume.hive
@@ -0,0 +1,39 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q5_local_supplier_volume;
diff --git a/hivesterix/resource/hivesterix/tpch_sample/q9_product_type_profit.hive b/hivesterix/resource/hivesterix/tpch_sample/q9_product_type_profit.hive
new file mode 100644
index 0000000..c95a92b
--- /dev/null
+++ b/hivesterix/resource/hivesterix/tpch_sample/q9_product_type_profit.hive
@@ -0,0 +1,47 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+select 
+  n_name as nation, year(o_orderdate) as o_year, 
+  l_extendedprice * (1 - l_discount) -  ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  )profit
+group by nation, o_year
+order by nation, o_year desc;
+
+DROP TABLE part;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE orders;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE q9_product_type_profit;
diff --git a/hivesterix/resource/tpch/q10_returned_item.hive b/hivesterix/resource/tpch/q10_returned_item.hive
new file mode 100644
index 0000000..b6535cb
--- /dev/null
+++ b/hivesterix/resource/tpch/q10_returned_item.hive
@@ -0,0 +1,37 @@
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE nation;
+DROP TABLE q10_returned_item;
+
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
+
diff --git a/hivesterix/resource/tpch/q11_important_stock.hive b/hivesterix/resource/tpch/q11_important_stock.hive
new file mode 100644
index 0000000..bfa3743
--- /dev/null
+++ b/hivesterix/resource/tpch/q11_important_stock.hive
@@ -0,0 +1,47 @@
+DROP TABLE partsupp;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q11_important_stock;
+DROP TABLE q11_part_tmp;
+DROP TABLE q11_sum_tmp;
+
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
+
diff --git a/hivesterix/resource/tpch/q12_shipping.hive b/hivesterix/resource/tpch/q12_shipping.hive
new file mode 100644
index 0000000..0ae896c
--- /dev/null
+++ b/hivesterix/resource/tpch/q12_shipping.hive
@@ -0,0 +1,42 @@
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority ='1-URGENT'
+         or o_orderpriority ='2-HIGH'
+    then 1
+    else 0
+end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/resource/tpch/q13_customer_distribution.hive b/hivesterix/resource/tpch/q13_customer_distribution.hive
new file mode 100644
index 0000000..dd3674d
--- /dev/null
+++ b/hivesterix/resource/tpch/q13_customer_distribution.hive
@@ -0,0 +1,27 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
diff --git a/hivesterix/resource/tpch/q14_promotion_effect.hive b/hivesterix/resource/tpch/q14_promotion_effect.hive
new file mode 100644
index 0000000..a7ea773
--- /dev/null
+++ b/hivesterix/resource/tpch/q14_promotion_effect.hive
@@ -0,0 +1,28 @@
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
+
diff --git a/hivesterix/resource/tpch/q15_top_supplier.hive b/hivesterix/resource/tpch/q15_top_supplier.hive
new file mode 100644
index 0000000..b38ba2c
--- /dev/null
+++ b/hivesterix/resource/tpch/q15_top_supplier.hive
@@ -0,0 +1,45 @@
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE revenue;
+DROP TABLE max_revenue;
+DROP TABLE q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
+
diff --git a/hivesterix/resource/tpch/q16_parts_supplier_relationship.hive b/hivesterix/resource/tpch/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..495a5ea
--- /dev/null
+++ b/hivesterix/resource/tpch/q16_parts_supplier_relationship.hive
@@ -0,0 +1,53 @@
+DROP TABLE partsupp;
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE q16_parts_supplier_relationship;
+DROP TABLE q16_tmp;
+DROP TABLE supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
diff --git a/hivesterix/resource/tpch/q17_small_quantity_order_revenue.hive b/hivesterix/resource/tpch/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..448b8f3
--- /dev/null
+++ b/hivesterix/resource/tpch/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,38 @@
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q17_small_quantity_order_revenue;
+DROP TABLE lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
diff --git a/hivesterix/resource/tpch/q18_large_volume_customer.hive b/hivesterix/resource/tpch/q18_large_volume_customer.hive
new file mode 100644
index 0000000..04081ad
--- /dev/null
+++ b/hivesterix/resource/tpch/q18_large_volume_customer.hive
@@ -0,0 +1,43 @@
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE q18_tmp;
+DROP TABLE q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
+
diff --git a/hivesterix/resource/tpch/q19_discounted_revenue.hive b/hivesterix/resource/tpch/q19_discounted_revenue.hive
new file mode 100644
index 0000000..1e821ca
--- /dev/null
+++ b/hivesterix/resource/tpch/q19_discounted_revenue.hive
@@ -0,0 +1,49 @@
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  lineitem l join part p
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+	and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+	and l_quantity >= 1 and l_quantity <= 11
+	and p_size >= 1 and p_size <= 5
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  ) 
+  or 
+  (
+    p_brand = 'Brand#23'
+	and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+	and l_quantity >= 10 and l_quantity <= 20
+	and p_size >= 1 and p_size <= 10
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+	p_brand = 'Brand#34'
+	and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+	and l_quantity >= 20 and l_quantity <= 30
+	and p_size >= 1 and p_size <= 15
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/resource/tpch/q1_pricing_summary_report.hive b/hivesterix/resource/tpch/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..94b2913
--- /dev/null
+++ b/hivesterix/resource/tpch/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+DROP TABLE lineitem;
+DROP TABLE q1_pricing_summary_report;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+-- INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/resource/tpch/q20_potential_part_promotion.hive b/hivesterix/resource/tpch/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..40ae423
--- /dev/null
+++ b/hivesterix/resource/tpch/q20_potential_part_promotion.hive
@@ -0,0 +1,77 @@
+DROP TABLE partsupp;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q20_tmp1;
+DROP TABLE q20_tmp2;
+DROP TABLE q20_tmp3;
+DROP TABLE q20_tmp4;
+DROP TABLE q20_potential_part_promotion;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+
+-- create the target table
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  supplier s join nation n
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+
diff --git a/hivesterix/resource/tpch/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/resource/tpch/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..0418540
--- /dev/null
+++ b/hivesterix/resource/tpch/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,74 @@
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q21_tmp1;
+DROP TABLE q21_tmp2;
+DROP TABLE q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+(select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+ from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  )c
+group by s_name
+order by numwait desc, s_name
+limit 100;
diff --git a/hivesterix/resource/tpch/q22_global_sales_opportunity.hive b/hivesterix/resource/tpch/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..379cfc9
--- /dev/null
+++ b/hivesterix/resource/tpch/q22_global_sales_opportunity.hive
@@ -0,0 +1,70 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q22_customer_tmp;
+DROP TABLE q22_customer_tmp1;
+DROP TABLE q22_orders_tmp;
+DROP TABLE q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/resource/tpch/q2_copy.hive b/hivesterix/resource/tpch/q2_copy.hive
new file mode 100644
index 0000000..647d500
--- /dev/null
+++ b/hivesterix/resource/tpch/q2_copy.hive
@@ -0,0 +1,46 @@
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q2_minimum_cost_supplier;
+DROP TABLE q2_minimum_cost_supplier_tmp1;
+DROP TABLE q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on  
+s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and  p.p_size = 15 ;
+
+-- explain insert overwrite table q2_minimum_cost_supplier_tmp2
+-- select
+--  p_partkey, min(ps_supplycost)
+-- from
+--  q2_minimum_cost_supplier_tmp1
+-- group by p_partkey
+
diff --git a/hivesterix/resource/tpch/q2_minimum_cost_supplier.hive b/hivesterix/resource/tpch/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..7a68ee2
--- /dev/null
+++ b/hivesterix/resource/tpch/q2_minimum_cost_supplier.hive
@@ -0,0 +1,56 @@
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q2_minimum_cost_supplier;
+DROP TABLE q2_minimum_cost_supplier_tmp1;
+DROP TABLE q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on  
+s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS' ;
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/resource/tpch/q3_shipping_priority.hive b/hivesterix/resource/tpch/q3_shipping_priority.hive
new file mode 100644
index 0000000..888775e
--- /dev/null
+++ b/hivesterix/resource/tpch/q3_shipping_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE customer;
+DROP TABLE q3_shipping_priority;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+Insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
diff --git a/hivesterix/resource/tpch/q4_order_priority.hive b/hivesterix/resource/tpch/q4_order_priority.hive
new file mode 100644
index 0000000..18c8d9d
--- /dev/null
+++ b/hivesterix/resource/tpch/q4_order_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE q4_order_priority_tmp;
+DROP TABLE q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
diff --git a/hivesterix/resource/tpch/q5_local_supplier_volume.hive b/hivesterix/resource/tpch/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..f5b10d8
--- /dev/null
+++ b/hivesterix/resource/tpch/q5_local_supplier_volume.hive
@@ -0,0 +1,42 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
diff --git a/hivesterix/resource/tpch/q6_forecast_revenue_change.hive b/hivesterix/resource/tpch/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..72900c7
--- /dev/null
+++ b/hivesterix/resource/tpch/q6_forecast_revenue_change.hive
@@ -0,0 +1,21 @@
+DROP TABLE lineitem;
+DROP TABLE q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
diff --git a/hivesterix/resource/tpch/q7_volume_shipping.hive b/hivesterix/resource/tpch/q7_volume_shipping.hive
new file mode 100644
index 0000000..da6eab2
--- /dev/null
+++ b/hivesterix/resource/tpch/q7_volume_shipping.hive
@@ -0,0 +1,71 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q7_volume_shipping;
+DROP TABLE q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    UNION ALL
+select 
+  n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+  n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
diff --git a/hivesterix/resource/tpch/q8_national_market_share.hive b/hivesterix/resource/tpch/q8_national_market_share.hive
new file mode 100644
index 0000000..ae2abec
--- /dev/null
+++ b/hivesterix/resource/tpch/q8_national_market_share.hive
@@ -0,0 +1,56 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE part;
+DROP TABLE q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+select 
+  year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+  n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from lineitem l join
+                (select o_orderdate, o_orderkey 
+                 from orders o join
+                   (select c.c_custkey 
+                    from customer c join
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 on c.c_nationkey = n11.n_nationkey
+                    ) c1 on c1.c_custkey = o.o_custkey
+                 ) o1 on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate <= '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
diff --git a/hivesterix/resource/tpch/q9_product_type_profit.hive b/hivesterix/resource/tpch/q9_product_type_profit.hive
new file mode 100644
index 0000000..bc8ba3f
--- /dev/null
+++ b/hivesterix/resource/tpch/q9_product_type_profit.hive
@@ -0,0 +1,51 @@
+DROP TABLE part;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE orders;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+select 
+  n_name as nation, year(o_orderdate) as o_year, 
+  l_extendedprice * (1 - l_discount) -  ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  )profit
+group by nation, o_year
+order by nation, o_year desc;
+
diff --git a/hivesterix/resource/tpch100/q10_returned_item.hive b/hivesterix/resource/tpch100/q10_returned_item.hive
new file mode 100644
index 0000000..1e4e3c6
--- /dev/null
+++ b/hivesterix/resource/tpch100/q10_returned_item.hive
@@ -0,0 +1,36 @@
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
+
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE nation;
+DROP TABLE q10_returned_item;
diff --git a/hivesterix/resource/tpch100/q11_important_stock.hive b/hivesterix/resource/tpch100/q11_important_stock.hive
new file mode 100644
index 0000000..271b614
--- /dev/null
+++ b/hivesterix/resource/tpch100/q11_important_stock.hive
@@ -0,0 +1,46 @@
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
+
+DROP TABLE partsupp;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q11_important_stock;
+DROP TABLE q11_part_tmp;
+DROP TABLE q11_sum_tmp;
\ No newline at end of file
diff --git a/hivesterix/resource/tpch100/q12_shipping.hive b/hivesterix/resource/tpch100/q12_shipping.hive
new file mode 100644
index 0000000..cd5c8aa
--- /dev/null
+++ b/hivesterix/resource/tpch100/q12_shipping.hive
@@ -0,0 +1,42 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority ='1-URGENT'
+         or o_orderpriority ='2-HIGH'
+    then 1
+    else 0
+end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
+
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE q12_shipping;
diff --git a/hivesterix/resource/tpch100/q13_customer_distribution.hive b/hivesterix/resource/tpch100/q13_customer_distribution.hive
new file mode 100644
index 0000000..dc7f832
--- /dev/null
+++ b/hivesterix/resource/tpch100/q13_customer_distribution.hive
@@ -0,0 +1,26 @@
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q13_customer_distribution;
diff --git a/hivesterix/resource/tpch100/q14_promotion_effect.hive b/hivesterix/resource/tpch100/q14_promotion_effect.hive
new file mode 100644
index 0000000..ca6d6b2
--- /dev/null
+++ b/hivesterix/resource/tpch100/q14_promotion_effect.hive
@@ -0,0 +1,27 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
+
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q14_promotion_effect;
diff --git a/hivesterix/resource/tpch100/q15_top_supplier.hive b/hivesterix/resource/tpch100/q15_top_supplier.hive
new file mode 100644
index 0000000..d3d73c3
--- /dev/null
+++ b/hivesterix/resource/tpch100/q15_top_supplier.hive
@@ -0,0 +1,44 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
+
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE revenue;
+DROP TABLE max_revenue;
+DROP TABLE q15_top_supplier;
diff --git a/hivesterix/resource/tpch100/q16_parts_supplier_relationship.hive b/hivesterix/resource/tpch100/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..b551581
--- /dev/null
+++ b/hivesterix/resource/tpch100/q16_parts_supplier_relationship.hive
@@ -0,0 +1,52 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
+DROP TABLE partsupp;
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE q16_parts_supplier_relationship;
+DROP TABLE q16_tmp;
+DROP TABLE supplier_tmp;
diff --git a/hivesterix/resource/tpch100/q17_small_quantity_order_revenue.hive b/hivesterix/resource/tpch100/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..14e87db
--- /dev/null
+++ b/hivesterix/resource/tpch100/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,38 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
+
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q17_small_quantity_order_revenue;
+DROP TABLE lineitem_tmp;
diff --git a/hivesterix/resource/tpch100/q18_large_volume_customer.hive b/hivesterix/resource/tpch100/q18_large_volume_customer.hive
new file mode 100644
index 0000000..f61bd79
--- /dev/null
+++ b/hivesterix/resource/tpch100/q18_large_volume_customer.hive
@@ -0,0 +1,42 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
+
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE q18_tmp;
+DROP TABLE q18_large_volume_customer;
diff --git a/hivesterix/resource/tpch100/q19_discounted_revenue.hive b/hivesterix/resource/tpch100/q19_discounted_revenue.hive
new file mode 100644
index 0000000..cb77a06
--- /dev/null
+++ b/hivesterix/resource/tpch100/q19_discounted_revenue.hive
@@ -0,0 +1,49 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  lineitem l join part p
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+	and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+	and l_quantity >= 1 and l_quantity <= 11
+	and p_size >= 1 and p_size <= 5
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  ) 
+  or 
+  (
+    p_brand = 'Brand#23'
+	and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+	and l_quantity >= 10 and l_quantity <= 20
+	and p_size >= 1 and p_size <= 10
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+	p_brand = 'Brand#34'
+	and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+	and l_quantity >= 20 and l_quantity <= 30
+	and p_size >= 1 and p_size <= 15
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  );
+
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q19_discounted_revenue;
diff --git a/hivesterix/resource/tpch100/q1_pricing_summary_report.hive b/hivesterix/resource/tpch100/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..1899b5c
--- /dev/null
+++ b/hivesterix/resource/tpch100/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
+
+DROP TABLE lineitem;
+DROP TABLE q1_pricing_summary_report;
\ No newline at end of file
diff --git a/hivesterix/resource/tpch100/q20_potential_part_promotion.hive b/hivesterix/resource/tpch100/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..d254793
--- /dev/null
+++ b/hivesterix/resource/tpch100/q20_potential_part_promotion.hive
@@ -0,0 +1,76 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the target table
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  supplier s join nation n
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+DROP TABLE partsupp;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q20_tmp1;
+DROP TABLE q20_tmp2;
+DROP TABLE q20_tmp3;
+DROP TABLE q20_tmp4;
+DROP TABLE q20_potential_part_promotion;
diff --git a/hivesterix/resource/tpch100/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/resource/tpch100/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..6e0344c
--- /dev/null
+++ b/hivesterix/resource/tpch100/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,74 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+(select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+ from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  )c
+group by s_name
+order by numwait desc, s_name
+limit 100;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q21_tmp1;
+DROP TABLE q21_tmp2;
+DROP TABLE q21_suppliers_who_kept_orders_waiting;
diff --git a/hivesterix/resource/tpch100/q22_global_sales_opportunity.hive b/hivesterix/resource/tpch100/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..381aa9f
--- /dev/null
+++ b/hivesterix/resource/tpch100/q22_global_sales_opportunity.hive
@@ -0,0 +1,69 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q22_customer_tmp;
+DROP TABLE q22_customer_tmp1;
+DROP TABLE q22_orders_tmp;
+DROP TABLE q22_global_sales_opportunity;
diff --git a/hivesterix/resource/tpch100/q2_minimum_cost_supplier.hive b/hivesterix/resource/tpch100/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..afea998
--- /dev/null
+++ b/hivesterix/resource/tpch100/q2_minimum_cost_supplier.hive
@@ -0,0 +1,55 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on  
+s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS' ;
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q2_minimum_cost_supplier;
+DROP TABLE q2_minimum_cost_supplier_tmp1;
+DROP TABLE q2_minimum_cost_supplier_tmp2;
diff --git a/hivesterix/resource/tpch100/q3_shipping_priority.hive b/hivesterix/resource/tpch100/q3_shipping_priority.hive
new file mode 100644
index 0000000..9e82c99
--- /dev/null
+++ b/hivesterix/resource/tpch100/q3_shipping_priority.hive
@@ -0,0 +1,30 @@
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+Insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE customer;
+DROP TABLE q3_shipping_priority;
diff --git a/hivesterix/resource/tpch100/q4_order_priority.hive b/hivesterix/resource/tpch100/q4_order_priority.hive
new file mode 100644
index 0000000..decc493
--- /dev/null
+++ b/hivesterix/resource/tpch100/q4_order_priority.hive
@@ -0,0 +1,30 @@
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE q4_order_priority_tmp;
+DROP TABLE q4_order_priority;
diff --git a/hivesterix/resource/tpch100/q5_local_supplier_volume.hive b/hivesterix/resource/tpch100/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..bd10d75
--- /dev/null
+++ b/hivesterix/resource/tpch100/q5_local_supplier_volume.hive
@@ -0,0 +1,41 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q5_local_supplier_volume;
diff --git a/hivesterix/resource/tpch100/q6_forecast_revenue_change.hive b/hivesterix/resource/tpch100/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..4840fb0
--- /dev/null
+++ b/hivesterix/resource/tpch100/q6_forecast_revenue_change.hive
@@ -0,0 +1,20 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
+DROP TABLE lineitem;
+DROP TABLE q6_forecast_revenue_change;
diff --git a/hivesterix/resource/tpch100/q7_volume_shipping.hive b/hivesterix/resource/tpch100/q7_volume_shipping.hive
new file mode 100644
index 0000000..dd6b416
--- /dev/null
+++ b/hivesterix/resource/tpch100/q7_volume_shipping.hive
@@ -0,0 +1,70 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    UNION ALL
+select 
+  n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+  n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q7_volume_shipping;
+DROP TABLE q7_volume_shipping_tmp;
diff --git a/hivesterix/resource/tpch100/q8_national_market_share.hive b/hivesterix/resource/tpch100/q8_national_market_share.hive
new file mode 100644
index 0000000..72d8b69
--- /dev/null
+++ b/hivesterix/resource/tpch100/q8_national_market_share.hive
@@ -0,0 +1,55 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+select 
+  year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+  n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from lineitem l join
+                (select o_orderdate, o_orderkey 
+                 from orders o join
+                   (select c.c_custkey 
+                    from customer c join
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 on c.c_nationkey = n11.n_nationkey
+                    ) c1 on c1.c_custkey = o.o_custkey
+                 ) o1 on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate < '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE part;
+DROP TABLE q8_national_market_share;
diff --git a/hivesterix/resource/tpch100/q9_product_type_profit.hive b/hivesterix/resource/tpch100/q9_product_type_profit.hive
new file mode 100644
index 0000000..2519475
--- /dev/null
+++ b/hivesterix/resource/tpch100/q9_product_type_profit.hive
@@ -0,0 +1,50 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+select 
+  n_name as nation, year(o_orderdate) as o_year, 
+  l_extendedprice * (1 - l_discount) -  ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  )profit
+group by nation, o_year
+order by nation, o_year desc;
+
+DROP TABLE part;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE orders;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE q9_product_type_profit;
diff --git a/hyracks-cli/src/main/assembly/binary-assembly.xml b/hivesterix/src/main/assembly/binary-assembly.xml
old mode 100644
new mode 100755
similarity index 100%
copy from hyracks-cli/src/main/assembly/binary-assembly.xml
copy to hivesterix/src/main/assembly/binary-assembly.xml
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java
new file mode 100644
index 0000000..3c84566
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hivesterix.logical.expression;

+

+/**

+ * some constants for expression

+ * 

+ * @author yingyib

+ * 

+ */

+public class ExpressionConstant {

+

+	/**

+	 * name space for function identifier

+	 */

+	public static String NAMESPACE = "hive";

+

+	/**

+	 * field expression: modeled as function in Algebricks

+	 */

+	public static String FIELDACCESS = "fieldaccess";

+

+	/**

+	 * null string: modeled as null in Algebricks

+	 */

+	public static String NULL = "null";

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java
new file mode 100644
index 0000000..18380f7
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java
@@ -0,0 +1,83 @@
+package edu.uci.ics.hivesterix.logical.expression;

+

+import java.util.HashMap;

+

+import org.apache.hadoop.hive.ql.exec.Description;

+

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+

+public class HiveAlgebricksBuiltInFunctionMap {

+

+	/**

+	 * singleton instance of the hive/Algebricks builtin function map

+	 */

+	public static HiveAlgebricksBuiltInFunctionMap INSTANCE = new HiveAlgebricksBuiltInFunctionMap();

+

+	/**

+	 * hive to Algebricks function name mapping

+	 */

+	private HashMap<String, FunctionIdentifier> hiveToAlgebricksMap = new HashMap<String, FunctionIdentifier>();

+

+	/**

+	 * Algebricks to hive function name mapping

+	 */

+	private HashMap<FunctionIdentifier, String> AlgebricksToHiveMap = new HashMap<FunctionIdentifier, String>();

+

+	/**

+	 * the bi-directional mapping between hive functions and Algebricks

+	 * functions

+	 */

+	private HiveAlgebricksBuiltInFunctionMap() {

+		hiveToAlgebricksMap.put("and", AlgebricksBuiltinFunctions.AND);

+		hiveToAlgebricksMap.put("or", AlgebricksBuiltinFunctions.OR);

+		hiveToAlgebricksMap.put("!", AlgebricksBuiltinFunctions.NOT);

+		hiveToAlgebricksMap.put("not", AlgebricksBuiltinFunctions.NOT);

+		hiveToAlgebricksMap.put("=", AlgebricksBuiltinFunctions.EQ);

+		hiveToAlgebricksMap.put("<>", AlgebricksBuiltinFunctions.NEQ);

+		hiveToAlgebricksMap.put(">", AlgebricksBuiltinFunctions.GT);

+		hiveToAlgebricksMap.put("<", AlgebricksBuiltinFunctions.LT);

+		hiveToAlgebricksMap.put(">=", AlgebricksBuiltinFunctions.GE);

+		hiveToAlgebricksMap.put("<=", AlgebricksBuiltinFunctions.LE);

+

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.AND, "and");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.OR, "or");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NOT, "!");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NOT, "not");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.EQ, "=");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.NEQ, "<>");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.GT, ">");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.LT, "<");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.GE, ">=");

+		AlgebricksToHiveMap.put(AlgebricksBuiltinFunctions.LE, "<=");

+	}

+

+	/**

+	 * get hive function name from Algebricks function identifier

+	 * 

+	 * @param AlgebricksId

+	 * @return the hive function name, or null if there is no mapping

+	 */

+	public String getHiveFunctionName(FunctionIdentifier AlgebricksId) {

+		return AlgebricksToHiveMap.get(AlgebricksId);

+	}

+

+	/**

+	 * get the built-in function identifier corresponding to a hive UDF or GenericUDF class

+	 * 

+	 * @param funcClass

+	 * @return function identifier

+	 */

+	public FunctionIdentifier getAlgebricksFunctionId(Class<?> funcClass) {

+		Description annotation = (Description) funcClass

+				.getAnnotation(Description.class);

+		String hiveUDFName = "";

+		if (annotation == null) {

+			hiveUDFName = null;

+			return null;

+		} else {

+			hiveUDFName = annotation.name();

+			return hiveToAlgebricksMap.get(hiveUDFName);

+		}

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java
new file mode 100644
index 0000000..afb7d39
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java
@@ -0,0 +1,200 @@
+package edu.uci.ics.hivesterix.logical.expression;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.FunctionInfo;

+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

+import org.apache.hadoop.hive.ql.metadata.HiveException;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+

+public class HiveExpressionTypeComputer implements IExpressionTypeComputer {

+

+	public static IExpressionTypeComputer INSTANCE = new HiveExpressionTypeComputer();

+

+	@Override

+	public Object getType(ILogicalExpression expr,

+			IMetadataProvider<?, ?> metadataProvider,

+			IVariableTypeEnvironment env) throws AlgebricksException {

+		if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {

+			/**

+			 * function expression

+			 */

+			AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;

+			IFunctionInfo funcInfo = funcExpr.getFunctionInfo();

+

+			/**

+			 * argument expressions, types, object inspectors

+			 */

+			List<Mutable<ILogicalExpression>> arguments = funcExpr

+					.getArguments();

+			List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();

+

+			/**

+			 * get types of argument

+			 */

+			for (Mutable<ILogicalExpression> argument : arguments) {

+				TypeInfo type = (TypeInfo) getType(argument.getValue(),

+						metadataProvider, env);

+				argumentTypes.add(type);

+			}

+

+			ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes

+					.size()];

+

+			/**

+			 * get object inspector

+			 */

+			for (int i = 0; i < argumentTypes.size(); i++) {

+				childrenOIs[i] = TypeInfoUtils

+						.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes

+								.get(i));

+			}

+

+			/**

+			 * type inference for scalar function

+			 */

+			if (funcExpr instanceof ScalarFunctionCallExpression) {

+

+				FunctionIdentifier AlgebricksId = funcInfo

+						.getFunctionIdentifier();

+				Object functionInfo = ((HiveFunctionInfo) funcInfo).getInfo();

+				String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE

+						.getHiveFunctionName(AlgebricksId);

+				GenericUDF udf;

+				if (udfName != null) {

+					/**

+					 * get corresponding function info for built-in functions

+					 */

+					FunctionInfo fInfo = FunctionRegistry

+							.getFunctionInfo(udfName);

+					udf = fInfo.getGenericUDF();

+				} else if (functionInfo != null) {

+					/**

+					 * for GenericUDFBridge: we should not call get type of this

+					 * hive expression, because parameters may have been

+					 * changed!

+					 */

+					ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) functionInfo;

+					udf = hiveExpr.getGenericUDF();

+				} else {

+					/**

+					 * for other generic UDF

+					 */

+					Class<?> udfClass;

+					try {

+						udfClass = Class.forName(AlgebricksId.getName());

+						udf = (GenericUDF) udfClass.newInstance();

+					} catch (Exception e) {

+						e.printStackTrace();

+						throw new AlgebricksException(e.getMessage());

+					}

+				}

+				/**

+				 * doing the actual type inference

+				 */

+				ObjectInspector oi = null;

+				try {

+					oi = udf.initialize(childrenOIs);

+				} catch (Exception e) {

+					e.printStackTrace();

+				}

+

+				TypeInfo exprType = TypeInfoUtils

+						.getTypeInfoFromObjectInspector(oi);

+				return exprType;

+

+			} else if (funcExpr instanceof AggregateFunctionCallExpression) {

+				/**

+				 * hive aggregation info

+				 */

+				AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr

+						.getFunctionInfo()).getInfo();

+				/**

+				 * type inference for aggregation function

+				 */

+				GenericUDAFEvaluator result = aggregateDesc

+						.getGenericUDAFEvaluator();

+

+				ObjectInspector returnOI = null;

+				try {

+					returnOI = result

+							.init(aggregateDesc.getMode(), childrenOIs);

+				} catch (HiveException e) {

+					e.printStackTrace();

+				}

+				TypeInfo exprType = TypeInfoUtils

+						.getTypeInfoFromObjectInspector(returnOI);

+				return exprType;

+			} else if (funcExpr instanceof UnnestingFunctionCallExpression) {

+				/**

+				 * type inference for UDTF function

+				 */

+				UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr

+						.getFunctionInfo()).getInfo();

+				GenericUDTF udtf = hiveDesc.getGenericUDTF();

+				ObjectInspector returnOI = null;

+				try {

+					returnOI = udtf.initialize(childrenOIs);

+				} catch (HiveException e) {

+					e.printStackTrace();

+				}

+				TypeInfo exprType = TypeInfoUtils

+						.getTypeInfoFromObjectInspector(returnOI);

+				return exprType;

+			} else {

+				throw new IllegalStateException(

+						"unrecognized function expression "

+								+ expr.getClass().getName());

+			}

+		} else if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {

+			/**

+			 * get type for variable in the environment

+			 */

+			VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;

+			LogicalVariable var = varExpr.getVariableReference();

+			TypeInfo type = (TypeInfo) env.getVarType(var);

+			return type;

+		} else if (expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {

+			/**

+			 * get type for constant, from its java class

+			 */

+			ConstantExpression constExpr = (ConstantExpression) expr;

+			HivesterixConstantValue value = (HivesterixConstantValue) constExpr

+					.getValue();

+			TypeInfo type = TypeInfoFactory

+					.getPrimitiveTypeInfoFromJavaPrimitive(value.getObject()

+							.getClass());

+			return type;

+		} else {

+			throw new IllegalStateException("illegal expressions "

+					+ expr.getClass().getName());

+		}

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java
new file mode 100644
index 0000000..220bd00
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.hivesterix.logical.expression;

+

+import java.io.Serializable;

+

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;

+

+public class HiveFunctionInfo implements IFunctionInfo, Serializable {

+

+	private static final long serialVersionUID = 1L;

+

+	/**

+	 * primary function identifier

+	 */

+	private transient FunctionIdentifier fid;

+

+	/**

+	 * secondary function identifier: function name

+	 */

+	private transient Object secondaryFid;

+

+	public HiveFunctionInfo(FunctionIdentifier fid, Object secondFid) {

+		this.fid = fid;

+		this.secondaryFid = secondFid;

+	}

+

+	@Override

+	public FunctionIdentifier getFunctionIdentifier() {

+		return fid;

+	}

+

+	public Object getInfo() {

+		return secondaryFid;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java
new file mode 100644
index 0000000..8dea691
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java
@@ -0,0 +1,84 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+/**
+ * generate merge aggregation expression from an aggregation expression
+ * 
+ * @author yingyib
+ * 
+ */
+public class HiveMergeAggregationExpressionFactory implements
+		IMergeAggregationExpressionFactory {
+
+	public static IMergeAggregationExpressionFactory INSTANCE = new HiveMergeAggregationExpressionFactory();
+
+	@Override
+	public ILogicalExpression createMergeAggregation(ILogicalExpression expr,
+			IOptimizationContext context) throws AlgebricksException {
+		/**
+		 * type inference for scalar function
+		 */
+		if (expr instanceof AggregateFunctionCallExpression) {
+			AggregateFunctionCallExpression funcExpr = (AggregateFunctionCallExpression) expr;
+			/**
+			 * hive aggregation info
+			 */
+			AggregationDesc aggregator = (AggregationDesc) ((HiveFunctionInfo) funcExpr
+					.getFunctionInfo()).getInfo();
+			LogicalVariable inputVar = context.newVar();
+			ExprNodeDesc col = new ExprNodeColumnDesc(
+					TypeInfoFactory.voidTypeInfo, inputVar.toString(), null,
+					false);
+			ArrayList<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();
+			parameters.add(col);
+
+			GenericUDAFEvaluator.Mode mergeMode;
+			if (aggregator.getMode() == GenericUDAFEvaluator.Mode.PARTIAL1)
+				mergeMode = GenericUDAFEvaluator.Mode.PARTIAL2;
+			else if (aggregator.getMode() == GenericUDAFEvaluator.Mode.COMPLETE)
+				mergeMode = GenericUDAFEvaluator.Mode.FINAL;
+			else
+				mergeMode = aggregator.getMode();
+			AggregationDesc mergeDesc = new AggregationDesc(
+					aggregator.getGenericUDAFName(),
+					aggregator.getGenericUDAFEvaluator(), parameters,
+					aggregator.getDistinct(), mergeMode);
+
+			String UDAFName = mergeDesc.getGenericUDAFName();
+			List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
+			arguments.add(new MutableObject<ILogicalExpression>(
+					new VariableReferenceExpression(inputVar)));
+
+			FunctionIdentifier funcId = new FunctionIdentifier(
+					ExpressionConstant.NAMESPACE, UDAFName + "("
+							+ mergeDesc.getMode() + ")");
+			HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, mergeDesc);
+			AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(
+					funcInfo, false, arguments);
+			return aggregationExpression;
+		} else {
+			throw new IllegalStateException("illegal expressions "
+					+ expr.getClass().getName());
+		}
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java
new file mode 100644
index 0000000..10c9b8a
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java
@@ -0,0 +1,15 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
+
+public class HiveNullableTypeComputer implements INullableTypeComputer {
+
+	public static INullableTypeComputer INSTANCE = new HiveNullableTypeComputer();
+
+	@Override
+	public Object makeNullableType(Object type) throws AlgebricksException {
+		return type;
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java
new file mode 100644
index 0000000..7062e26
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java
@@ -0,0 +1,116 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IPartialAggregationTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+/**
+ * Computes the intermediate (partial) output type of a Hive aggregation
+ * function call, i.e. the type produced by the partial phase of a two-phase
+ * aggregate.
+ */
+public class HivePartialAggregationTypeComputer implements
+		IPartialAggregationTypeComputer {
+
+	/** Stateless singleton; final so the shared instance cannot be replaced. */
+	public static final IPartialAggregationTypeComputer INSTANCE = new HivePartialAggregationTypeComputer();
+
+	/**
+	 * Infer the partial-aggregation output type of an aggregate function call.
+	 *
+	 * @param expr
+	 *            must be an AggregateFunctionCallExpression whose
+	 *            HiveFunctionInfo payload is an AggregationDesc
+	 * @return the Hive TypeInfo of the partial aggregate
+	 * @throws AlgebricksException
+	 *             if argument typing or evaluator initialization fails
+	 */
+	@Override
+	public Object getType(ILogicalExpression expr,
+			IVariableTypeEnvironment env,
+			IMetadataProvider<?, ?> metadataProvider)
+			throws AlgebricksException {
+		if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+			IExpressionTypeComputer tc = HiveExpressionTypeComputer.INSTANCE;
+			/**
+			 * function expression
+			 */
+			AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+
+			/**
+			 * get types of the argument expressions
+			 */
+			List<Mutable<ILogicalExpression>> arguments = funcExpr
+					.getArguments();
+			List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();
+			for (Mutable<ILogicalExpression> argument : arguments) {
+				TypeInfo type = (TypeInfo) tc.getType(argument.getValue(),
+						metadataProvider, env);
+				argumentTypes.add(type);
+			}
+
+			/**
+			 * get an object inspector per argument type
+			 */
+			ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes
+					.size()];
+			for (int i = 0; i < argumentTypes.size(); i++) {
+				childrenOIs[i] = TypeInfoUtils
+						.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes
+								.get(i));
+			}
+
+			if (funcExpr instanceof AggregateFunctionCallExpression) {
+				/**
+				 * hive aggregation info
+				 */
+				AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr
+						.getFunctionInfo()).getInfo();
+				/**
+				 * initialize the evaluator in the partial mode to learn the
+				 * partial output type
+				 */
+				GenericUDAFEvaluator evaluator = aggregateDesc
+						.getGenericUDAFEvaluator();
+
+				ObjectInspector returnOI;
+				try {
+					returnOI = evaluator.init(
+							getPartialMode(aggregateDesc.getMode()),
+							childrenOIs);
+				} catch (HiveException e) {
+					// Propagate instead of swallowing: continuing with a null
+					// inspector would NPE in getTypeInfoFromObjectInspector.
+					throw new AlgebricksException(e);
+				}
+				return TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
+			} else {
+				throw new IllegalStateException("illegal expressions "
+						+ expr.getClass().getName());
+			}
+		} else {
+			throw new IllegalStateException("illegal expressions "
+					+ expr.getClass().getName());
+		}
+	}
+
+	/**
+	 * Map an aggregation mode to the mode of its partial phase:
+	 * FINAL -> PARTIAL2, COMPLETE -> PARTIAL1, anything else unchanged.
+	 */
+	private Mode getPartialMode(Mode mode) {
+		Mode partialMode;
+		if (mode == Mode.FINAL)
+			partialMode = Mode.PARTIAL2;
+		else if (mode == Mode.COMPLETE)
+			partialMode = Mode.PARTIAL1;
+		else
+			partialMode = mode;
+		return partialMode;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java
new file mode 100644
index 0000000..de9cea6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java
@@ -0,0 +1,55 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+
+/**
+ * A constant value in a Hivesterix expression tree. The wrapped object may be
+ * null (see isNull()), so all Object-contract methods are null-safe.
+ */
+public class HivesterixConstantValue implements IAlgebricksConstantValue {
+
+	private Object object;
+
+	public HivesterixConstantValue(Object object) {
+		this.setObject(object);
+	}
+
+	@Override
+	public boolean isFalse() {
+		return object == Boolean.FALSE;
+	}
+
+	@Override
+	public boolean isNull() {
+		return object == null;
+	}
+
+	@Override
+	public boolean isTrue() {
+		return object == Boolean.TRUE;
+	}
+
+	public void setObject(Object object) {
+		this.object = object;
+	}
+
+	public Object getObject() {
+		return object;
+	}
+
+	@Override
+	public String toString() {
+		// String.valueOf handles the null constant ("null") without an NPE.
+		return String.valueOf(object);
+	}
+
+	@Override
+	public boolean equals(Object o) {
+		if (!(o instanceof HivesterixConstantValue)) {
+			return false;
+		}
+		HivesterixConstantValue v2 = (HivesterixConstantValue) o;
+		// null-safe: two null constants are equal
+		return object == null ? v2.getObject() == null : object
+				.equals(v2.getObject());
+	}
+
+	@Override
+	public int hashCode() {
+		// null-safe, consistent with equals
+		return object == null ? 0 : object.hashCode();
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/Schema.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/Schema.java
new file mode 100644
index 0000000..2b1d191
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/expression/Schema.java
@@ -0,0 +1,39 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+
+/**
+ * An ordered record schema: two parallel lists holding field names and the
+ * corresponding field types.
+ */
+public class Schema implements Serializable {
+
+	private static final long serialVersionUID = 1L;
+
+	// field names, positionally aligned with fieldTypes
+	private List<String> fieldNames;
+
+	// field types, positionally aligned with fieldNames
+	private List<TypeInfo> fieldTypes;
+
+	public Schema(List<String> fieldNames, List<TypeInfo> fieldTypes) {
+		this.fieldNames = fieldNames;
+		this.fieldTypes = fieldTypes;
+	}
+
+	/** @return a lazy ObjectInspector over this schema's fields */
+	public ObjectInspector toObjectInspector() {
+		return LazyUtils.getLazyObjectInspector(fieldNames, fieldTypes);
+	}
+
+	public List<String> getNames() {
+		return fieldNames;
+	}
+
+	public List<TypeInfo> getTypes() {
+		return fieldTypes;
+	}
+
+	/** @return the field types as an array (elements are TypeInfo instances) */
+	public Object[] getSchema() {
+		return fieldTypes.toArray();
+	}
+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
new file mode 100644
index 0000000..1fb973e
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
@@ -0,0 +1,808 @@
+package edu.uci.ics.hivesterix.logical.plan;

+

+import java.io.OutputStreamWriter;

+import java.io.PrintWriter;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.exec.UDF;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.ql.plan.api.OperatorType;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+

+import edu.uci.ics.hivesterix.logical.expression.ExpressionConstant;

+import edu.uci.ics.hivesterix.logical.expression.HiveAlgebricksBuiltInFunctionMap;

+import edu.uci.ics.hivesterix.logical.expression.HiveFunctionInfo;

+import edu.uci.ics.hivesterix.logical.expression.HivesterixConstantValue;

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.logical.plan.visitor.ExtractVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.FilterVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.GroupByVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.JoinVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.LateralViewJoinVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.LimitVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.MapJoinVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.ProjectVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.SortVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.TableScanWriteVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.UnionVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Visitor;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;

+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;

+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;

+

+@SuppressWarnings("rawtypes")
+public class HiveAlgebricksTranslator implements Translator {
+
+    // monotonically increasing id used to mint logical variables
+    private int currentVariable = 0;
+
+    // translated leaf operator references, accumulated across translate() calls
+    private List<Mutable<ILogicalOperator>> logicalOp = new ArrayList<Mutable<ILogicalOperator>>();
+
+    // whether DAG traversal should descend into the current operator's children
+    private boolean continueTraverse = true;
+
+    // metadata provider exposed via getMetadataProvider()
+    private IMetadataProvider<PartitionDesc, Object> metaData;
+
+    /**
+     * map variable name to the logical variable
+     */
+    private HashMap<String, LogicalVariable> nameToLogicalVariableMap = new HashMap<String, LogicalVariable>();
+
+    /**
+     * map field name to LogicalVariable
+     */
+    private HashMap<String, LogicalVariable> fieldToLogicalVariableMap = new HashMap<String, LogicalVariable>();
+
+    /**
+     * map logical variable to its field name
+     */
+    private HashMap<LogicalVariable, String> logicalVariableToFieldMap = new HashMap<LogicalVariable, String>();
+
+    /**
+     * asterix root operators
+     */
+    private List<Mutable<ILogicalOperator>> rootOperators = new ArrayList<Mutable<ILogicalOperator>>();
+
+    /**
+     * a list of visitors, each handling a subset of Hive operator types
+     */
+    private List<Visitor> visitors = new ArrayList<Visitor>();
+
+    /**
+     * output writer to print things out; shared class-wide (static), defaults
+     * to stdout and can be redirected via setOutputPrinter()
+     */
+    private static PrintWriter outputWriter = new PrintWriter(new OutputStreamWriter(System.out));
+
+    /**
+     * map a logical variable to type info
+     */
+    private HashMap<LogicalVariable, TypeInfo> variableToType = new HashMap<LogicalVariable, TypeInfo>();

+

+    /**
+     * Return the logical variable bound to fieldName, allocating and
+     * registering a fresh one on first use.
+     *
+     * @param fieldName
+     *            qualified field name ("alias.column")
+     * @param type
+     *            Hive type recorded for a newly created variable
+     * @return the existing or newly created variable
+     */
+    @Override
+    public LogicalVariable getVariable(String fieldName, TypeInfo type) {
+        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
+        if (var == null) {
+            // first sight of this field: delegate to getNewVariable, which
+            // performs exactly the allocation/registration done here before
+            var = getNewVariable(fieldName, type);
+        }
+        return var;
+    }

+

+    /**
+     * Unconditionally allocate a fresh logical variable for fieldName,
+     * rebinding the field even if it already had a variable, and register it
+     * in all bookkeeping maps.
+     *
+     * @param fieldName
+     *            qualified field name ("alias.column")
+     * @param type
+     *            Hive type recorded for the new variable
+     * @return the newly created variable
+     */
+    @Override
+    public LogicalVariable getNewVariable(String fieldName, TypeInfo type) {
+        currentVariable++;
+        LogicalVariable var = new LogicalVariable(currentVariable);
+        fieldToLogicalVariableMap.put(fieldName, var);
+        nameToLogicalVariableMap.put(var.toString(), var);
+        variableToType.put(var, type);
+        logicalVariableToFieldMap.put(var, fieldName);
+        return var;
+    }

+

+    /**
+     * Redirect the field bound to oldVar so that it maps to newVar; both the
+     * old and the new variable names then resolve to newVar. No-op when
+     * oldVar is unknown. Note oldVar's own entry in logicalVariableToFieldMap
+     * is intentionally kept, so old references can still be chased forward.
+     */
+    @Override
+    public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar) {
+        String name = this.logicalVariableToFieldMap.get(oldVar);
+        if (name != null) {
+            fieldToLogicalVariableMap.put(name, newVar);
+            nameToLogicalVariableMap.put(newVar.toString(), newVar);
+            nameToLogicalVariableMap.put(oldVar.toString(), newVar);
+            logicalVariableToFieldMap.put(newVar, name);
+        }
+    }

+

+    /**
+     * @return the metadata provider held by this translator
+     *         (NOTE(review): no assignment of metaData is visible in this
+     *         chunk — presumably set elsewhere; verify before relying on it)
+     */
+    @Override
+    public IMetadataProvider<PartitionDesc, Object> getMetadataProvider() {
+        return metaData;
+    }

+

+    /**
+     * Look up the variable bound to a field name without creating or
+     * rewriting anything.
+     *
+     * @param fieldName
+     *            qualified field name ("alias.column")
+     * @return the bound variable, or null if none exists
+     */
+    private LogicalVariable getVariableOnly(String fieldName) {
+        return fieldToLogicalVariableMap.get(fieldName);
+    }

+

+    /**
+     * Bind fieldName to the given variable, overwriting any different
+     * existing binding. (The original had two branches with identical
+     * bodies — absent and stale — merged here into one condition.)
+     *
+     * @param fieldName
+     *            qualified field name ("alias.column")
+     * @param variable
+     *            the variable to bind
+     */
+    private void updateVariable(String fieldName, LogicalVariable variable) {
+        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
+        if (var == null || !var.equals(variable)) {
+            fieldToLogicalVariableMap.put(fieldName, variable);
+            nameToLogicalVariableMap.put(fieldName, variable);
+        }
+    }

+

+    /**
+     * get a list of logical variables from the schema
+     *
+     * @param schema
+     *            the schema whose field names are resolved
+     * @return one variable per field name, in schema order; entries may be
+     *         null for names with no binding
+     */
+    @Override
+    public List<LogicalVariable> getVariablesFromSchema(Schema schema) {
+        List<LogicalVariable> variables = new ArrayList<LogicalVariable>();
+        List<String> names = schema.getNames();
+
+        for (String name : names)
+            variables.add(nameToLogicalVariableMap.get(name));
+        return variables;
+    }

+

+    /**
+     * get variable to typeinfo map
+     *
+     * @return the live variable-to-type map (not a defensive copy)
+     */
+    public HashMap<LogicalVariable, TypeInfo> getVariableContext() {
+        return this.variableToType;
+    }

+

+    /**
+     * get the number of variables allocated so far
+     *
+     * @return the next unused variable id (currentVariable + 1)
+     */
+    public int getVariableCounter() {
+        return currentVariable + 1;
+    }

+

+    /**
+     * translate from hive operator tree to asterix operator tree
+     *
+     * Registers all visitors, translates the Hive root operators under
+     * parentOperator, and appends the resulting roots to rootOperators.
+     *
+     * @param hiveRoot
+     *            the Hive root operators
+     * @param parentOperator
+     *            the Algebricks operator the translation is attached under
+     * @param aliasToPathMap
+     *            table alias to partition descriptor mapping for table scans
+     */
+    public void translate(List<Operator> hiveRoot, ILogicalOperator parentOperator,
+            HashMap<String, PartitionDesc> aliasToPathMap) throws AlgebricksException {
+        /**
+         * register visitors
+         */
+        visitors.add(new FilterVisitor());
+        visitors.add(new GroupByVisitor());
+        visitors.add(new JoinVisitor());
+        visitors.add(new LateralViewJoinVisitor());
+        visitors.add(new UnionVisitor());
+        visitors.add(new LimitVisitor());
+        visitors.add(new MapJoinVisitor());
+        visitors.add(new ProjectVisitor());
+        visitors.add(new SortVisitor());
+        visitors.add(new ExtractVisitor());
+        visitors.add(new TableScanWriteVisitor(aliasToPathMap));
+
+        List<Mutable<ILogicalOperator>> refList = translate(hiveRoot, new MutableObject<ILogicalOperator>(
+                parentOperator));
+        // NOTE(review): insertReplicateOperator runs before the null check
+        // below, yet the private translate() can return null (failed join
+        // translation) — confirm insertReplicateOperator tolerates null.
+        insertReplicateOperator(refList);
+        if (refList != null)
+            rootOperators.addAll(refList);
+    }

+

+    /**
+     * translate operator DAG
+     *
+     * Offers each Hive operator (cast to its concrete type so the correct
+     * Visitor overload is chosen) to the registered visitors in order; the
+     * first non-null result wins. Children are then translated recursively
+     * beneath that result. Hive leaves (no children) are collected into the
+     * shared logicalOp list, which is returned.
+     *
+     * @param hiveRoot
+     *            Hive operators at this level
+     * @param AlgebricksParentOperator
+     *            Algebricks operator to attach translations under
+     * @return the accumulated leaf operator references, or null when a
+     *         join-type operator could not be translated by any visitor
+     */
+    private List<Mutable<ILogicalOperator>> translate(List<Operator> hiveRoot,
+            Mutable<ILogicalOperator> AlgebricksParentOperator) throws AlgebricksException {
+
+        for (Operator hiveOperator : hiveRoot) {
+            continueTraverse = true;
+            Mutable<ILogicalOperator> currentOperatorRef = null;
+            if (hiveOperator.getType() == OperatorType.FILTER) {
+                FilterOperator fop = (FilterOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.REDUCESINK) {
+                ReduceSinkOperator fop = (ReduceSinkOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.JOIN) {
+                // join-like operators abort this subtree (return null) when
+                // no visitor can handle them yet
+                JoinOperator fop = (JoinOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null) {
+                        continueTraverse = true;
+                        break;
+                    } else
+                        continueTraverse = false;
+                }
+                if (currentOperatorRef == null)
+                    return null;
+            } else if (hiveOperator.getType() == OperatorType.LATERALVIEWJOIN) {
+                LateralViewJoinOperator fop = (LateralViewJoinOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+                if (currentOperatorRef == null)
+                    return null;
+            } else if (hiveOperator.getType() == OperatorType.MAPJOIN) {
+                MapJoinOperator fop = (MapJoinOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null) {
+                        continueTraverse = true;
+                        break;
+                    } else
+                        continueTraverse = false;
+                }
+                if (currentOperatorRef == null)
+                    return null;
+            } else if (hiveOperator.getType() == OperatorType.SELECT) {
+                SelectOperator fop = (SelectOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.EXTRACT) {
+                ExtractOperator fop = (ExtractOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.GROUPBY) {
+                GroupByOperator fop = (GroupByOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.TABLESCAN) {
+                TableScanOperator fop = (TableScanOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.FILESINK) {
+                FileSinkOperator fop = (FileSinkOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.LIMIT) {
+                LimitOperator lop = (LimitOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.UDTF) {
+                UDTFOperator lop = (UDTFOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null)
+                        break;
+                }
+            } else if (hiveOperator.getType() == OperatorType.UNION) {
+                UnionOperator lop = (UnionOperator) hiveOperator;
+                for (Visitor visitor : visitors) {
+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);
+                    if (currentOperatorRef != null) {
+                        continueTraverse = true;
+                        break;
+                    } else
+                        continueTraverse = false;
+                }
+            } else
+                ; // unhandled operator type: fall through to child traversal
+            // descend into children under the produced operator, or under the
+            // parent when no visitor produced one
+            if (hiveOperator.getChildOperators() != null && hiveOperator.getChildOperators().size() > 0
+                    && continueTraverse) {
+                @SuppressWarnings("unchecked")
+                List<Operator> children = hiveOperator.getChildOperators();
+                if (currentOperatorRef == null)
+                    currentOperatorRef = AlgebricksParentOperator;
+                translate(children, currentOperatorRef);
+            }
+            // Hive leaves become (candidate) Algebricks roots
+            if (hiveOperator.getChildOperators() == null || hiveOperator.getChildOperators().size() == 0)
+                logicalOp.add(currentOperatorRef);
+        }
+        return logicalOp;
+    }

+

+    /**
+     * used in select, group by to get no-column-expression columns
+     *
+     * Builds an AssignOperator for the expressions in cols that are not plain
+     * column references; plain column references are resolved to existing
+     * logical variables instead. The variables list receives one variable per
+     * input expression, in order.
+     *
+     * @param parent
+     *            the operator reference the assign is attached under
+     * @param cols
+     *            Hive expressions to resolve or materialize
+     * @param variables
+     *            out-parameter collecting one variable per expression
+     * @return the new AssignOperator, or null when every column was a plain
+     *         reference (nothing to assign)
+     */
+    public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent, List<ExprNodeDesc> cols,
+            ArrayList<LogicalVariable> variables) {
+
+        ArrayList<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();
+
+        /**
+         * variables to be appended in the assign operator
+         */
+        ArrayList<LogicalVariable> appendedVariables = new ArrayList<LogicalVariable>();
+
+        // one variable can only be assigned once
+        for (ExprNodeDesc hiveExpr : cols) {
+            rewriteExpression(hiveExpr);
+
+            if (hiveExpr instanceof ExprNodeColumnDesc) {
+                ExprNodeColumnDesc desc2 = (ExprNodeColumnDesc) hiveExpr;
+                String fieldName = desc2.getTabAlias() + "." + desc2.getColumn();
+
+                if (fieldName.indexOf("$$") < 0) {
+                    // unrenamed column: bind (or create) its variable and
+                    // rename the column reference in place
+                    LogicalVariable var = getVariable(fieldName, hiveExpr.getTypeInfo());
+                    desc2.setColumn(var.toString());
+                    desc2.setTabAlias("");
+                    variables.add(var);
+                } else {
+                    // already a $$-variable: chase the indirection to the
+                    // currently bound variable
+                    LogicalVariable var = nameToLogicalVariableMap.get(desc2.getColumn());
+                    String name = this.logicalVariableToFieldMap.get(var);
+                    var = this.getVariableOnly(name);
+                    variables.add(var);
+                }
+            } else {
+                // computed expression: translate it and allocate a variable
+                // for the result (keyed by expr string + hash to stay unique)
+                Mutable<ILogicalExpression> asterixExpr = translateScalarFucntion(hiveExpr);
+                expressions.add(asterixExpr);
+                LogicalVariable var = getVariable(hiveExpr.getExprString() + asterixExpr.hashCode(),
+                        hiveExpr.getTypeInfo());
+                variables.add(var);
+                appendedVariables.add(var);
+            }
+        }
+
+        /**
+         * create an assign operator to deal with appending
+         */
+        ILogicalOperator assignOp = null;
+        if (appendedVariables.size() > 0) {
+            assignOp = new AssignOperator(appendedVariables, expressions);
+            assignOp.getInputs().add(parent);
+        }
+        return assignOp;
+    }

+

+    // the logical plan built from the accumulated root operators
+    private ILogicalPlan plan;
+
+    /** Materialize and cache the logical plan from rootOperators. */
+    public ILogicalPlan genLogicalPlan() {
+        plan = new ALogicalPlanImpl(rootOperators);
+        return plan;
+    }

+

+    /**
+     * Pretty-print the generated plan and the field-to-variable bindings to
+     * the configured output writer. Requires genLogicalPlan() to have run
+     * first (uses the cached plan).
+     */
+    public void printOperators() throws AlgebricksException {
+        LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
+        StringBuilder buffer = new StringBuilder();
+        PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
+        outputWriter.println(buffer);
+        outputWriter.println("rewritten variables: ");
+        outputWriter.flush();
+        printVariables();
+    }

+

+    /**
+     * Redirect diagnostic output; note the writer is shared class-wide
+     * (static), so this affects all translator instances.
+     */
+    public static void setOutputPrinter(PrintWriter writer) {
+        outputWriter = writer;
+    }

+

+    /** Dump every field-name -> logical-variable binding to the output writer. */
+    private void printVariables() {
+        for (Entry<String, LogicalVariable> binding : fieldToLogicalVariableMap.entrySet()) {
+            outputWriter.println(binding.getKey() + " -> " + binding.getValue());
+        }
+        outputWriter.flush();
+    }

+

+    /**
+     * Build a Schema (parallel name and type lists) from the column
+     * signature of a Hive operator.
+     *
+     * @param operator
+     *            the Hive operator whose output signature is inspected
+     * @return a Schema mirroring the operator's columns, in signature order
+     */
+    public Schema generateInputSchema(Operator operator) {
+        List<String> names = new ArrayList<String>();
+        List<TypeInfo> types = new ArrayList<TypeInfo>();
+
+        for (ColumnInfo column : operator.getSchema().getSignature()) {
+            names.add(column.getInternalName());
+            types.add(column.getType());
+        }
+
+        return new Schema(names, types);
+    }

+

+    /**
+     * rewrite the names of output columns for feature expression evaluators to
+     * use
+     *
+     * Renames each column in the operator's output schema to its logical
+     * variable name, creating variables as needed; columns already renamed
+     * (containing "$$") are left alone.
+     *
+     * @param operator
+     *            the Hive operator whose schema is rewritten in place
+     */
+    public void rewriteOperatorOutputSchema(Operator operator) {
+        List<ColumnInfo> columns = operator.getSchema().getSignature();
+
+        for (ColumnInfo column : columns) {
+            String columnName = column.getTabAlias() + "." + column.getInternalName();
+            if (columnName.indexOf("$$") < 0) {
+                LogicalVariable var = getVariable(columnName, column.getType());
+                column.setInternalName(var.toString());
+            }
+        }
+    }

+

+    /**
+     * Rename the operator's output columns to the supplied logical variables,
+     * positionally; the two lists must have the same cardinality. Columns
+     * already renamed (containing "$$") keep their names but are not rebound.
+     *
+     * @param variables
+     *            one variable per output column, in signature order
+     * @param operator
+     *            the Hive operator whose schema is rewritten in place
+     * @throws IllegalStateException
+     *             if the variable and column counts differ
+     */
+    @Override
+    public void rewriteOperatorOutputSchema(List<LogicalVariable> variables, Operator operator) {
+
+        // NOTE(review): debug output to System.out in the production path
+        // (before and after the rewrite) — consider removing or routing
+        // through outputWriter.
+        printOperatorSchema(operator);
+        List<ColumnInfo> columns = operator.getSchema().getSignature();
+        if (variables.size() != columns.size()) {
+            throw new IllegalStateException("output cardinality error " + operator.getName() + " variable size: "
+                    + variables.size() + " expected " + columns.size());
+        }
+
+        for (int i = 0; i < variables.size(); i++) {
+            LogicalVariable var = variables.get(i);
+            ColumnInfo column = columns.get(i);
+            String fieldName = column.getTabAlias() + "." + column.getInternalName();
+            if (fieldName.indexOf("$$") < 0) {
+                updateVariable(fieldName, var);
+                column.setInternalName(var.toString());
+            }
+        }
+        printOperatorSchema(operator);
+    }

+

+    /**
+     * Rewrite a Hive expression tree in place, substituting each column
+     * reference with its bound logical-variable name. Recurses into the
+     * children of non-column expressions.
+     *
+     * @param expr
+     *            hive expression
+     * @throws IllegalStateException
+     *             if a column cannot be resolved under any known alias
+     */
+    public void rewriteExpression(ExprNodeDesc expr) {
+        if (expr instanceof ExprNodeColumnDesc) {
+            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;
+            String fieldName = desc.getTabAlias() + "." + desc.getColumn();
+            if (fieldName.indexOf("$$") < 0) {
+                // resolve under the qualified name first, then under the
+                // empty-alias and "null"-alias fallbacks
+                LogicalVariable var = getVariableOnly(fieldName);
+                if (var == null) {
+                    fieldName = "." + desc.getColumn();
+                    var = getVariableOnly(fieldName);
+                }
+                if (var == null) {
+                    fieldName = "null." + desc.getColumn();
+                    var = getVariableOnly(fieldName);
+                }
+                if (var == null) {
+                    throw new IllegalStateException(fieldName + " is wrong!!! ");
+                }
+                // follow one indirection so a replaced variable resolves to
+                // its current successor
+                String name = this.logicalVariableToFieldMap.get(var);
+                var = getVariableOnly(name);
+                desc.setColumn(var.toString());
+            }
+        } else if (expr.getChildren() != null && expr.getChildren().size() > 0) {
+            for (ExprNodeDesc child : expr.getChildren()) {
+                rewriteExpression(child);
+            }
+        }
+    }

+

+    /**
+     * Rewrite a Hive expression tree in place, substituting column references
+     * with their Algebricks variable names. Unlike rewriteExpression, only the
+     * fully-qualified name is tried — no fall-back table aliases.
+     *
+     * @param expr
+     *            hive expression (rewritten in place)
+     * @throws IllegalStateException
+     *             if a column reference cannot be resolved to a variable
+     */

+    public void rewriteExpressionPartial(ExprNodeDesc expr) {

+        if (expr instanceof ExprNodeColumnDesc) {

+            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;

+            String fieldName = desc.getTabAlias() + "." + desc.getColumn();

+            // "$$" marks a column already renamed to a variable; skip it
+            if (fieldName.indexOf("$$") < 0) {

+                LogicalVariable var = getVariableOnly(fieldName);

+                // fail fast with a descriptive message instead of the bare
+                // NullPointerException the unguarded var.toString() produced
+                if (var == null) {

+                    throw new IllegalStateException("no variable registered for field " + fieldName);

+                }

+                desc.setColumn(var.toString());

+            }

+        } else {

+            // not a column reference: recurse into the children, if any
+            if (expr.getChildren() != null && expr.getChildren().size() > 0) {

+                List<ExprNodeDesc> children = expr.getChildren();

+                for (ExprNodeDesc desc : children)

+                    rewriteExpressionPartial(desc);

+            }

+        }

+    }

+

+    /**
+     * Debug helper: dump an operator's name and its column signature
+     * ("alias.internalName" pairs) to standard output.
+     */
+    private void printOperatorSchema(Operator operator) {

+        StringBuilder signature = new StringBuilder();

+        for (ColumnInfo column : operator.getSchema().getSignature()) {

+            signature.append(column.getTabAlias()).append(".").append(column.getInternalName()).append("  ");

+        }

+        System.out.println(operator.getName());

+        System.out.print(signature.toString());

+        System.out.println();

+    }

+

+    /**
+     * Translate a Hive scalar expression into an Algebricks expression.
+     * Handles generic function calls (recursively translating arguments),
+     * column references, field accesses, constants and null literals; any
+     * other node type is rejected.
+     *
+     * @param hiveExpr
+     *            the Hive expression to translate
+     * @return the equivalent Algebricks expression, wrapped in a Mutable
+     * @throws IllegalStateException
+     *             if the expression node type is not supported
+     */

+    public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc hiveExpr) {

+        ILogicalExpression AlgebricksExpr;

+

+        if (hiveExpr instanceof ExprNodeGenericFuncDesc) {

+            // translate all argument expressions first
+            List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();

+            List<ExprNodeDesc> children = hiveExpr.getChildren();

+

+            for (ExprNodeDesc child : children)

+                arguments.add(translateScalarFucntion(child));

+

+            ExprNodeGenericFuncDesc funcExpr = (ExprNodeGenericFuncDesc) hiveExpr;

+            GenericUDF genericUdf = funcExpr.getGenericUDF();

+            UDF udf = null;

+            // a GenericUDFBridge wraps a plain UDF; instantiate the wrapped class
+            if (genericUdf instanceof GenericUDFBridge) {

+                GenericUDFBridge bridge = (GenericUDFBridge) genericUdf;

+                try {

+                    udf = bridge.getUdfClass().newInstance();

+                } catch (Exception e) {

+                    // NOTE(review): instantiation failure is only printed and
+                    // then silently falls back to the generic UDF — confirm
+                    // this best-effort behavior is intended.
+                    e.printStackTrace();

+                }

+            }

+

+            // pick the concrete hive function object: the bridged UDF
+            // instance if one was created, otherwise the generic UDF
+            Object hiveFunction = genericUdf;

+            if (udf != null)

+                hiveFunction = udf;

+

+            // map to a built-in Algebricks function id, or mint one from the class name
+            FunctionIdentifier funcId = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getAlgebricksFunctionId(hiveFunction

+                    .getClass());

+            if (funcId == null) {

+                funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, hiveFunction.getClass().getName());

+            }

+

+            // bridged UDFs carry the original function descriptor as payload
+            Object functionInfo = null;

+            if (genericUdf instanceof GenericUDFBridge) {

+                functionInfo = funcExpr;

+            }

+

+            // generate the scalar function call expression
+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, functionInfo), arguments);

+            AlgebricksExpr = AlgebricksFuncExpr;

+

+        } else if (hiveExpr instanceof ExprNodeColumnDesc) {

+            // column reference -> variable reference
+            ExprNodeColumnDesc column = (ExprNodeColumnDesc) hiveExpr;

+            LogicalVariable var = this.getVariable(column.getColumn());

+            AlgebricksExpr = new VariableReferenceExpression(var);

+

+        } else if (hiveExpr instanceof ExprNodeFieldDesc) {

+            // struct field access, modeled as a FIELDACCESS function call
+            FunctionIdentifier funcId;

+            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.FIELDACCESS);

+

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, hiveExpr));

+            AlgebricksExpr = AlgebricksFuncExpr;

+        } else if (hiveExpr instanceof ExprNodeConstantDesc) {

+            // literal constant
+            ExprNodeConstantDesc hiveConst = (ExprNodeConstantDesc) hiveExpr;

+            Object value = hiveConst.getValue();

+            AlgebricksExpr = new ConstantExpression(new HivesterixConstantValue(value));

+        } else if (hiveExpr instanceof ExprNodeNullDesc) {

+            // null literal, modeled as a NULL function call
+            FunctionIdentifier funcId;

+            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.NULL);

+

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, hiveExpr));

+

+            AlgebricksExpr = AlgebricksFuncExpr;

+        } else {

+            throw new IllegalStateException("unknown hive expression");

+        }

+        return new MutableObject<ILogicalExpression>(AlgebricksExpr);

+    }

+

+    /**
+     * Translate a Hive aggregation descriptor into an Algebricks aggregate
+     * function call expression. The function identifier encodes both the
+     * UDAF name and its evaluation mode, e.g. "sum(PARTIAL1)".
+     *
+     * @param aggregateDesc
+     *            the Hive aggregation descriptor
+     * @return the translated aggregate expression, wrapped in a Mutable
+     */

+    public Mutable<ILogicalExpression> translateAggregation(AggregationDesc aggregateDesc) {

+

+        // translate each parameter of the aggregate
+        List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();

+        for (ExprNodeDesc param : aggregateDesc.getParameters()) {

+            args.add(translateScalarFucntion(param));

+        }

+

+        // identifier combines the UDAF name with its evaluation mode
+        String qualifiedName = aggregateDesc.getGenericUDAFName() + "(" + aggregateDesc.getMode() + ")";

+        FunctionIdentifier fid = new FunctionIdentifier(ExpressionConstant.NAMESPACE, qualifiedName);

+        AggregateFunctionCallExpression result = new AggregateFunctionCallExpression(new HiveFunctionInfo(fid,

+                aggregateDesc), false, args);

+        return new MutableObject<ILogicalExpression>(result);

+    }

+

+    /**
+     * Translate a Hive table-generating function (UDTF) descriptor into an
+     * Algebricks unnesting function call expression.
+     *
+     * @param udtfDesc
+     *            the Hive UDTF descriptor
+     * @param argument
+     *            the argument expression fed to the UDTF
+     * @return the translated unnesting expression, wrapped in a Mutable
+     */

+    public Mutable<ILogicalExpression> translateUnnestFunction(UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument) {

+

+        FunctionIdentifier fid = new FunctionIdentifier(ExpressionConstant.NAMESPACE, udtfDesc.getUDTFName());

+        UnnestingFunctionCallExpression result = new UnnestingFunctionCallExpression(new HiveFunctionInfo(fid,

+                udtfDesc));

+        result.getArguments().add(argument);

+        return new MutableObject<ILogicalExpression>(result);

+    }

+

+    /**

+     * get typeinfo

+     */

+    @Override

+    public TypeInfo getType(LogicalVariable var) {

+        return variableToType.get(var);

+    }

+

+    /**

+     * get variable from variable name

+     */

+    @Override

+    public LogicalVariable getVariable(String name) {

+        return nameToLogicalVariableMap.get(name);

+    }

+

+    /**
+     * Resolve the logical variable bound to a fully-qualified field name.
+     */
+    @Override

+    public LogicalVariable getVariableFromFieldName(String fieldName) {

+        return getVariableOnly(fieldName);

+    }

+

+    /**
+     * Install the metadata provider used during plan translation.
+     */

+    @Override

+    public void setMetadataProvider(IMetadataProvider<PartitionDesc, Object> metadata) {

+        metaData = metadata;

+    }

+

+    /**
+     * Insert a ReplicateOperator below every operator that has more than one
+     * parent, so that each parent reads from its own output branch of the
+     * replicate instead of sharing a single operator reference.
+     *
+     * @param roots
+     *            root operator references of the plan to patch in place
+     */

+    private void insertReplicateOperator(List<Mutable<ILogicalOperator>> roots) {

+        Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childToParentsMap = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();

+        buildChildToParentsMapping(roots, childToParentsMap);

+        for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : childToParentsMap.entrySet()) {

+            List<Mutable<ILogicalOperator>> pList = entry.getValue();

+            // only children with multiple parents need a replicate
+            if (pList.size() > 1) {

+                ILogicalOperator rop = new ReplicateOperator(pList.size());

+                Mutable<ILogicalOperator> ropRef = new MutableObject<ILogicalOperator>(rop);

+                Mutable<ILogicalOperator> childRef = entry.getKey();

+                rop.getInputs().add(childRef);

+                // rewire every parent's matching input to point at the replicate
+                for (Mutable<ILogicalOperator> parentRef : pList) {

+                    ILogicalOperator parentOp = parentRef.getValue();

+                    int index = parentOp.getInputs().indexOf(childRef);

+                    parentOp.getInputs().set(index, ropRef);

+                }

+            }

+        }

+    }

+

+    /**
+     * Recursively build the mapping from each child operator reference to the
+     * list of its parent references, starting from the given roots.
+     * Duplicate parent entries are suppressed via the contains() check.
+     *
+     * NOTE(review): the recursion descends into a child once per parent, so
+     * shared subtrees are re-traversed; presumably plans are small enough for
+     * this to be harmless — confirm, or add a visited set.
+     *
+     * @param roots
+     *            operator references to traverse from
+     * @param map
+     *            accumulator, filled in place: child -> list of parents
+     */

+    private void buildChildToParentsMapping(List<Mutable<ILogicalOperator>> roots,

+            Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> map) {

+        for (Mutable<ILogicalOperator> opRef : roots) {

+            List<Mutable<ILogicalOperator>> childRefs = opRef.getValue().getInputs();

+            for (Mutable<ILogicalOperator> childRef : childRefs) {

+                List<Mutable<ILogicalOperator>> parentList = map.get(childRef);

+                if (parentList == null) {

+                    parentList = new ArrayList<Mutable<ILogicalOperator>>();

+                    map.put(childRef, parentList);

+                }

+                if (!parentList.contains(opRef))

+                    parentList.add(opRef);

+            }

+            buildChildToParentsMapping(childRefs, map);

+        }

+    }

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
new file mode 100644
index 0000000..494e796
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.hivesterix.logical.plan;

+

+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlanAndMetadata;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+

+/**
+ * Simple pairing of a translated Algebricks logical plan with the metadata
+ * provider that was used to build it.
+ */
+@SuppressWarnings({ "rawtypes", "unchecked" })

+public class HiveLogicalPlanAndMetaData implements ILogicalPlanAndMetadata {

+

+	// metadata provider captured at construction time
+	IMetadataProvider metadata;

+	// the translated logical plan
+	ILogicalPlan plan;

+

+	public HiveLogicalPlanAndMetaData(ILogicalPlan plan,

+			IMetadataProvider metadata) {

+		this.plan = plan;

+		this.metadata = metadata;

+	}

+

+	@Override

+	public IMetadataProvider getMetadataProvider() {

+		return metadata;

+	}

+

+	@Override

+	public ILogicalPlan getPlan() {

+		return plan;

+	}

+

+	@Override

+	public AlgebricksPartitionConstraint getClusterLocations() {

+		// cluster locations are not tracked by this wrapper; always returns
+		// null — callers must handle the null case
+		return null;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
new file mode 100644
index 0000000..0d234fb
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
@@ -0,0 +1,8 @@
+package edu.uci.ics.hivesterix.logical.plan;
+
+public class HiveOperatorAnnotations {
+
+	// hints
+	public static final String LOCAL_GROUP_BY = "LOCAL_GROUP_BY";
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
new file mode 100644
index 0000000..9a84164
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+

+/**
+ * Visitor for Hive EXTRACT operators. Produces no Algebricks operator of its
+ * own: it copies the parent's schema onto the operator and rewrites the
+ * output schema to the parent's variables, then returns null so the parent's
+ * translation is reused.
+ */
+public class ExtractVisitor extends DefaultVisitor {

+

+	@Override

+	public Mutable<ILogicalOperator> visit(ExtractOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+		Schema currentSchema = t.generateInputSchema(operator

+				.getParentOperators().get(0));

+		// pass the parent's schema through unchanged
+		operator.setSchema(operator.getParentOperators().get(0).getSchema());

+		List<LogicalVariable> latestOutputSchema = t

+				.getVariablesFromSchema(currentSchema);

+		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+		return null;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
new file mode 100644
index 0000000..b276ba9
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.FilterDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;

+

+/**
+ * Visitor for Hive FILTER operators: translates the filter predicate into an
+ * Algebricks SelectOperator hooked below the parent, and propagates the
+ * parent's schema (rewritten to logical variables) onto the Hive operator.
+ */
+public class FilterVisitor extends DefaultVisitor {

+

+	@Override

+	public Mutable<ILogicalOperator> visit(FilterOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+		Schema currentSchema = t.generateInputSchema(operator

+				.getParentOperators().get(0));

+

+		// rewrite column references in the predicate to variable names
+		FilterDesc desc = (FilterDesc) operator.getConf();

+		ExprNodeDesc predicate = desc.getPredicate();

+		t.rewriteExpression(predicate);

+

+		// translate the (rewritten) predicate and wrap it in a select
+		Mutable<ILogicalExpression> exprs = t.translateScalarFucntion(desc

+				.getPredicate());

+		ILogicalOperator currentOperator = new SelectOperator(exprs);

+		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+

+		// populate the schema from upstream operator
+		operator.setSchema(operator.getParentOperators().get(0).getSchema());

+		List<LogicalVariable> latestOutputSchema = t

+				.getVariablesFromSchema(currentSchema);

+		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
new file mode 100644
index 0000000..d2180a3
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
@@ -0,0 +1,291 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.lang.reflect.Field;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.conf.HiveConf;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.GroupByDesc;

+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

+import org.apache.hadoop.hive.ql.plan.api.OperatorType;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.HiveOperatorAnnotations;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;

+

+/**
+ * Visitor translating Hive GROUP BY (and the accompanying REDUCE SINK) into
+ * Algebricks group-by/aggregate operators. Hive emits a two-phase group-by
+ * (local pre-aggregation, then final aggregation after a reduce sink); this
+ * visitor builds the Algebricks GroupByOperator on the first phase and, on
+ * the second phase, patches the previously-built aggregate expressions into
+ * two-step aggregates. The AlgebricksAggs/localAggs fields carry state
+ * between the two visits.
+ */
+@SuppressWarnings({ "rawtypes", "unchecked" })

+public class GroupByVisitor extends DefaultVisitor {

+

+	// aggregate expressions built during the first (local) phase, patched
+	// into two-step aggregates when the second phase is visited
+	private List<Mutable<ILogicalExpression>> AlgebricksAggs = new ArrayList<Mutable<ILogicalExpression>>();

+	// step-one function infos matching AlgebricksAggs, by position
+	private List<IFunctionInfo> localAggs = new ArrayList<IFunctionInfo>();

+	// set when an aggregator-free group-by was turned into a DISTINCT
+	private boolean isDistinct = false;

+	// set when the group-by keys differ from the reduce-sink partition keys
+	private boolean gbyKeyNotRedKey = false;

+

+	@Override

+	public Mutable<ILogicalOperator> visit(GroupByOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException {

+

+		// get descriptors
+		GroupByDesc desc = (GroupByDesc) operator.getConf();

+		GroupByDesc.Mode mode = desc.getMode();

+

+		List<ExprNodeDesc> keys = desc.getKeys();

+		List<AggregationDesc> aggregators = desc.getAggregators();

+

+		Operator child = operator.getChildOperators().get(0);

+

+		// detect a group-by whose keys differ from the downstream
+		// reduce-sink's partition keys (only a local pre-aggregation)
+		if (child.getType() == OperatorType.REDUCESINK) {

+			List<ExprNodeDesc> partKeys = ((ReduceSinkDesc) child.getConf())

+					.getPartitionCols();

+			if (keys.size() != partKeys.size())

+				gbyKeyNotRedKey = true;

+		}

+

+		// first (local) phase: build the Algebricks group-by here
+		if (mode == GroupByDesc.Mode.PARTIAL1 || mode == GroupByDesc.Mode.HASH

+				|| mode == GroupByDesc.Mode.COMPLETE

+				|| (aggregators.size() == 0 && isDistinct == false)

+				|| gbyKeyNotRedKey) {

+			AlgebricksAggs.clear();

+			// add an assign operator if the key is not a column expression
+			ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+			ILogicalOperator currentOperator = null;

+			ILogicalOperator assignOperator = t.getAssignOperator(

+					AlgebricksParentOperatorRef, keys, keyVariables);

+			if (assignOperator != null) {

+				currentOperator = assignOperator;

+				AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+						currentOperator);

+			}

+

+			// get key variable expression list
+			List<Mutable<ILogicalExpression>> keyExprs = new ArrayList<Mutable<ILogicalExpression>>();

+			for (LogicalVariable var : keyVariables) {

+				keyExprs.add(t.translateScalarFucntion(new ExprNodeColumnDesc(

+						TypeInfoFactory.intTypeInfo, var.toString(), "", false)));

+			}

+

+			// a group-by with no aggregators is a DISTINCT over the keys
+			if (aggregators.size() == 0) {

+				List<Mutable<ILogicalExpression>> distinctExprs = new ArrayList<Mutable<ILogicalExpression>>();

+				for (LogicalVariable var : keyVariables) {

+					Mutable<ILogicalExpression> varExpr = new MutableObject<ILogicalExpression>(

+							new VariableReferenceExpression(var));

+					distinctExprs.add(varExpr);

+				}

+				t.rewriteOperatorOutputSchema(keyVariables, operator);

+				isDistinct = true;

+				ILogicalOperator lop = new DistinctOperator(distinctExprs);

+				lop.getInputs().add(AlgebricksParentOperatorRef);

+				return new MutableObject<ILogicalOperator>(lop);

+			}

+

+			// get the pair<LogicalVariable, ILogicalExpression> list
+			List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyParameters = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();

+			keyVariables.clear();

+			for (Mutable<ILogicalExpression> expr : keyExprs) {

+				LogicalVariable keyVar = t.getVariable(expr.getValue()

+						.toString(), TypeInfoFactory.unknownTypeInfo);

+				keyParameters.add(new Pair(keyVar, expr));

+				keyVariables.add(keyVar);

+			}

+

+			// get the parameters for the aggregator operator
+			ArrayList<LogicalVariable> aggVariables = new ArrayList<LogicalVariable>();

+			ArrayList<Mutable<ILogicalExpression>> aggExprs = new ArrayList<Mutable<ILogicalExpression>>();

+

+			// get the type of each aggregation function: columns after the
+			// keys in the signature correspond to the aggregators in order
+			HashMap<AggregationDesc, TypeInfo> aggToType = new HashMap<AggregationDesc, TypeInfo>();

+			List<ColumnInfo> columns = operator.getSchema().getSignature();

+			int offset = keys.size();

+			for (int i = offset; i < columns.size(); i++) {

+				aggToType.put(aggregators.get(i - offset), columns.get(i)

+						.getType());

+			}

+

+			localAggs.clear();

+			// rewrite parameter expressions for all aggregators
+			for (AggregationDesc aggregator : aggregators) {

+				for (ExprNodeDesc parameter : aggregator.getParameters()) {

+					t.rewriteExpression(parameter);

+				}

+				Mutable<ILogicalExpression> aggExpr = t

+						.translateAggregation(aggregator);

+				AbstractFunctionCallExpression localAggExpr = (AbstractFunctionCallExpression) aggExpr

+						.getValue();

+				localAggs.add(localAggExpr.getFunctionInfo());

+

+				// also build a COMPLETE-mode version of the aggregate; it is
+				// the one patched into a two-step aggregate on the 2nd phase
+				AggregationDesc logicalAgg = new AggregationDesc(

+						aggregator.getGenericUDAFName(),

+						aggregator.getGenericUDAFEvaluator(),

+						aggregator.getParameters(), aggregator.getDistinct(),

+						Mode.COMPLETE);

+				Mutable<ILogicalExpression> logicalAggExpr = t

+						.translateAggregation(logicalAgg);

+

+				AlgebricksAggs.add(logicalAggExpr);

+				if (!gbyKeyNotRedKey)

+					aggExprs.add(logicalAggExpr);

+				else

+					aggExprs.add(aggExpr);

+

+				aggVariables.add(t.getVariable(aggregator.getExprString()

+						+ aggregator.getMode(), aggToType.get(aggregator)));

+			}

+

+			if (child.getType() != OperatorType.REDUCESINK)

+				gbyKeyNotRedKey = false;

+

+			// build the nested aggregate sub-plan of the group-by
+			AggregateOperator aggOperator = new AggregateOperator(aggVariables,

+					aggExprs);

+			NestedTupleSourceOperator nestedTupleSource = new NestedTupleSourceOperator(

+					new MutableObject<ILogicalOperator>());

+			aggOperator.getInputs().add(

+					new MutableObject<ILogicalOperator>(nestedTupleSource));

+

+			List<Mutable<ILogicalOperator>> subRoots = new ArrayList<Mutable<ILogicalOperator>>();

+			subRoots.add(new MutableObject<ILogicalOperator>(aggOperator));

+			ILogicalPlan subPlan = new ALogicalPlanImpl(subRoots);

+			List<ILogicalPlan> subPlans = new ArrayList<ILogicalPlan>();

+			subPlans.add(subPlan);

+

+			// create the group by operator
+			currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator(

+					keyParameters,

+					new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(),

+					subPlans);

+			currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+			nestedTupleSource.getDataSourceReference()

+					.setValue(currentOperator);

+

+			List<LogicalVariable> outputVariables = new ArrayList<LogicalVariable>();

+			outputVariables.addAll(keyVariables);

+			outputVariables.addAll(aggVariables);

+			t.rewriteOperatorOutputSchema(outputVariables, operator);

+

+			if (gbyKeyNotRedKey) {

+				currentOperator.getAnnotations().put(

+						HiveOperatorAnnotations.LOCAL_GROUP_BY, Boolean.TRUE);

+			}

+

+			// opt-in external (spilling) group-by, only when every aggregation
+			// buffer is made of primitive fields (see isSerializable)
+			HiveConf conf = ConfUtil.getHiveConf();

+			Boolean extGby = conf.getBoolean(

+					"hive.algebricks.groupby.external", false);

+

+			if (extGby && isSerializable(aggregators)) {

+				currentOperator.getAnnotations()

+						.put(OperatorAnnotations.USE_EXTERNAL_GROUP_BY,

+								Boolean.TRUE);

+			}

+			return new MutableObject<ILogicalOperator>(currentOperator);

+		} else {

+			// second (final) phase: patch the phase-one aggregates into
+			// two-step aggregates; no new operator is emitted
+			isDistinct = false;

+			// rewrite parameter expressions for all aggregators
+			int i = 0;

+			for (AggregationDesc aggregator : aggregators) {

+				for (ExprNodeDesc parameter : aggregator.getParameters()) {

+					t.rewriteExpression(parameter);

+				}

+				Mutable<ILogicalExpression> agg = t

+						.translateAggregation(aggregator);

+				AggregateFunctionCallExpression originalAgg = (AggregateFunctionCallExpression) AlgebricksAggs

+						.get(i).getValue();

+				originalAgg.setStepOneAggregate(localAggs.get(i));

+				AggregateFunctionCallExpression currentAgg = (AggregateFunctionCallExpression) agg

+						.getValue();

+				if (currentAgg.getFunctionInfo() != null) {

+					originalAgg.setTwoStep(true);

+					originalAgg.setStepTwoAggregate(currentAgg

+							.getFunctionInfo());

+				}

+				i++;

+			}

+			return null;

+		}

+	}

+

+	/**
+	 * Visit the reduce sink feeding a group-by: assigns variables for its key
+	 * and value columns and rewrites the operator's output schema. Reduce
+	 * sinks not followed by a group-by are ignored.
+	 */
+	@Override

+	public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+		Operator downStream = (Operator) operator.getChildOperators().get(0);

+		if (!(downStream instanceof GroupByOperator)) {

+			return null;

+		}

+

+		ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+		List<ExprNodeDesc> keys = desc.getKeyCols();

+		List<ExprNodeDesc> values = desc.getValueCols();

+

+		// insert assign for keys
+		ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+		t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

+

+		// insert assign for values
+		ArrayList<LogicalVariable> valueVariables = new ArrayList<LogicalVariable>();

+		t.getAssignOperator(AlgebricksParentOperatorRef, values, valueVariables);

+

+		ArrayList<LogicalVariable> columns = new ArrayList<LogicalVariable>();

+		columns.addAll(keyVariables);

+		columns.addAll(valueVariables);

+

+		t.rewriteOperatorOutputSchema(columns, operator);

+		return null;

+	}

+

+	/**
+	 * Check, via reflection on a fresh aggregation buffer, whether every
+	 * aggregator's state consists only of primitive fields (int, long, float,
+	 * double, boolean) and is therefore safe to use with the external
+	 * group-by.
+	 *
+	 * @throws AlgebricksException
+	 *             if a buffer cannot be created or inspected
+	 */
+	private boolean isSerializable(List<AggregationDesc> descs)

+			throws AlgebricksException {

+		try {

+			for (AggregationDesc desc : descs) {

+				GenericUDAFEvaluator udaf = desc.getGenericUDAFEvaluator();

+				AggregationBuffer buf = udaf.getNewAggregationBuffer();

+				Class<?> bufferClass = buf.getClass();

+				Field[] fields = bufferClass.getDeclaredFields();

+				for (Field field : fields) {

+					field.setAccessible(true);

+					String type = field.getType().toString();

+					if (!(type.equals("int") || type.equals("long")

+							|| type.equals("float") || type.equals("double") || type

+								.equals("boolean"))) {

+						return false;

+					}

+				}

+

+			}

+			return true;

+		} catch (Exception e) {

+			throw new AlgebricksException(e);

+		}

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java
new file mode 100644
index 0000000..aea4be5
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java
@@ -0,0 +1,445 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.JoinCondDesc;

+import org.apache.hadoop.hive.ql.plan.JoinDesc;

+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+

+@SuppressWarnings("rawtypes")

+public class JoinVisitor extends DefaultVisitor {

+

+	/**

+	 * reduce sink operator to variables

+	 */

+	private HashMap<Operator, List<LogicalVariable>> reduceSinkToKeyVariables = new HashMap<Operator, List<LogicalVariable>>();

+

+	/**

+	 * reduce sink operator to variables

+	 */

+	private HashMap<Operator, List<String>> reduceSinkToFieldNames = new HashMap<Operator, List<String>>();

+

+	/**

+	 * reduce sink operator to variables

+	 */

+	private HashMap<Operator, List<TypeInfo>> reduceSinkToTypes = new HashMap<Operator, List<TypeInfo>>();

+

+	/**

+	 * map a join operator (in hive) to its parent operators (in hive)

+	 */

+	private HashMap<Operator, List<Operator>> operatorToHiveParents = new HashMap<Operator, List<Operator>>();

+

+	/**

+	 * map a join operator (in hive) to its parent operators (in asterix)

+	 */

+	private HashMap<Operator, List<ILogicalOperator>> operatorToAsterixParents = new HashMap<Operator, List<ILogicalOperator>>();

+

+	/**

+	 * the latest traversed reduce sink operator

+	 */

+	private Operator latestReduceSink = null;

+

+	/**

+	 * the latest generated parent for join

+	 */

+	private ILogicalOperator latestAlgebricksOperator = null;

+

+	/**

+	 * process a join operator

+	 */

+	@Override

+	public Mutable<ILogicalOperator> visit(JoinOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

+		latestAlgebricksOperator = AlgebricksParentOperator.getValue();

+		translateJoinOperatorPreprocess(operator, t);

+		List<Operator> parents = operatorToHiveParents.get(operator);

+		if (parents.size() < operator.getParentOperators().size()) {

+			return null;

+		} else {

+			ILogicalOperator joinOp = translateJoinOperator(operator,

+					AlgebricksParentOperator, t);

+			// clearStatus();

+			return new MutableObject<ILogicalOperator>(joinOp);

+		}

+	}

+

+	private void reorder(Byte[] order, List<ILogicalOperator> parents,

+			List<Operator> hiveParents) {

+		ILogicalOperator[] lops = new ILogicalOperator[parents.size()];

+		Operator[] ops = new Operator[hiveParents.size()];

+

+		for (Operator op : hiveParents) {

+			ReduceSinkOperator rop = (ReduceSinkOperator) op;

+			ReduceSinkDesc rdesc = rop.getConf();

+			int tag = rdesc.getTag();

+

+			int index = -1;

+			for (int i = 0; i < order.length; i++)

+				if (order[i] == tag) {

+					index = i;

+					break;

+				}

+			lops[index] = parents.get(hiveParents.indexOf(op));

+			ops[index] = op;

+		}

+

+		parents.clear();

+		hiveParents.clear();

+

+		for (int i = 0; i < lops.length; i++) {

+			parents.add(lops[i]);

+			hiveParents.add(ops[i]);

+		}

+	}

+

+	/**

+	 * translate a hive join operator to asterix join operator->assign

+	 * operator->project operator

+	 * 

+	 * @param parentOperator

+	 * @param operator

+	 * @return

+	 */

+	private ILogicalOperator translateJoinOperator(Operator operator,

+			Mutable<ILogicalOperator> parentOperator, Translator t) {

+

+		JoinDesc joinDesc = (JoinDesc) operator.getConf();

+

+		// get the projection expression (already re-written) from each source

+		// table

+		Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

+		reorder(joinDesc.getTagOrder(), operatorToAsterixParents.get(operator),

+				operatorToHiveParents.get(operator));

+

+		// make an reduce join operator

+		ILogicalOperator currentOperator = generateJoinTree(

+				joinDesc.getCondsList(),

+				operatorToAsterixParents.get(operator),

+				operatorToHiveParents.get(operator), 0, t);

+		parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+

+		// add assign and project operator on top of a join

+		// output variables

+		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+		Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

+		Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

+		while (iterator.hasNext()) {

+			List<ExprNodeDesc> outputExprs = iterator.next().getValue();

+			ILogicalOperator assignOperator = t.getAssignOperator(

+					parentOperator, outputExprs, variables);

+

+			if (assignOperator != null) {

+				currentOperator = assignOperator;

+				parentOperator = new MutableObject<ILogicalOperator>(

+						currentOperator);

+			}

+		}

+

+		ILogicalOperator po = new ProjectOperator(variables);

+		po.getInputs().add(parentOperator);

+		t.rewriteOperatorOutputSchema(variables, operator);

+		return po;

+	}

+

+	/**

+	 * deal with reduce sink operator for the case of join

+	 */

+	@Override

+	public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

+			Mutable<ILogicalOperator> parentOperator, Translator t) {

+

+		Operator downStream = (Operator) operator.getChildOperators().get(0);

+		if (!(downStream instanceof JoinOperator))

+			return null;

+

+		ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+		List<ExprNodeDesc> keys = desc.getKeyCols();

+		List<ExprNodeDesc> values = desc.getValueCols();

+		List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

+

+		/**

+		 * rewrite key, value, paritioncol expressions

+		 */

+		for (ExprNodeDesc key : keys)

+			t.rewriteExpression(key);

+		for (ExprNodeDesc value : values)

+			t.rewriteExpression(value);

+		for (ExprNodeDesc col : partitionCols)

+			t.rewriteExpression(col);

+

+		ILogicalOperator currentOperator = null;

+

+		// add assign operator for keys if necessary

+		ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+		ILogicalOperator assignOperator = t.getAssignOperator(parentOperator,

+				keys, keyVariables);

+		if (assignOperator != null) {

+			currentOperator = assignOperator;

+			parentOperator = new MutableObject<ILogicalOperator>(

+					currentOperator);

+		}

+

+		// add assign operator for values if necessary

+		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+		assignOperator = t.getAssignOperator(parentOperator, values, variables);

+		if (assignOperator != null) {

+			currentOperator = assignOperator;

+			parentOperator = new MutableObject<ILogicalOperator>(

+					currentOperator);

+		}

+

+		// unified schema: key, value

+		ArrayList<LogicalVariable> unifiedKeyValues = new ArrayList<LogicalVariable>();

+		unifiedKeyValues.addAll(keyVariables);

+		for (LogicalVariable value : variables)

+			if (keyVariables.indexOf(value) < 0)

+				unifiedKeyValues.add(value);

+

+		// insert projection operator, it is a *must*,

+		// in hive, reduce sink sometimes also do the projection operator's

+		// task

+		currentOperator = new ProjectOperator(unifiedKeyValues);

+		currentOperator.getInputs().add(parentOperator);

+		parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+

+		reduceSinkToKeyVariables.put(operator, keyVariables);

+		List<String> fieldNames = new ArrayList<String>();

+		List<TypeInfo> types = new ArrayList<TypeInfo>();

+		for (LogicalVariable var : unifiedKeyValues) {

+			fieldNames.add(var.toString());

+			types.add(t.getType(var));

+		}

+		reduceSinkToFieldNames.put(operator, fieldNames);

+		reduceSinkToTypes.put(operator, types);

+		t.rewriteOperatorOutputSchema(variables, operator);

+

+		latestAlgebricksOperator = currentOperator;

+		latestReduceSink = operator;

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+

+	/**

+	 * partial rewrite a join operator

+	 * 

+	 * @param operator

+	 * @param t

+	 */

+	private void translateJoinOperatorPreprocess(Operator operator, Translator t) {

+		JoinDesc desc = (JoinDesc) operator.getConf();

+		ReduceSinkDesc reduceSinkDesc = (ReduceSinkDesc) latestReduceSink

+				.getConf();

+		int tag = reduceSinkDesc.getTag();

+

+		Map<Byte, List<ExprNodeDesc>> exprMap = desc.getExprs();

+		List<ExprNodeDesc> exprs = exprMap.get(Byte.valueOf((byte) tag));

+

+		for (ExprNodeDesc expr : exprs)

+			t.rewriteExpression(expr);

+

+		List<Operator> parents = operatorToHiveParents.get(operator);

+		if (parents == null) {

+			parents = new ArrayList<Operator>();

+			operatorToHiveParents.put(operator, parents);

+		}

+		parents.add(latestReduceSink);

+

+		List<ILogicalOperator> asterixParents = operatorToAsterixParents

+				.get(operator);

+		if (asterixParents == null) {

+			asterixParents = new ArrayList<ILogicalOperator>();

+			operatorToAsterixParents.put(operator, asterixParents);

+		}

+		asterixParents.add(latestAlgebricksOperator);

+	}

+

+	// generate a join tree from a list of exchange/reducesink operator

+	// both exchanges and reduce sinks have the same order

+	private ILogicalOperator generateJoinTree(List<JoinCondDesc> conds,

+			List<ILogicalOperator> exchanges, List<Operator> reduceSinks,

+			int offset, Translator t) {

+		// get a list of reduce sink descs (input descs)

+		int inputSize = reduceSinks.size() - offset;

+

+		if (inputSize == 2) {

+			ILogicalOperator currentRoot;

+

+			List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

+			for (int i = reduceSinks.size() - 1; i >= offset; i--)

+				reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i)

+						.getConf());

+

+			// get the object inspector for the join

+			List<String> fieldNames = new ArrayList<String>();

+			List<TypeInfo> types = new ArrayList<TypeInfo>();

+			for (int i = reduceSinks.size() - 1; i >= offset; i--) {

+				fieldNames

+						.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

+				types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

+			}

+

+			// get number of equality conjunctions in the final join condition

+			int size = reduceSinkDescs.get(0).getKeyCols().size();

+

+			// make up the join conditon expression

+			List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+			for (int i = 0; i < size; i++) {

+				// create a join key pair

+				List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+				for (ReduceSinkDesc sink : reduceSinkDescs) {

+					keyPair.add(sink.getKeyCols().get(i));

+				}

+				// create a hive equal condition

+				ExprNodeDesc equality = new ExprNodeGenericFuncDesc(

+						TypeInfoFactory.booleanTypeInfo,

+						new GenericUDFOPEqual(), keyPair);

+				// add the equal condition to the conjunction list

+				joinConditionChildren.add(equality);

+			}

+			// get final conjunction expression

+			ExprNodeDesc conjunct = null;

+

+			if (joinConditionChildren.size() > 1)

+				conjunct = new ExprNodeGenericFuncDesc(

+						TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+						joinConditionChildren);

+			else if (joinConditionChildren.size() == 1)

+				conjunct = joinConditionChildren.get(0);

+			else {

+				// there is no join equality condition, equal-join

+				conjunct = new ExprNodeConstantDesc(

+						TypeInfoFactory.booleanTypeInfo, new Boolean(true));

+			}

+			// get an ILogicalExpression from hive's expression

+			Mutable<ILogicalExpression> expression = t

+					.translateScalarFucntion(conjunct);

+

+			Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(

+					exchanges.get(exchanges.size() - 1));

+			Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(

+					exchanges.get(exchanges.size() - 2));

+			// get the join operator

+			if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

+				currentRoot = new LeftOuterJoinOperator(expression);

+				Mutable<ILogicalOperator> temp = leftBranch;

+				leftBranch = rightBranch;

+				rightBranch = temp;

+			} else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

+				currentRoot = new LeftOuterJoinOperator(expression);

+			} else

+				currentRoot = new InnerJoinOperator(expression);

+

+			currentRoot.getInputs().add(leftBranch);

+			currentRoot.getInputs().add(rightBranch);

+

+			// rewriteOperatorOutputSchema(variables, operator);

+			return currentRoot;

+		} else {

+			// get the child join operator and insert and one-to-one exchange

+			ILogicalOperator joinSrcOne = generateJoinTree(conds, exchanges,

+					reduceSinks, offset + 1, t);

+			// joinSrcOne.addInput(childJoin);

+

+			ILogicalOperator currentRoot;

+

+			List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

+			for (int i = offset; i < offset + 2; i++)

+				reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i)

+						.getConf());

+

+			// get the object inspector for the join

+			List<String> fieldNames = new ArrayList<String>();

+			List<TypeInfo> types = new ArrayList<TypeInfo>();

+			for (int i = offset; i < reduceSinks.size(); i++) {

+				fieldNames

+						.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

+				types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

+			}

+

+			// get number of equality conjunctions in the final join condition

+			int size = reduceSinkDescs.get(0).getKeyCols().size();

+

+			// make up the join condition expression

+			List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+			for (int i = 0; i < size; i++) {

+				// create a join key pair

+				List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+				for (ReduceSinkDesc sink : reduceSinkDescs) {

+					keyPair.add(sink.getKeyCols().get(i));

+				}

+				// create a hive equal condition

+				ExprNodeDesc equality = new ExprNodeGenericFuncDesc(

+						TypeInfoFactory.booleanTypeInfo,

+						new GenericUDFOPEqual(), keyPair);

+				// add the equal condition to the conjunction list

+				joinConditionChildren.add(equality);

+			}

+			// get final conjunction expression

+			ExprNodeDesc conjunct = null;

+

+			if (joinConditionChildren.size() > 1)

+				conjunct = new ExprNodeGenericFuncDesc(

+						TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+						joinConditionChildren);

+			else if (joinConditionChildren.size() == 1)

+				conjunct = joinConditionChildren.get(0);

+			else {

+				// there is no join equality condition, full outer join

+				conjunct = new ExprNodeConstantDesc(

+						TypeInfoFactory.booleanTypeInfo, new Boolean(true));

+			}

+			// get an ILogicalExpression from hive's expression

+			Mutable<ILogicalExpression> expression = t

+					.translateScalarFucntion(conjunct);

+

+			Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(

+					joinSrcOne);

+			Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(

+					exchanges.get(offset));

+

+			// get the join operator

+			if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

+				currentRoot = new LeftOuterJoinOperator(expression);

+				Mutable<ILogicalOperator> temp = leftBranch;

+				leftBranch = rightBranch;

+				rightBranch = temp;

+			} else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

+				currentRoot = new LeftOuterJoinOperator(expression);

+			} else

+				currentRoot = new InnerJoinOperator(expression);

+

+			// set the inputs from Algebricks join operator

+			// add the current table

+			currentRoot.getInputs().add(leftBranch);

+			currentRoot.getInputs().add(rightBranch);

+

+			return currentRoot;

+		}

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java
new file mode 100644
index 0000000..004a8c2
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java
@@ -0,0 +1,124 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+

+/**

+ * The lateral view join operator is used for FROM src LATERAL VIEW udtf()...

+ * This operator was implemented with the following operator DAG in mind.

+ * 

+ * For a query such as

+ * 

+ * SELECT pageid, adid.* FROM example_table LATERAL VIEW explode(adid_list) AS

+ * adid

+ * 

+ * The top of the operator DAG will look similar to

+ * 

+ * [Table Scan] | [Lateral View Forward] / \ [Select](*) [Select](adid_list) | |

+ * | [UDTF] (explode) \ / [Lateral View Join] | | [Select] (pageid, adid.*) |

+ * ....

+ * 

+ * Rows from the table scan operator are first sent to a lateral view forward

+ * operator that just forwards the row and marks the start of a LV. The select

+ * operator on the left picks all the columns while the select operator on the

+ * right picks only the columns needed by the UDTF.

+ * 

+ * The output of select in the left branch and output of the UDTF in the right

+ * branch are then sent to the lateral view join (LVJ). In most cases, the UDTF

+ * will generate > 1 row for every row received from the TS, while the left

+ * select operator will generate only one. For each row output from the TS, the

+ * LVJ outputs all possible rows that can be created by joining the row from the

+ * left select and one of the rows output from the UDTF.

+ * 

+ * Additional lateral views can be supported by adding a similar DAG after the

+ * previous LVJ operator.

+ */

+

+@SuppressWarnings("rawtypes")

+public class LateralViewJoinVisitor extends DefaultVisitor {

+

+	private UDTFDesc udtf;

+

+	private List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

+

+	@Override

+	public Mutable<ILogicalOperator> visit(LateralViewJoinOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException {

+

+		parents.add(AlgebricksParentOperatorRef);

+		if (operator.getParentOperators().size() > parents.size()) {

+			return null;

+		}

+

+		Operator parent0 = operator.getParentOperators().get(0);

+		ILogicalOperator parentOperator;

+		ILogicalExpression unnestArg;

+		if (parent0 instanceof UDTFOperator) {

+			List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

+			VariableUtilities.getLiveVariables(parents.get(1).getValue(),

+					unnestVars);

+			unnestArg = new VariableReferenceExpression(unnestVars.get(0));

+			parentOperator = parents.get(1).getValue();

+		} else {

+			List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

+			VariableUtilities.getLiveVariables(parents.get(0).getValue(),

+					unnestVars);

+			unnestArg = new VariableReferenceExpression(unnestVars.get(0));

+			parentOperator = parents.get(0).getValue();

+		}

+

+		LogicalVariable var = t.getVariable(udtf.toString(),

+				TypeInfoFactory.unknownTypeInfo);

+

+		Mutable<ILogicalExpression> unnestExpr = t.translateUnnestFunction(

+				udtf, new MutableObject<ILogicalExpression>(unnestArg));

+		ILogicalOperator currentOperator = new UnnestOperator(var, unnestExpr);

+

+		List<LogicalVariable> outputVars = new ArrayList<LogicalVariable>();

+		VariableUtilities.getLiveVariables(parentOperator, outputVars);

+		outputVars.add(var);

+		currentOperator.getInputs().add(

+				new MutableObject<ILogicalOperator>(parentOperator));

+

+		parents.clear();

+		udtf = null;

+		t.rewriteOperatorOutputSchema(outputVars, operator);

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(UDTFOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+		Schema currentSchema = t.generateInputSchema(operator

+				.getParentOperators().get(0));

+		udtf = (UDTFDesc) operator.getConf();

+

+		// populate the schema from upstream operator

+		operator.setSchema(operator.getParentOperators().get(0).getSchema());

+		List<LogicalVariable> latestOutputSchema = t

+				.getVariablesFromSchema(currentSchema);

+		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+		return null;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java
new file mode 100644
index 0000000..84cdf00
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.plan.LimitDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.HivesterixConstantValue;

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

+

+public class LimitVisitor extends DefaultVisitor {

+

+	@Override

+	public Mutable<ILogicalOperator> visit(LimitOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+		Schema currentSchema = t.generateInputSchema(operator

+				.getParentOperators().get(0));

+

+		LimitDesc desc = (LimitDesc) operator.getConf();

+		int limit = desc.getLimit();

+		Integer limitValue = new Integer(limit);

+

+		ILogicalExpression expr = new ConstantExpression(

+				new HivesterixConstantValue(limitValue));

+		ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator(

+				expr, true);

+		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+

+		operator.setSchema(operator.getParentOperators().get(0).getSchema());

+		List<LogicalVariable> latestOutputSchema = t

+				.getVariablesFromSchema(currentSchema);

+		t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java
new file mode 100644
index 0000000..fa5d014
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java
@@ -0,0 +1,183 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.io.Serializable;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.MapJoinDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+

+@SuppressWarnings("rawtypes")

+public class MapJoinVisitor extends DefaultVisitor {

+

+	/**

+	 * map a join operator (in hive) to its parent operators (in asterix)

+	 */

+	private HashMap<Operator, List<Mutable<ILogicalOperator>>> opMap = new HashMap<Operator, List<Mutable<ILogicalOperator>>>();

+

+	@Override

+	public Mutable<ILogicalOperator> visit(MapJoinOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+		List<Operator<? extends Serializable>> joinSrc = operator

+				.getParentOperators();

+		List<Mutable<ILogicalOperator>> parents = opMap.get(operator);

+		if (parents == null) {

+			parents = new ArrayList<Mutable<ILogicalOperator>>();

+			opMap.put(operator, parents);

+		}

+		parents.add(AlgebricksParentOperatorRef);

+		if (joinSrc.size() != parents.size())

+			return null;

+

+		ILogicalOperator currentOperator;

+		// make an map join operator

+		// TODO: will have trouble for n-way joins

+		MapJoinDesc joinDesc = (MapJoinDesc) operator.getConf();

+

+		Map<Byte, List<ExprNodeDesc>> keyMap = joinDesc.getKeys();

+		// get the projection expression (already re-written) from each source

+		// table

+		Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

+

+		int inputSize = operator.getParentOperators().size();

+		// get a list of reduce sink descs (input descs)

+

+		// get the parent operator

+		List<Mutable<ILogicalOperator>> parentOps = parents;

+

+		List<String> fieldNames = new ArrayList<String>();

+		List<TypeInfo> types = new ArrayList<TypeInfo>();

+		for (Operator ts : joinSrc) {

+			List<ColumnInfo> columns = ts.getSchema().getSignature();

+			for (ColumnInfo col : columns) {

+				fieldNames.add(col.getInternalName());

+				types.add(col.getType());

+			}

+		}

+

+		// get number of equality conjunctions in the final join condition

+		Set<Entry<Byte, List<ExprNodeDesc>>> keyEntries = keyMap.entrySet();

+		Iterator<Entry<Byte, List<ExprNodeDesc>>> entry = keyEntries.iterator();

+

+		int size = 0;

+		if (entry.hasNext())

+			size = entry.next().getValue().size();

+

+		// make up the join conditon expression

+		List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+		for (int i = 0; i < size; i++) {

+			// create a join key pair

+			List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+			for (int j = 0; j < inputSize; j++) {

+				keyPair.add(keyMap.get(Byte.valueOf((byte) j)).get(i));

+			}

+			// create a hive equal condition

+			ExprNodeDesc equality = new ExprNodeGenericFuncDesc(

+					TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),

+					keyPair);

+			// add the equal condition to the conjunction list

+			joinConditionChildren.add(equality);

+		}

+		// get final conjunction expression

+		ExprNodeDesc conjunct = null;

+

+		if (joinConditionChildren.size() > 1)

+			conjunct = new ExprNodeGenericFuncDesc(

+					TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+					joinConditionChildren);

+		else if (joinConditionChildren.size() == 1)

+			conjunct = joinConditionChildren.get(0);

+		else {

+			// there is no join equality condition, full outer join

+			conjunct = new ExprNodeConstantDesc(

+					TypeInfoFactory.booleanTypeInfo, new Boolean(true));

+		}

+		// get an ILogicalExpression from hive's expression

+		Mutable<ILogicalExpression> expression = t

+				.translateScalarFucntion(conjunct);

+

+		ArrayList<LogicalVariable> left = new ArrayList<LogicalVariable>();

+		ArrayList<LogicalVariable> right = new ArrayList<LogicalVariable>();

+

+		Set<Entry<Byte, List<ExprNodeDesc>>> kentries = keyMap.entrySet();

+		Iterator<Entry<Byte, List<ExprNodeDesc>>> kiterator = kentries

+				.iterator();

+		int iteration = 0;

+		ILogicalOperator assignOperator = null;

+		while (kiterator.hasNext()) {

+			List<ExprNodeDesc> outputExprs = kiterator.next().getValue();

+

+			if (iteration == 0)

+				assignOperator = t.getAssignOperator(

+						AlgebricksParentOperatorRef, outputExprs, left);

+			else

+				assignOperator = t.getAssignOperator(

+						AlgebricksParentOperatorRef, outputExprs, right);

+

+			if (assignOperator != null) {

+				currentOperator = assignOperator;

+				AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+						currentOperator);

+			}

+			iteration++;

+		}

+

+		List<Mutable<ILogicalOperator>> inputs = parentOps;

+

+		// get the join operator

+		currentOperator = new InnerJoinOperator(expression);

+

+		// set the inputs from asterix join operator

+		for (Mutable<ILogicalOperator> input : inputs)

+			currentOperator.getInputs().add(input);

+		AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+				currentOperator);

+

+		// add assign and project operator

+		// output variables

+		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+		Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

+		Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

+		while (iterator.hasNext()) {

+			List<ExprNodeDesc> outputExprs = iterator.next().getValue();

+			assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef,

+					outputExprs, variables);

+

+			if (assignOperator != null) {

+				currentOperator = assignOperator;

+				AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+						currentOperator);

+			}

+		}

+

+		currentOperator = new ProjectOperator(variables);

+		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+		t.rewriteOperatorOutputSchema(variables, operator);

+		// opMap.clear();

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java
new file mode 100644
index 0000000..0d2067c
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java
@@ -0,0 +1,58 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.SelectDesc;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+

+public class ProjectVisitor extends DefaultVisitor {

+

+	/**

+	 * translate project operator

+	 */

+	@Override

+	public Mutable<ILogicalOperator> visit(SelectOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

+

+		SelectDesc desc = (SelectDesc) operator.getConf();

+

+		if (desc == null)

+			return null;

+

+		List<ExprNodeDesc> cols = desc.getColList();

+

+		if (cols == null)

+			return null;

+

+		// insert assign operator if necessary

+		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+

+		for (ExprNodeDesc expr : cols)

+			t.rewriteExpression(expr);

+

+		ILogicalOperator assignOp = t.getAssignOperator(

+				AlgebricksParentOperator, cols, variables);

+		ILogicalOperator currentOperator = null;

+		if (assignOp != null) {

+			currentOperator = assignOp;

+			AlgebricksParentOperator = new MutableObject<ILogicalOperator>(

+					currentOperator);

+		}

+

+		currentOperator = new ProjectOperator(variables);

+		currentOperator.getInputs().add(AlgebricksParentOperator);

+		t.rewriteOperatorOutputSchema(variables, operator);

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java
new file mode 100644
index 0000000..a2c0d03
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java
@@ -0,0 +1,125 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;

+

+public class SortVisitor extends DefaultVisitor {

+

+	@SuppressWarnings("rawtypes")

+	@Override

+	public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException {

+		ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+		Operator downStream = (Operator) operator.getChildOperators().get(0);

+		List<ExprNodeDesc> keys = desc.getKeyCols();

+		if (!(downStream instanceof ExtractOperator

+				&& desc.getNumReducers() == 1 && keys.size() > 0)) {

+			return null;

+		}

+

+		List<ExprNodeDesc> schema = new ArrayList<ExprNodeDesc>();

+		List<ExprNodeDesc> values = desc.getValueCols();

+		List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

+		for (ExprNodeDesc key : keys) {

+			t.rewriteExpression(key);

+		}

+		for (ExprNodeDesc value : values) {

+			t.rewriteExpression(value);

+		}

+		for (ExprNodeDesc col : partitionCols) {

+			t.rewriteExpression(col);

+		}

+

+		// add an order-by operator and a limit operator if any

+		List<Pair<IOrder, Mutable<ILogicalExpression>>> pairs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();

+		char[] orders = desc.getOrder().toCharArray();

+		int i = 0;

+		for (ExprNodeDesc key : keys) {

+			Mutable<ILogicalExpression> expr = t.translateScalarFucntion(key);

+			IOrder order = orders[i] == '+' ? OrderOperator.ASC_ORDER

+					: OrderOperator.DESC_ORDER;

+

+			Pair<IOrder, Mutable<ILogicalExpression>> pair = new Pair<IOrder, Mutable<ILogicalExpression>>(

+					order, expr);

+			pairs.add(pair);

+			i++;

+		}

+

+		// get input variables

+		ArrayList<LogicalVariable> inputVariables = new ArrayList<LogicalVariable>();

+		VariableUtilities.getProducedVariables(

+				AlgebricksParentOperatorRef.getValue(), inputVariables);

+

+		ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+		ILogicalOperator currentOperator;

+		ILogicalOperator assignOp = t.getAssignOperator(

+				AlgebricksParentOperatorRef, keys, keyVariables);

+		if (assignOp != null) {

+			currentOperator = assignOp;

+			AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+					currentOperator);

+		}

+

+		OrderColumn[] keyColumns = new OrderColumn[keyVariables.size()];

+

+		for (int j = 0; j < keyColumns.length; j++)

+			keyColumns[j] = new OrderColumn(keyVariables.get(j),

+					pairs.get(j).first.getKind());

+

+		// handle order operator

+		currentOperator = new OrderOperator(pairs);

+		currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+		AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+				currentOperator);

+

+		// project back, remove generated sort-key columns if any

+		if (assignOp != null) {

+			currentOperator = new ProjectOperator(inputVariables);

+			currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+			AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+					currentOperator);

+		}

+

+		/**

+		 * a special rule for hive's order-by: the output schema of the reduce

+		 * sink operator only contains the value columns

+		 */

+		for (ExprNodeDesc value : values) {

+			schema.add(value);

+		}

+

+		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+		ILogicalOperator assignOperator = t.getAssignOperator(

+				AlgebricksParentOperatorRef, schema, variables);

+		t.rewriteOperatorOutputSchema(variables, operator);

+

+		if (assignOperator != null) {

+			currentOperator = assignOperator;

+			AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(

+					currentOperator);

+		}

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java
new file mode 100644
index 0000000..3e12bb9
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java
@@ -0,0 +1,148 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.hive.ql.plan.TableScanDesc;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveDataSink;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveDataSource;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveMetaDataProvider;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+

+public class TableScanWriteVisitor extends DefaultVisitor {

+

+	/**

+	 * map from alias to partition desc

+	 */

+	private HashMap<String, PartitionDesc> aliasToPathMap;

+

+	/**

+	 * map from partition desc to data source

+	 */

+	private HashMap<PartitionDesc, IDataSource<PartitionDesc>> dataSourceMap = new HashMap<PartitionDesc, IDataSource<PartitionDesc>>();

+

+	/**

+	 * constructor

+	 * 

+	 * @param aliasToPathMap

+	 */

+	public TableScanWriteVisitor(HashMap<String, PartitionDesc> aliasToPathMap) {

+		this.aliasToPathMap = aliasToPathMap;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(TableScanOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		TableScanDesc desc = (TableScanDesc) operator.getConf();

+		if (desc == null) {

+			List<LogicalVariable> schema = new ArrayList<LogicalVariable>();

+			VariableUtilities.getLiveVariables(

+					AlgebricksParentOperator.getValue(), schema);

+			t.rewriteOperatorOutputSchema(schema, operator);

+			return null;

+		}

+

+		List<ColumnInfo> columns = operator.getSchema().getSignature();

+		for (int i = columns.size() - 1; i >= 0; i--)

+			if (columns.get(i).getIsVirtualCol() == true)

+				columns.remove(i);

+

+		// start with empty tuple operator

+		List<TypeInfo> types = new ArrayList<TypeInfo>();

+		ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+		List<String> names = new ArrayList<String>();

+		for (ColumnInfo column : columns) {

+			types.add(column.getType());

+

+			LogicalVariable var = t.getVariableFromFieldName(column

+					.getTabAlias() + "." + column.getInternalName());

+			LogicalVariable varNew;

+

+			if (var != null) {

+				varNew = t.getVariable(

+						column.getTabAlias() + "." + column.getInternalName()

+								+ operator.toString(), column.getType());

+				t.replaceVariable(var, varNew);

+				var = varNew;

+			} else

+				var = t.getNewVariable(

+						column.getTabAlias() + "." + column.getInternalName(),

+						column.getType());

+

+			variables.add(var);

+			names.add(column.getInternalName());

+		}

+		Schema currentSchema = new Schema(names, types);

+

+		String alias = desc.getAlias();

+		PartitionDesc partDesc = aliasToPathMap.get(alias);

+		IDataSource<PartitionDesc> dataSource = new HiveDataSource<PartitionDesc>(

+				partDesc, currentSchema.getSchema());

+		ILogicalOperator currentOperator = new DataSourceScanOperator(

+				variables, dataSource);

+

+		// set empty tuple source operator

+		ILogicalOperator ets = new EmptyTupleSourceOperator();

+		currentOperator.getInputs().add(

+				new MutableObject<ILogicalOperator>(ets));

+

+		// setup data source

+		dataSourceMap.put(partDesc, dataSource);

+		t.rewriteOperatorOutputSchema(variables, operator);

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

+

+		if (hiveOperator.getChildOperators() != null

+				&& hiveOperator.getChildOperators().size() > 0)

+			return null;

+

+		Schema currentSchema = t.generateInputSchema(hiveOperator

+				.getParentOperators().get(0));

+

+		IDataSink sink = new HiveDataSink(hiveOperator,

+				currentSchema.getSchema());

+		List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();

+		for (String column : currentSchema.getNames()) {

+			exprList.add(new MutableObject<ILogicalExpression>(

+					new VariableReferenceExpression(t.getVariable(column))));

+		}

+

+		ILogicalOperator currentOperator = new WriteOperator(exprList, sink);

+		if (AlgebricksParentOperator != null) {

+			currentOperator.getInputs().add(AlgebricksParentOperator);

+		}

+

+		IMetadataProvider<PartitionDesc, Object> metaData = new HiveMetaDataProvider<PartitionDesc, Object>(

+				hiveOperator, currentSchema, dataSourceMap);

+		t.setMetadataProvider(metaData);

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java
new file mode 100644
index 0000000..f4e74f6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java
@@ -0,0 +1,64 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+

+public class UnionVisitor extends DefaultVisitor {

+

+	List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

+

+	@Override

+	public Mutable<ILogicalOperator> visit(UnionOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+

+		parents.add(AlgebricksParentOperator);

+		if (operator.getParentOperators().size() > parents.size()) {

+			return null;

+		}

+

+		List<LogicalVariable> leftVars = new ArrayList<LogicalVariable>();

+		List<LogicalVariable> rightVars = new ArrayList<LogicalVariable>();

+

+		VariableUtilities.getUsedVariables(parents.get(0).getValue(), leftVars);

+		VariableUtilities

+				.getUsedVariables(parents.get(1).getValue(), rightVars);

+

+		List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> triples = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();

+		List<LogicalVariable> unionVars = new ArrayList<LogicalVariable>();

+

+		for (int i = 0; i < leftVars.size(); i++) {

+			LogicalVariable unionVar = t.getVariable(leftVars.get(i).getId()

+					+ "union" + AlgebricksParentOperator.hashCode(),

+					TypeInfoFactory.unknownTypeInfo);

+			unionVars.add(unionVar);

+			Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(

+					leftVars.get(i), rightVars.get(i), unionVar);

+			t.replaceVariable(leftVars.get(i), unionVar);

+			t.replaceVariable(rightVars.get(i), unionVar);

+			triples.add(triple);

+		}

+		ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator(

+				triples);

+		for (Mutable<ILogicalOperator> parent : parents)

+			currentOperator.getInputs().add(parent);

+

+		t.rewriteOperatorOutputSchema(unionVars, operator);

+		parents.clear();

+		return new MutableObject<ILogicalOperator>(currentOperator);

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java
new file mode 100644
index 0000000..20013e3
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java
@@ -0,0 +1,166 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor.base;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.CollectOperator;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.exec.ForwardOperator;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.MapOperator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.ScriptOperator;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+

+/**

+ * a default empty implementation of visitor

+ * 

+ * @author yingyib

+ */

+public class DefaultVisitor implements Visitor {

+

+	@Override

+	public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(

+			LateralViewForwardOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(

+			LateralViewJoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(UDTFOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public Mutable<ILogicalOperator> visit(UnionOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t)

+			throws AlgebricksException {

+		return null;

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java
new file mode 100644
index 0000000..9165386
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java
@@ -0,0 +1,174 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor.base;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+

+@SuppressWarnings("rawtypes")

+public interface Translator {

+

+	/**

+	 * generate input schema

+	 * 

+	 * @param operator

+	 * @return

+	 */

+	public Schema generateInputSchema(Operator operator);

+

+	/**

+	 * rewrite the names of output columns for feature expression evaluators to

+	 * use

+	 * 

+	 * @param operator

+	 */

+	public void rewriteOperatorOutputSchema(List<LogicalVariable> vars,

+			Operator operator);

+

+	/**

+	 * rewrite the names of output columns for feature expression evaluators to

+	 * use

+	 * 

+	 * @param operator

+	 */

+	public void rewriteOperatorOutputSchema(Operator operator);

+

+	/**

+	 * rewrite an expression and substitute variables

+	 * 

+	 * @param expr

+	 *            hive expression

+	 */

+	public void rewriteExpression(ExprNodeDesc expr);

+

+	/**

+	 * rewrite an expression and substitute variables

+	 * 

+	 * @param expr

+	 *            hive expression

+	 */

+	public void rewriteExpressionPartial(ExprNodeDesc expr);

+

+	/**

+	 * get an assign operator as a child of parent

+	 * 

+	 * @param parent

+	 * @param cols

+	 * @param variables

+	 * @return

+	 */

+	public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent,

+			List<ExprNodeDesc> cols, ArrayList<LogicalVariable> variables);

+

+	/**

+	 * get type for a logical variable

+	 * 

+	 * @param var

+	 * @return type info

+	 */

+	public TypeInfo getType(LogicalVariable var);

+

+	/**

+	 * translate an expression from hive to Algebricks

+	 * 

+	 * @param desc

+	 * @return

+	 */

+	public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc desc);

+

+	/**

+	 * translate an aggregation from hive to Algebricks

+	 * 

+	 * @param aggregateDesc

+	 * @return

+	 */

+	public Mutable<ILogicalExpression> translateAggregation(

+			AggregationDesc aggregateDesc);

+

+	/**

+	 * translate unnesting (UDTF) function expression

+	 * 

+	 * @param udtfDesc

+	 * @return

+	 */

+	public Mutable<ILogicalExpression> translateUnnestFunction(

+			UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument);

+

+	/**

+	 * get variable from a schema

+	 * 

+	 * @param schema

+	 * @return

+	 */

+	public List<LogicalVariable> getVariablesFromSchema(Schema schema);

+

+	/**

+	 * get variable from name

+	 * 

+	 * @param name

+	 * @return

+	 */

+	public LogicalVariable getVariable(String name);

+

+	/**

+	 * get variable from field name

+	 * 

+	 * @param name

+	 * @return

+	 */

+	public LogicalVariable getVariableFromFieldName(String name);

+

+	/**

+	 * get variable from name, type

+	 * 

+	 * @param fieldName

+	 * @param type

+	 * @return

+	 */

+	public LogicalVariable getVariable(String fieldName, TypeInfo type);

+

+	/**

+	 * get new variable from name, type

+	 * 

+	 * @param fieldName

+	 * @param type

+	 * @return

+	 */

+	public LogicalVariable getNewVariable(String fieldName, TypeInfo type);

+

+	/**

+	 * set the metadata provider

+	 * 

+	 * @param metadata

+	 */

+	public void setMetadataProvider(

+			IMetadataProvider<PartitionDesc, Object> metadata);

+

+	/**

+	 * get the metadata provider

+	 * 

+	 * @return the metadata provider

+	 */

+	public IMetadataProvider<PartitionDesc, Object> getMetadataProvider();

+

+	/**

+	 * replace the variable

+	 * 

+	 * @param oldVar

+	 * @param newVar

+	 */

+	public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar);

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java
new file mode 100644
index 0000000..745f93e
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java
@@ -0,0 +1,106 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor.base;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.CollectOperator;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.exec.ForwardOperator;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.MapOperator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.ScriptOperator;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+

+public interface Visitor {

+

+	public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(

+			LateralViewForwardOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(

+			LateralViewJoinOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(UDTFOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+

+	public Mutable<ILogicalOperator> visit(UnionOperator operator,

+			Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t)

+			throws AlgebricksException;

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
new file mode 100644
index 0000000..4ebea0a
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
@@ -0,0 +1,114 @@
+package edu.uci.ics.hivesterix.optimizer.rulecollections;

+

+import java.util.LinkedList;

+

+import edu.uci.ics.hivesterix.optimizer.rules.InsertProjectBeforeWriteRule;

+import edu.uci.ics.hivesterix.optimizer.rules.IntroduceEarlyProjectRule;

+import edu.uci.ics.hivesterix.optimizer.rules.LocalGroupByRule;

+import edu.uci.ics.hivesterix.optimizer.rules.RemoveRedundantSelectRule;

+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.HeuristicOptimizer;

+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.BreakSelectIntoConjunctsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexJoinInferenceRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateAssignsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateSelectsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceStructuralPropertiesRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractCommonOperatorsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractGbyExpressionsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.FactorRedundantGroupAndDecorVarsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InferTypesRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineVariablesRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InsertProjectBeforeUnionRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceAggregateCombinerRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByCombinerRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IsolateHyracksOperatorsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushLimitDownRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectIntoDataSourceScanRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectDownRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectIntoJoinRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ReinferAllTypesRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveRedundantProjectionRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveUnusedAssignAndAggregateRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetAlgebricksPhysicalOperatorsRule;

+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetExecutionModeRule;

+

+public final class HiveRuleCollections {

+

+	public final static LinkedList<IAlgebraicRewriteRule> NORMALIZATION = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		NORMALIZATION.add(new EliminateSubplanRule());

+		NORMALIZATION.add(new IntroduceAggregateCombinerRule());

+		NORMALIZATION.add(new BreakSelectIntoConjunctsRule());

+		NORMALIZATION.add(new IntroduceAggregateCombinerRule());

+		NORMALIZATION.add(new PushSelectIntoJoinRule());

+		NORMALIZATION.add(new ExtractGbyExpressionsRule());

+		NORMALIZATION.add(new RemoveRedundantSelectRule());

+	}

+

+	public final static LinkedList<IAlgebraicRewriteRule> COND_PUSHDOWN_AND_JOIN_INFERENCE = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new PushSelectDownRule());

+		COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new InlineVariablesRule());

+		COND_PUSHDOWN_AND_JOIN_INFERENCE

+				.add(new FactorRedundantGroupAndDecorVarsRule());

+		COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new EliminateSubplanRule());

+	}

+

+	public final static LinkedList<IAlgebraicRewriteRule> LOAD_FIELDS = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		// should LoadRecordFieldsRule be applied in only one pass over the

+		// plan?

+		LOAD_FIELDS.add(new InlineVariablesRule());

+		// LOAD_FIELDS.add(new RemoveUnusedAssignAndAggregateRule());

+		LOAD_FIELDS.add(new ComplexJoinInferenceRule());

+		LOAD_FIELDS.add(new InferTypesRule());

+	}

+

+	public final static LinkedList<IAlgebraicRewriteRule> OP_PUSHDOWN = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		OP_PUSHDOWN.add(new PushProjectDownRule());

+		OP_PUSHDOWN.add(new PushSelectDownRule());

+	}

+

+	public final static LinkedList<IAlgebraicRewriteRule> DATA_EXCHANGE = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		DATA_EXCHANGE.add(new SetExecutionModeRule());

+	}

+

+	public final static LinkedList<IAlgebraicRewriteRule> CONSOLIDATION = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		CONSOLIDATION.add(new RemoveRedundantProjectionRule());

+		CONSOLIDATION.add(new ConsolidateSelectsRule());

+		CONSOLIDATION.add(new IntroduceEarlyProjectRule());

+		CONSOLIDATION.add(new ConsolidateAssignsRule());

+		CONSOLIDATION.add(new IntroduceGroupByCombinerRule());

+		CONSOLIDATION.add(new RemoveUnusedAssignAndAggregateRule());

+	}

+

+	public final static LinkedList<IAlgebraicRewriteRule> PHYSICAL_PLAN_REWRITES = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		PHYSICAL_PLAN_REWRITES.add(new PullSelectOutOfEqJoin());

+		PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());

+		PHYSICAL_PLAN_REWRITES.add(new EnforceStructuralPropertiesRule());

+		PHYSICAL_PLAN_REWRITES.add(new PushProjectDownRule());

+		PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());

+		PHYSICAL_PLAN_REWRITES.add(new PushLimitDownRule());

+		PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeWriteRule());

+		PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeUnionRule());

+	}

+

+	public final static LinkedList<IAlgebraicRewriteRule> prepareJobGenRules = new LinkedList<IAlgebraicRewriteRule>();

+	static {

+		prepareJobGenRules.add(new ReinferAllTypesRule());

+		prepareJobGenRules.add(new IsolateHyracksOperatorsRule(

+				HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled));

+		prepareJobGenRules.add(new ExtractCommonOperatorsRule());

+		prepareJobGenRules.add(new LocalGroupByRule());

+		prepareJobGenRules.add(new PushProjectIntoDataSourceScanRule());

+		prepareJobGenRules.add(new ReinferAllTypesRule());

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java
new file mode 100644
index 0000000..c58982e
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java
@@ -0,0 +1,85 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class InsertProjectBeforeWriteRule implements IAlgebraicRewriteRule {
+
+	@Override
+	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) {
+		return false;
+	}
+
+	/**
+	 * When the input schema to WriteOperator is different from the output
+	 * schema in terms of variable order, add a project operator to get the
+	 * write order
+	 */
+	@Override
+	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) throws AlgebricksException {
+		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+		if (op.getOperatorTag() != LogicalOperatorTag.WRITE) {
+			return false;
+		}
+		WriteOperator opWrite = (WriteOperator) op;
+		ArrayList<LogicalVariable> finalSchema = new ArrayList<LogicalVariable>();
+		VariableUtilities.getUsedVariables(opWrite, finalSchema);
+		ArrayList<LogicalVariable> inputSchema = new ArrayList<LogicalVariable>();
+		VariableUtilities.getLiveVariables(opWrite, inputSchema);
+		if (!isIdentical(finalSchema, inputSchema)) {
+			ProjectOperator projectOp = new ProjectOperator(finalSchema);
+			Mutable<ILogicalOperator> parentOpRef = opWrite.getInputs().get(0);
+			projectOp.getInputs().add(parentOpRef);
+			opWrite.getInputs().clear();
+			opWrite.getInputs().add(
+					new MutableObject<ILogicalOperator>(projectOp));
+			projectOp.setPhysicalOperator(new StreamProjectPOperator());
+			projectOp.setExecutionMode(ExecutionMode.PARTITIONED);
+
+			AbstractLogicalOperator op2 = (AbstractLogicalOperator) parentOpRef
+					.getValue();
+			if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+				ProjectOperator pi2 = (ProjectOperator) op2;
+				parentOpRef.setValue(pi2.getInputs().get(0).getValue());
+			}
+			context.computeAndSetTypeEnvironmentForOperator(projectOp);
+			return true;
+		} else
+			return false;
+
+	}
+
+	private boolean isIdentical(List<LogicalVariable> finalSchema,
+			List<LogicalVariable> inputSchema) {
+		int finalSchemaSize = finalSchema.size();
+		int inputSchemaSize = inputSchema.size();
+		if (finalSchemaSize != inputSchemaSize)
+			throw new IllegalStateException(
+					"final output schema variables missing!");
+		for (int i = 0; i < finalSchemaSize; i++) {
+			LogicalVariable var1 = finalSchema.get(i);
+			LogicalVariable var2 = inputSchema.get(i);
+			if (!var1.equals(var2))
+				return false;
+		}
+		return true;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java
new file mode 100644
index 0000000..2bebe81
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java
@@ -0,0 +1,77 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceEarlyProjectRule implements IAlgebraicRewriteRule {
+
+	@Override
+	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) throws AlgebricksException {
+		return false;
+	}
+
+	@Override
+	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) throws AlgebricksException {
+		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+		if (op.getOperatorTag() != LogicalOperatorTag.PROJECT) {
+			return false;
+		}
+		AbstractLogicalOperator middleOp = (AbstractLogicalOperator) op
+				.getInputs().get(0).getValue();
+		List<LogicalVariable> deliveredVars = new ArrayList<LogicalVariable>();
+		List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+		List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+
+		VariableUtilities.getUsedVariables(op, deliveredVars);
+		VariableUtilities.getUsedVariables(middleOp, usedVars);
+		VariableUtilities.getProducedVariables(middleOp, producedVars);
+
+		Set<LogicalVariable> requiredVariables = new HashSet<LogicalVariable>();
+		requiredVariables.addAll(deliveredVars);
+		requiredVariables.addAll(usedVars);
+		requiredVariables.removeAll(producedVars);
+
+		if (middleOp.getInputs().size() <= 0 || middleOp.getInputs().size() > 1)
+			return false;
+
+		AbstractLogicalOperator targetOp = (AbstractLogicalOperator) middleOp
+				.getInputs().get(0).getValue();
+		if (targetOp.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN)
+			return false;
+
+		Set<LogicalVariable> deliveredEarlyVars = new HashSet<LogicalVariable>();
+		VariableUtilities.getLiveVariables(targetOp, deliveredEarlyVars);
+
+		deliveredEarlyVars.removeAll(requiredVariables);
+		if (deliveredEarlyVars.size() > 0) {
+			ArrayList<LogicalVariable> requiredVars = new ArrayList<LogicalVariable>();
+			requiredVars.addAll(requiredVariables);
+			ILogicalOperator earlyProjectOp = new ProjectOperator(requiredVars);
+			Mutable<ILogicalOperator> earlyProjectOpRef = new MutableObject<ILogicalOperator>(
+					earlyProjectOp);
+			Mutable<ILogicalOperator> targetRef = middleOp.getInputs().get(0);
+			middleOp.getInputs().set(0, earlyProjectOpRef);
+			earlyProjectOp.getInputs().add(targetRef);
+			context.computeAndSetTypeEnvironmentForOperator(earlyProjectOp);
+			return true;
+		}
+		return false;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java
new file mode 100644
index 0000000..72cbe21
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java
@@ -0,0 +1,71 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hivesterix.logical.plan.HiveOperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.OneToOneExchangePOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class LocalGroupByRule implements IAlgebraicRewriteRule {
+
+	@Override
+	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) throws AlgebricksException {
+		return false;
+	}
+
+	@Override
+	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) throws AlgebricksException {
+		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+		if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
+			return false;
+		}
+		Boolean localGby = (Boolean) op.getAnnotations().get(
+				HiveOperatorAnnotations.LOCAL_GROUP_BY);
+		if (localGby != null && localGby.equals(Boolean.TRUE)) {
+			Boolean hashGby = (Boolean) op.getAnnotations().get(
+					OperatorAnnotations.USE_HASH_GROUP_BY);
+			Boolean externalGby = (Boolean) op.getAnnotations().get(
+					OperatorAnnotations.USE_EXTERNAL_GROUP_BY);
+			if ((hashGby != null && (hashGby.equals(Boolean.TRUE)) || (externalGby != null && externalGby
+					.equals(Boolean.TRUE)))) {
+				reviseExchange(op);
+			} else {
+				ILogicalOperator child = op.getInputs().get(0).getValue();
+				AbstractLogicalOperator childOp = (AbstractLogicalOperator) child;
+				while (child.getInputs().size() > 0) {
+					if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
+						break;
+					else {
+						child = child.getInputs().get(0).getValue();
+						childOp = (AbstractLogicalOperator) child;
+					}
+				}
+				if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
+					reviseExchange(childOp);
+			}
+			return true;
+		}
+		return false;
+	}
+
+	private void reviseExchange(AbstractLogicalOperator op) {
+		ExchangeOperator exchange = (ExchangeOperator) op.getInputs().get(0)
+				.getValue();
+		IPhysicalOperator physicalOp = exchange.getPhysicalOperator();
+		if (physicalOp.getOperatorTag() == PhysicalOperatorTag.HASH_PARTITION_EXCHANGE) {
+			exchange.setPhysicalOperator(new OneToOneExchangePOperator());
+		}
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java
new file mode 100644
index 0000000..9958ba8
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java
@@ -0,0 +1,46 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class RemoveRedundantSelectRule implements IAlgebraicRewriteRule {
+
+	@Override
+	public boolean rewritePre(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) throws AlgebricksException {
+		return false;
+	}
+
+	@Override
+	public boolean rewritePost(Mutable<ILogicalOperator> opRef,
+			IOptimizationContext context) throws AlgebricksException {
+		AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+		if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+			return false;
+		}
+		AbstractLogicalOperator inputOp = (AbstractLogicalOperator) op
+				.getInputs().get(0).getValue();
+		if (inputOp.getOperatorTag() != LogicalOperatorTag.SELECT) {
+			return false;
+		}
+		SelectOperator selectOp = (SelectOperator) op;
+		SelectOperator inputSelectOp = (SelectOperator) inputOp;
+		ILogicalExpression expr1 = selectOp.getCondition().getValue();
+		ILogicalExpression expr2 = inputSelectOp.getCondition().getValue();
+
+		if (expr1.equals(expr2)) {
+			selectOp.getInputs().set(0, inputSelectOp.getInputs().get(0));
+			return true;
+		}
+		return false;
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/config/ConfUtil.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/config/ConfUtil.java
new file mode 100644
index 0000000..6b4d697
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/config/ConfUtil.java
@@ -0,0 +1,144 @@
+package edu.uci.ics.hivesterix.runtime.config;

+

+import java.net.InetAddress;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+import java.util.Map;

+

+import org.apache.hadoop.fs.Path;

+import org.apache.hadoop.hive.conf.HiveConf;

+import org.apache.hadoop.hive.ql.session.SessionState;

+import org.apache.hadoop.mapred.FileInputFormat;

+import org.apache.hadoop.mapred.InputFormat;

+import org.apache.hadoop.mapred.JobConf;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.api.client.HyracksConnection;

+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;

+import edu.uci.ics.hyracks.api.topology.ClusterTopology;

+

+@SuppressWarnings({ "rawtypes", "deprecation" })

+public class ConfUtil {

+

+	private static JobConf job;

+	private static HiveConf hconf;

+	private static String[] NCs;

+	private static Map<String, List<String>> ncMapping;

+	private static IHyracksClientConnection hcc = null;

+	private static ClusterTopology topology = null;

+

+	public static JobConf getJobConf(Class<? extends InputFormat> format,

+			Path path) {

+		JobConf conf = new JobConf();

+		if (job != null)

+			conf = job;

+

+		String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");

+		Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");

+		conf.addResource(pathCore);

+		Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");

+		conf.addResource(pathMapRed);

+		Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");

+		conf.addResource(pathHDFS);

+

+		conf.setInputFormat(format);

+		FileInputFormat.setInputPaths(conf, path);

+		return conf;

+	}

+

+	public static JobConf getJobConf() {

+		JobConf conf = new JobConf();

+		if (job != null)

+			conf = job;

+

+		String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");

+		Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");

+		conf.addResource(pathCore);

+		Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");

+		conf.addResource(pathMapRed);

+		Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");

+		conf.addResource(pathHDFS);

+

+		return conf;

+	}

+

+	public static void setJobConf(JobConf conf) {

+		job = conf;

+	}

+

+	public static void setHiveConf(HiveConf hiveConf) {

+		hconf = hiveConf;

+	}

+

+	public static HiveConf getHiveConf() {

+		if (hconf == null) {

+			hconf = new HiveConf(SessionState.class);

+			hconf.addResource(new Path("conf/hive-default.xml"));

+		}

+		return hconf;

+	}

+

+	public static String[] getNCs() throws AlgebricksException {

+		if (NCs == null) {

+			try {

+				loadClusterConfig();

+			} catch (Exception e) {

+				throw new AlgebricksException(e);

+			}

+		}

+		return NCs;

+	}

+

+	public static Map<String, List<String>> getNCMapping()

+			throws AlgebricksException {

+		if (ncMapping == null) {

+			try {

+				loadClusterConfig();

+			} catch (Exception e) {

+				throw new AlgebricksException(e);

+			}

+		}

+		return ncMapping;

+	}

+

+	private static void loadClusterConfig() {

+		try {

+			getHiveConf();

+			String ipAddress = hconf.get("hive.hyracks.host");

+			int port = Integer.parseInt(hconf.get("hive.hyracks.port"));

+			int mpl = Integer.parseInt(hconf.get("hive.hyracks.parrallelism"));

+			hcc = new HyracksConnection(ipAddress, port);

+			topology = hcc.getClusterTopology();

+			Map<String, NodeControllerInfo> ncNameToNcInfos = hcc

+					.getNodeControllerInfos();

+			NCs = new String[ncNameToNcInfos.size() * mpl];

+			ncMapping = new HashMap<String, List<String>>();

+			int i = 0;

+			for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos

+					.entrySet()) {

+				String ipAddr = InetAddress.getByAddress(

+						entry.getValue().getNetworkAddress().getIpAddress())

+						.getHostAddress();

+				List<String> matchedNCs = ncMapping.get(ipAddr);

+				if (matchedNCs == null) {

+					matchedNCs = new ArrayList<String>();

+					ncMapping.put(ipAddr, matchedNCs);

+				}

+				matchedNCs.add(entry.getKey());

+				for (int j = i * mpl; j < i * mpl + mpl; j++)

+					NCs[j] = entry.getKey();

+				i++;

+			}

+		} catch (Exception e) {

+			throw new IllegalStateException(e);

+		}

+	}

+

+	public static ClusterTopology getClusterTopology() {

+		if (topology == null)

+			loadClusterConfig();

+		return topology;

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java
new file mode 100644
index 0000000..8f6d9ca
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java
@@ -0,0 +1,174 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import java.io.DataOutput;

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;

+import org.apache.hadoop.hive.ql.metadata.HiveException;

+import org.apache.hadoop.hive.serde2.SerDe;

+import org.apache.hadoop.hive.serde2.SerDeException;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+import org.apache.hadoop.io.BytesWritable;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;

+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;

+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

+

+public abstract class AbstractExpressionEvaluator implements ICopyEvaluator {

+

+	private List<ICopyEvaluator> children;

+

+	private ExprNodeEvaluator evaluator;

+

+	private IDataOutputProvider out;

+

+	private ObjectInspector inspector;

+

+	/**

+	 * output object inspector

+	 */

+	private ObjectInspector outputInspector;

+

+	/**

+	 * cached row object

+	 */

+	private LazyObject<? extends ObjectInspector> cachedRowObject;

+

+	/**

+	 * serializer/deserializer for lazy object

+	 */

+	private SerDe lazySer;

+

+	/**

+	 * data output

+	 */

+	DataOutput dataOutput;

+

+	public AbstractExpressionEvaluator(ExprNodeEvaluator hiveEvaluator,

+			ObjectInspector oi, IDataOutputProvider output)

+			throws AlgebricksException {

+		evaluator = hiveEvaluator;

+		out = output;

+		inspector = oi;

+		dataOutput = out.getDataOutput();

+	}

+

+	protected ObjectInspector getRowInspector() {

+		return null;

+	}

+

+	protected IDataOutputProvider getIDataOutputProvider() {

+		return out;

+	}

+

+	protected ExprNodeEvaluator getHiveEvaluator() {

+		return evaluator;

+	}

+

+	public ObjectInspector getObjectInspector() {

+		return inspector;

+	}

+

+	@Override

+	public void evaluate(IFrameTupleReference r) throws AlgebricksException {

+		// initialize hive evaluator

+		try {

+			if (outputInspector == null)

+				outputInspector = evaluator.initialize(inspector);

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new AlgebricksException(e.getMessage());

+		}

+

+		readIntoCache(r);

+		try {

+			Object result = evaluator.evaluate(cachedRowObject);

+

+			// if (result == null) {

+			// result = evaluator.evaluate(cachedRowObject);

+			//

+			// // check if result is null

+			//

+			// String errorMsg = "serialize null object in  \n output " +

+			// outputInspector.toString() + " \n input "

+			// + inspector.toString() + "\n ";

+			// errorMsg += "";

+			// List<Object> columns = ((StructObjectInspector)

+			// inspector).getStructFieldsDataAsList(cachedRowObject);

+			// for (Object column : columns) {

+			// errorMsg += column.toString() + " ";

+			// }

+			// errorMsg += "\n";

+			// Log.info(errorMsg);

+			// System.out.println(errorMsg);

+			// // result = new BooleanWritable(true);

+			// throw new IllegalStateException(errorMsg);

+			// }

+

+			serializeResult(result);

+		} catch (HiveException e) {

+			e.printStackTrace();

+			throw new AlgebricksException(e.getMessage());

+		} catch (IOException e) {

+			e.printStackTrace();

+			throw new AlgebricksException(e.getMessage());

+		}

+	}

+

+	/**

+	 * serialize the result

+	 * 

+	 * @param result

+	 *            the evaluation result

+	 * @throws IOException

+	 * @throws AlgebricksException

+	 */

+	private void serializeResult(Object result) throws IOException,

+			AlgebricksException {

+		if (lazySer == null)

+			lazySer = new LazySerDe();

+

+		try {

+			BytesWritable outputWritable = (BytesWritable) lazySer.serialize(

+					result, outputInspector);

+			dataOutput.write(outputWritable.getBytes(), 0,

+					outputWritable.getLength());

+		} catch (SerDeException e) {

+			throw new AlgebricksException(e);

+		}

+	}

+

+	/**

+	 * bind the tuple reference to the cached row object

+	 * 

+	 * @param r

+	 */

+	private void readIntoCache(IFrameTupleReference r) {

+		if (cachedRowObject == null)

+			cachedRowObject = (LazyObject<? extends ObjectInspector>) LazyFactory

+					.createLazyObject(inspector);

+		cachedRowObject.init(r);

+	}

+

+	/**

+	 * set a list of children of this evaluator

+	 * 

+	 * @param children

+	 */

+	public void setChildren(List<ICopyEvaluator> children) {

+		this.children = children;

+	}

+

+	public void addChild(ICopyEvaluator child) {

+		if (children == null)

+			children = new ArrayList<ICopyEvaluator>();

+		children.add(child);

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java
new file mode 100644
index 0000000..271b5e4
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java
@@ -0,0 +1,231 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import java.io.DataOutput;

+import java.io.IOException;

+import java.util.List;

+

+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;

+import org.apache.hadoop.hive.ql.metadata.HiveException;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;

+import org.apache.hadoop.hive.serde2.SerDe;

+import org.apache.hadoop.hive.serde2.SerDeException;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.io.BytesWritable;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunction;

+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

+

/**
 * A Hyracks copy-aggregate function that delegates the actual aggregation to
 * a Hive {@link GenericUDAFEvaluator}. Each input tuple is bound to a lazy
 * row object, its parameter expressions are evaluated, and the values are
 * accumulated into a Hive aggregation buffer; final/partial results are
 * serialized with a lazy SerDe onto the output channel.
 */
public class AggregationFunctionEvaluator implements ICopyAggregateFunction {

	/**
	 * the mode of aggregation function (PARTIAL1, PARTIAL2, FINAL or COMPLETE)
	 */
	private GenericUDAFEvaluator.Mode mode;

	/**
	 * an array of evaluators, one per aggregate parameter expression
	 */
	private ExprNodeEvaluator[] evaluators;

	/**
	 * udaf evaluator used for the partial (iterate/merge/terminatePartial) side
	 */
	private GenericUDAFEvaluator udafPartial;

	/**
	 * udaf evaluator used to produce the complete (terminate) result
	 */
	private GenericUDAFEvaluator udafComplete;

	/**
	 * cached parameter objects, reused across rows to avoid allocation
	 */
	private Object[] cachedParameters;

	/**
	 * cached row object; rebound to each incoming frame tuple
	 */
	private LazyObject<? extends ObjectInspector> cachedRowObject;

	/**
	 * the output channel
	 */
	private DataOutput out;

	/**
	 * aggregation buffer holding the running aggregation state
	 */
	private AggregationBuffer aggBuffer;

	/**
	 * we only use lazy serde to do serialization
	 */
	private SerDe lazySer;

	/**
	 * the output object inspector for the complete aggregation result
	 */
	private ObjectInspector outputInspector;

	/**
	 * the output object inspector for the partial aggregation result
	 */
	private ObjectInspector outputInspectorPartial;

	/**
	 * parameter inspectors for the aggregate's input expressions
	 */
	private ObjectInspector[] parameterInspectors;

	/**
	 * Build an evaluator; all mutable collaborators are passed in so that the
	 * aggregation function has least object creation per row.
	 *
	 * @param inputs
	 *            parameter expression descriptors (unused here; kept for the
	 *            caller's construction protocol)
	 * @param inputTypes
	 *            parameter type infos (unused here)
	 * @param genericUDAFName
	 *            name of the Hive UDAF (unused here)
	 * @param aggMode
	 *            the Hive aggregation mode
	 * @param distinct
	 *            whether the aggregate is DISTINCT (unused here)
	 * @param oi
	 *            row object inspector (unused here)
	 * @param output
	 *            channel the serialized result is written to (shared across
	 *            threads)
	 * @param evals
	 *            thread-local parameter evaluators
	 * @param pInspectors
	 *            parameter object inspectors
	 * @param parameterCache
	 *            reusable parameter value array
	 * @param serde
	 *            lazy SerDe used only for serialization
	 * @param row
	 *            thread-local lazy row object
	 * @param udafunctionPartial
	 *            evaluator for the partial side
	 * @param udafunctionComplete
	 *            evaluator for the complete side
	 * @param outputOi
	 *            inspector for the complete result
	 * @param outputOiPartial
	 *            inspector for the partial result
	 */
	public AggregationFunctionEvaluator(List<ExprNodeDesc> inputs,
			List<TypeInfo> inputTypes, String genericUDAFName,
			GenericUDAFEvaluator.Mode aggMode, boolean distinct,
			ObjectInspector oi, DataOutput output, ExprNodeEvaluator[] evals,
			ObjectInspector[] pInspectors, Object[] parameterCache,
			SerDe serde, LazyObject<? extends ObjectInspector> row,
			GenericUDAFEvaluator udafunctionPartial,
			GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi,
			ObjectInspector outputOiPartial) {
		// shared object across threads
		this.out = output;
		this.mode = aggMode;
		this.parameterInspectors = pInspectors;

		// thread local objects
		this.evaluators = evals;
		this.cachedParameters = parameterCache;
		this.cachedRowObject = row;
		this.lazySer = serde;
		this.udafPartial = udafunctionPartial;
		this.udafComplete = udafunctionComplete;
		this.outputInspector = outputOi;
		this.outputInspectorPartial = outputOiPartial;
	}

	/**
	 * Starts a new aggregation by allocating a fresh aggregation buffer.
	 */
	@Override
	public void init() throws AlgebricksException {
		try {
			aggBuffer = udafPartial.getNewAggregationBuffer();
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * Accumulates one input tuple into the aggregation buffer.
	 */
	@Override
	public void step(IFrameTupleReference tuple) throws AlgebricksException {
		readIntoCache(tuple);
		processRow();
	}

	/**
	 * Evaluates the parameter expressions against the cached row and feeds
	 * the values to the UDAF.
	 */
	private void processRow() throws AlgebricksException {
		try {
			// get values by evaluating them
			for (int i = 0; i < cachedParameters.length; i++) {
				cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
			}
			processAggregate();
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		}
	}

	private void processAggregate() throws HiveException {
		/**
		 * accumulate the aggregation function: iterate over raw input in the
		 * first phase, merge partial results in the second phase
		 */
		switch (mode) {
		case PARTIAL1:
		case COMPLETE:
			udafPartial.iterate(aggBuffer, cachedParameters);
			break;
		case PARTIAL2:
		case FINAL:
			if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
				// NOTE(review): count's merge expects the primitive writable,
				// not the lazy object, hence the special case — confirm this
				// still holds against the Hive version in use
				Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
						.getPrimitiveWritableObject(cachedParameters[0]);
				udafPartial.merge(aggBuffer, parameter);
			} else
				udafPartial.merge(aggBuffer, cachedParameters[0]);
			break;
		default:
			break;
		}
	}

	/**
	 * serialize the result
	 * 
	 * @param result
	 *            the evaluation result
	 * @param oi
	 *            the object inspector describing the result
	 * @throws IOException
	 * @throws AlgebricksException
	 */
	private void serializeResult(Object result, ObjectInspector oi)
			throws IOException, AlgebricksException {
		try {
			BytesWritable outputWritable = (BytesWritable) lazySer.serialize(
					result, oi);
			out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
		} catch (SerDeException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * bind the tuple reference to the cached row object
	 * 
	 * @param r
	 */
	private void readIntoCache(IFrameTupleReference r) {
		cachedRowObject.init(r);
	}

	/**
	 * Emits the aggregation result: the complete (terminate) result in
	 * COMPLETE/FINAL mode, the partial (terminatePartial) result otherwise.
	 */
	@Override
	public void finish() throws AlgebricksException {
		// aggregator
		try {
			Object result = null;
			result = udafPartial.terminatePartial(aggBuffer);
			if (mode == GenericUDAFEvaluator.Mode.COMPLETE
					|| mode == GenericUDAFEvaluator.Mode.FINAL) {
				// NOTE(review): the terminatePartial result above is discarded
				// on this path; kept as-is in case terminatePartial has side
				// effects on the buffer for some UDAFs
				result = udafComplete.terminate(aggBuffer);
				serializeResult(result, outputInspector);
			} else {
				serializeResult(result, outputInspectorPartial);
			}
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		} catch (IOException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * Emits the partial aggregation result regardless of mode.
	 */
	@Override
	public void finishPartial() throws AlgebricksException {
		// aggregator.
		try {
			Object result = null;
			// get aggregations
			result = udafPartial.terminatePartial(aggBuffer);
			serializeResult(result, outputInspectorPartial);
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		} catch (IOException e) {
			throw new AlgebricksException(e);
		}
	}
}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java
new file mode 100644
index 0000000..032437b
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java
@@ -0,0 +1,259 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.BytesWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunction;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
/**
 * A serializable variant of the Hive UDAF adapter: the aggregation state
 * lives inside a Hyracks frame as raw bytes and is deserialized into the
 * buffer before each operation and serialized back afterwards.
 * <p>
 * NOTE(review): the class name is misspelled ("Aggregatuib" instead of
 * "Aggregation"); renaming would break existing callers, so it is kept.
 */
public class AggregatuibFunctionSerializableEvaluator implements
		ICopySerializableAggregateFunction {

	/**
	 * the mode of aggregation function (PARTIAL1, PARTIAL2, FINAL or COMPLETE)
	 */
	private GenericUDAFEvaluator.Mode mode;

	/**
	 * an array of evaluators, one per aggregate parameter expression
	 */
	private ExprNodeEvaluator[] evaluators;

	/**
	 * udaf evaluator used for the partial (iterate/merge/terminatePartial) side
	 */
	private GenericUDAFEvaluator udafPartial;

	/**
	 * udaf evaluator used to produce the complete (terminate) result
	 */
	private GenericUDAFEvaluator udafComplete;

	/**
	 * cached parameter objects, reused across rows to avoid allocation
	 */
	private Object[] cachedParameters;

	/**
	 * cached row object; rebound to each incoming frame tuple
	 */
	private LazyObject<? extends ObjectInspector> cachedRowObject;

	/**
	 * aggregation buffer whose state can round-trip through raw bytes
	 */
	private SerializableBuffer aggBuffer;

	/**
	 * we only use lazy serde to do serialization
	 */
	private SerDe lazySer;

	/**
	 * the output object inspector for the complete aggregation result
	 */
	private ObjectInspector outputInspector;

	/**
	 * the output object inspector for the partial aggregation result
	 */
	private ObjectInspector outputInspectorPartial;

	/**
	 * parameter inspectors for the aggregate's input expressions
	 */
	private ObjectInspector[] parameterInspectors;

	/**
	 * Build an evaluator; collaborators are passed in so that the aggregation
	 * function has least object creation per row.
	 *
	 * @param inputs
	 *            parameter expression descriptors (unused here)
	 * @param inputTypes
	 *            parameter type infos (unused here)
	 * @param genericUDAFName
	 *            name of the Hive UDAF (unused here)
	 * @param aggMode
	 *            the Hive aggregation mode
	 * @param distinct
	 *            whether the aggregate is DISTINCT (unused here)
	 * @param oi
	 *            row object inspector (unused here)
	 * @param evals
	 *            thread-local parameter evaluators
	 * @param pInspectors
	 *            parameter object inspectors
	 * @param parameterCache
	 *            reusable parameter value array
	 * @param serde
	 *            lazy SerDe used only for serialization
	 * @param row
	 *            thread-local lazy row object
	 * @param udafunctionPartial
	 *            evaluator for the partial side
	 * @param udafunctionComplete
	 *            evaluator for the complete side
	 * @param outputOi
	 *            inspector for the complete result
	 * @param outputOiPartial
	 *            inspector for the partial result
	 * @throws AlgebricksException
	 *             if the UDAF cannot allocate a serializable buffer
	 */
	public AggregatuibFunctionSerializableEvaluator(List<ExprNodeDesc> inputs,
			List<TypeInfo> inputTypes, String genericUDAFName,
			GenericUDAFEvaluator.Mode aggMode, boolean distinct,
			ObjectInspector oi, ExprNodeEvaluator[] evals,
			ObjectInspector[] pInspectors, Object[] parameterCache,
			SerDe serde, LazyObject<? extends ObjectInspector> row,
			GenericUDAFEvaluator udafunctionPartial,
			GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi,
			ObjectInspector outputOiPartial) throws AlgebricksException {
		// shared object across threads
		this.mode = aggMode;
		this.parameterInspectors = pInspectors;

		// thread local objects
		this.evaluators = evals;
		this.cachedParameters = parameterCache;
		this.cachedRowObject = row;
		this.lazySer = serde;
		this.udafPartial = udafunctionPartial;
		this.udafComplete = udafunctionComplete;
		this.outputInspector = outputOi;
		this.outputInspectorPartial = outputOiPartial;

		try {
			// the buffer must implement SerializableBuffer so its state can
			// be stored inside frame bytes between calls
			aggBuffer = (SerializableBuffer) udafPartial
					.getNewAggregationBuffer();
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * Resets the buffer and writes its initial serialized state to output.
	 */
	@Override
	public void init(DataOutput output) throws AlgebricksException {
		try {
			udafPartial.reset(aggBuffer);
			outputAggBuffer(aggBuffer, output);
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * Accumulates one tuple: restores buffer state from the frame bytes,
	 * aggregates the tuple, then writes the updated state back in place.
	 */
	@Override
	public void step(IFrameTupleReference tuple, byte[] data, int start, int len)
			throws AlgebricksException {
		deSerializeAggBuffer(aggBuffer, data, start, len);
		readIntoCache(tuple);
		processRow();
		serializeAggBuffer(aggBuffer, data, start, len);
	}

	/**
	 * Evaluates the parameter expressions against the cached row and feeds
	 * the values to the UDAF.
	 */
	private void processRow() throws AlgebricksException {
		try {
			// get values by evaluating them
			for (int i = 0; i < cachedParameters.length; i++) {
				cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
			}
			processAggregate();
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		}
	}

	private void processAggregate() throws HiveException {
		/**
		 * accumulate the aggregation function: iterate over raw input in the
		 * first phase, merge partial results in the second phase
		 */
		switch (mode) {
		case PARTIAL1:
		case COMPLETE:
			udafPartial.iterate(aggBuffer, cachedParameters);
			break;
		case PARTIAL2:
		case FINAL:
			if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
				// NOTE(review): count's merge expects the primitive writable,
				// not the lazy object, hence the special case — confirm this
				// still holds against the Hive version in use
				Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
						.getPrimitiveWritableObject(cachedParameters[0]);
				udafPartial.merge(aggBuffer, parameter);
			} else
				udafPartial.merge(aggBuffer, cachedParameters[0]);
			break;
		default:
			break;
		}
	}

	/**
	 * serialize the result
	 * 
	 * @param result
	 *            the evaluation result
	 * @param oi
	 *            the object inspector describing the result
	 * @param out
	 *            channel the serialized bytes are written to
	 * @throws IOException
	 * @throws AlgebricksException
	 */
	private void serializeResult(Object result, ObjectInspector oi,
			DataOutput out) throws IOException, AlgebricksException {
		try {
			BytesWritable outputWritable = (BytesWritable) lazySer.serialize(
					result, oi);
			out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
		} catch (SerDeException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * bind the tuple reference to the cached row object
	 * 
	 * @param r
	 */
	private void readIntoCache(IFrameTupleReference r) {
		cachedRowObject.init(r);
	}

	/**
	 * Emits the aggregation result after restoring the buffer state: the
	 * complete (terminate) result in COMPLETE/FINAL mode, the partial
	 * (terminatePartial) result otherwise.
	 */
	@Override
	public void finish(byte[] data, int start, int len, DataOutput output)
			throws AlgebricksException {
		deSerializeAggBuffer(aggBuffer, data, start, len);
		// aggregator
		try {
			Object result = null;
			result = udafPartial.terminatePartial(aggBuffer);
			if (mode == GenericUDAFEvaluator.Mode.COMPLETE
					|| mode == GenericUDAFEvaluator.Mode.FINAL) {
				// NOTE(review): the terminatePartial result above is discarded
				// on this path; kept as-is in case terminatePartial has side
				// effects on the buffer for some UDAFs
				result = udafComplete.terminate(aggBuffer);
				serializeResult(result, outputInspector, output);
			} else {
				serializeResult(result, outputInspectorPartial, output);
			}
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		} catch (IOException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * Emits the partial aggregation result after restoring the buffer state.
	 */
	@Override
	public void finishPartial(byte[] data, int start, int len, DataOutput output)
			throws AlgebricksException {
		deSerializeAggBuffer(aggBuffer, data, start, len);
		// aggregator.
		try {
			Object result = null;
			// get aggregations
			result = udafPartial.terminatePartial(aggBuffer);
			serializeResult(result, outputInspectorPartial, output);
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		} catch (IOException e) {
			throw new AlgebricksException(e);
		}
	}

	/** Writes the buffer state into data[start, start + len). */
	private void serializeAggBuffer(SerializableBuffer buffer, byte[] data,
			int start, int len) throws AlgebricksException {
		buffer.serializeAggBuffer(data, start, len);
	}

	/** Restores the buffer state from data[start, start + len). */
	private void deSerializeAggBuffer(SerializableBuffer buffer, byte[] data,
			int start, int len) throws AlgebricksException {
		buffer.deSerializeAggBuffer(data, start, len);
	}

	/** Appends the buffer state to the given output channel. */
	private void outputAggBuffer(SerializableBuffer buffer, DataOutput out)
			throws AlgebricksException {
		try {
			buffer.serializeAggBuffer(out);
		} catch (IOException e) {
			throw new AlgebricksException(e);
		}
	}
}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java
new file mode 100644
index 0000000..d73be93
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java
@@ -0,0 +1,73 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
/**
 * Static helpers for reading and writing primitive values to/from a byte
 * array in big-endian byte order, used to (de)serialize aggregation-buffer
 * state stored inside Hyracks frames.
 * <p>
 * All methods assume the caller guarantees {@code offset} plus the value
 * width fits inside {@code bytes}; no bounds checking is performed here.
 */
public class BufferSerDeUtil {

	/** Utility class: not instantiable. */
	private BufferSerDeUtil() {
	}

	/** Reads a big-endian double starting at {@code offset}. */
	public static double getDouble(byte[] bytes, int offset) {
		return Double.longBitsToDouble(getLong(bytes, offset));
	}

	/** Reads a big-endian float starting at {@code offset}. */
	public static float getFloat(byte[] bytes, int offset) {
		return Float.intBitsToFloat(getInt(bytes, offset));
	}

	/** Reads a boolean: any non-zero byte is {@code true}. */
	public static boolean getBoolean(byte[] bytes, int offset) {
		if (bytes[offset] == 0)
			return false;
		else
			return true;
	}

	/** Reads a big-endian 4-byte int starting at {@code offset}. */
	public static int getInt(byte[] bytes, int offset) {
		return ((bytes[offset] & 0xff) << 24)
				+ ((bytes[offset + 1] & 0xff) << 16)
				+ ((bytes[offset + 2] & 0xff) << 8)
				+ ((bytes[offset + 3] & 0xff) << 0);
	}

	/** Reads a big-endian 8-byte long starting at {@code offset}. */
	public static long getLong(byte[] bytes, int offset) {
		return (((long) (bytes[offset] & 0xff)) << 56)
				+ (((long) (bytes[offset + 1] & 0xff)) << 48)
				+ (((long) (bytes[offset + 2] & 0xff)) << 40)
				+ (((long) (bytes[offset + 3] & 0xff)) << 32)
				+ (((long) (bytes[offset + 4] & 0xff)) << 24)
				+ (((long) (bytes[offset + 5] & 0xff)) << 16)
				+ (((long) (bytes[offset + 6] & 0xff)) << 8)
				+ (((long) (bytes[offset + 7] & 0xff)) << 0);
	}

	/** Writes a boolean as a single byte (1 for true, 0 for false). */
	public static void writeBoolean(boolean value, byte[] bytes, int offset) {
		if (value)
			bytes[offset] = (byte) 1;
		else
			bytes[offset] = (byte) 0;
	}

	/** Writes a 4-byte int in big-endian order starting at {@code offset}. */
	public static void writeInt(int value, byte[] bytes, int offset) {
		bytes[offset++] = (byte) (value >> 24);
		bytes[offset++] = (byte) (value >> 16);
		bytes[offset++] = (byte) (value >> 8);
		bytes[offset++] = (byte) (value);
	}

	/** Writes an 8-byte long in big-endian order starting at {@code offset}. */
	public static void writeLong(long value, byte[] bytes, int offset) {
		bytes[offset++] = (byte) (value >> 56);
		bytes[offset++] = (byte) (value >> 48);
		bytes[offset++] = (byte) (value >> 40);
		bytes[offset++] = (byte) (value >> 32);
		bytes[offset++] = (byte) (value >> 24);
		bytes[offset++] = (byte) (value >> 16);
		bytes[offset++] = (byte) (value >> 8);
		bytes[offset++] = (byte) (value);
	}

	/** Writes a double via its IEEE-754 long bit pattern, big-endian. */
	public static void writeDouble(double value, byte[] bytes, int offset) {
		long lValue = Double.doubleToLongBits(value);
		writeLong(lValue, bytes, offset);
	}

	/** Writes a float via its IEEE-754 int bit pattern, big-endian. */
	public static void writeFloat(float value, byte[] bytes, int offset) {
		int iValue = Float.floatToIntBits(value);
		writeInt(iValue, bytes, offset);
	}

}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java
new file mode 100644
index 0000000..3296e19
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java
@@ -0,0 +1,18 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import org.apache.hadoop.hive.ql.exec.ExprNodeColumnEvaluator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

/**
 * Copy-evaluator for a Hive column-reference expression; all evaluation
 * logic is inherited from {@link AbstractExpressionEvaluator}, this class
 * only selects the matching Hive {@link ExprNodeColumnEvaluator}.
 */
public class ColumnExpressionEvaluator extends AbstractExpressionEvaluator {

	/**
	 * @param expr
	 *            the Hive column expression descriptor
	 * @param oi
	 *            the input row object inspector
	 * @param output
	 *            provider of the output channel the result is written to
	 * @throws AlgebricksException
	 */
	public ColumnExpressionEvaluator(ExprNodeColumnDesc expr,
			ObjectInspector oi, IDataOutputProvider output)
			throws AlgebricksException {
		super(new ExprNodeColumnEvaluator(expr), oi, output);
	}

}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java
new file mode 100644
index 0000000..62928e6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

/**
 * Copy-evaluator for a Hive constant expression; all evaluation logic is
 * inherited from {@link AbstractExpressionEvaluator}, this class only
 * selects the matching Hive {@link ExprNodeConstantEvaluator}.
 */
public class ConstantExpressionEvaluator extends AbstractExpressionEvaluator {

	/**
	 * @param expr
	 *            the Hive constant expression descriptor
	 * @param oi
	 *            the input row object inspector
	 * @param output
	 *            provider of the output channel the result is written to
	 * @throws AlgebricksException
	 */
	public ConstantExpressionEvaluator(ExprNodeConstantDesc expr,
			ObjectInspector oi, IDataOutputProvider output)
			throws AlgebricksException {
		super(new ExprNodeConstantEvaluator(expr), oi, output);
	}
}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ExpressionTranslator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ExpressionTranslator.java
new file mode 100644
index 0000000..2180910
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ExpressionTranslator.java
@@ -0,0 +1,233 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.FunctionInfo;

+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;

+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.expression.ExpressionConstant;

+import edu.uci.ics.hivesterix.logical.expression.HiveAlgebricksBuiltInFunctionMap;

+import edu.uci.ics.hivesterix.logical.expression.HiveFunctionInfo;

+import edu.uci.ics.hivesterix.logical.expression.HivesterixConstantValue;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;

+

+public class ExpressionTranslator {

+

+	public static Object getHiveExpression(ILogicalExpression expr,

+			IVariableTypeEnvironment env) throws Exception {

+		if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {

+			/**

+			 * function expression

+			 */

+			AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;

+			IFunctionInfo funcInfo = funcExpr.getFunctionInfo();

+			FunctionIdentifier fid = funcInfo.getFunctionIdentifier();

+

+			if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {

+				Object info = ((HiveFunctionInfo) funcInfo).getInfo();

+				ExprNodeFieldDesc desc = (ExprNodeFieldDesc) info;

+				return new ExprNodeFieldDesc(desc.getTypeInfo(),

+						desc.getDesc(), desc.getFieldName(), desc.getIsList());

+			}

+

+			if (fid.getName().equals(ExpressionConstant.NULL)) {

+				return new ExprNodeNullDesc();

+			}

+

+			/**

+			 * argument expressions: translate argument expressions recursively

+			 * first, this logic is shared in scalar, aggregation and unnesting

+			 * function

+			 */

+			List<Mutable<ILogicalExpression>> arguments = funcExpr

+					.getArguments();

+			List<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();

+			for (Mutable<ILogicalExpression> argument : arguments) {

+				/**

+				 * parameters could not be aggregate function desc

+				 */

+				ExprNodeDesc parameter = (ExprNodeDesc) getHiveExpression(

+						argument.getValue(), env);

+				parameters.add(parameter);

+			}

+

+			/**

+			 * get expression

+			 */

+			if (funcExpr instanceof ScalarFunctionCallExpression) {

+				String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE

+						.getHiveFunctionName(fid);

+				GenericUDF udf;

+				if (udfName != null) {

+					/**

+					 * get corresponding function info for built-in functions

+					 */

+					FunctionInfo fInfo = FunctionRegistry

+							.getFunctionInfo(udfName);

+					udf = fInfo.getGenericUDF();

+

+					int inputSize = parameters.size();

+					List<ExprNodeDesc> currentDescs = new ArrayList<ExprNodeDesc>();

+

+					// generate expression tree if necessary

+					while (inputSize > 2) {

+						int pairs = inputSize / 2;

+						for (int i = 0; i < pairs; i++) {

+							List<ExprNodeDesc> descs = new ArrayList<ExprNodeDesc>();

+							descs.add(parameters.get(2 * i));

+							descs.add(parameters.get(2 * i + 1));

+							ExprNodeDesc desc = ExprNodeGenericFuncDesc

+									.newInstance(udf, descs);

+							currentDescs.add(desc);

+						}

+

+						if (inputSize % 2 != 0) {

+							// List<ExprNodeDesc> descs = new

+							// ArrayList<ExprNodeDesc>();

+							// ExprNodeDesc lastExpr =

+							// currentDescs.remove(currentDescs.size() - 1);

+							// descs.add(lastExpr);

+							currentDescs.add(parameters.get(inputSize - 1));

+							// ExprNodeDesc desc =

+							// ExprNodeGenericFuncDesc.newInstance(udf, descs);

+							// currentDescs.add(desc);

+						}

+						inputSize = currentDescs.size();

+						parameters.clear();

+						parameters.addAll(currentDescs);

+						currentDescs.clear();

+					}

+

+				} else {

+					Object secondInfo = ((HiveFunctionInfo) funcInfo).getInfo();

+					if (secondInfo != null) {

+

+						/**

+						 * for GenericUDFBridge: we should not call get type of

+						 * this hive expression, because parameters may have

+						 * been changed!

+						 */

+						ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) ((HiveFunctionInfo) funcInfo)

+								.getInfo();

+						udf = hiveExpr.getGenericUDF();

+					} else {

+						/**

+						 * for other generic UDF

+						 */

+						Class<?> udfClass;

+						try {

+							udfClass = Class.forName(fid.getName());

+							udf = (GenericUDF) udfClass.newInstance();

+						} catch (Exception e) {

+							e.printStackTrace();

+							throw new AlgebricksException(e.getMessage());

+						}

+					}

+				}

+				/**

+				 * get hive generic function expression

+				 */

+				ExprNodeDesc desc = ExprNodeGenericFuncDesc.newInstance(udf,

+						parameters);

+				return desc;

+			} else if (funcExpr instanceof AggregateFunctionCallExpression) {

+				/**

+				 * hive aggregation info

+				 */

+				AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr

+						.getFunctionInfo()).getInfo();

+				/**

+				 * set parameters

+				 */

+				aggregateDesc

+						.setParameters((ArrayList<ExprNodeDesc>) parameters);

+

+				List<TypeInfo> originalParameterTypeInfos = new ArrayList<TypeInfo>();

+				for (ExprNodeDesc parameter : parameters) {

+					if (parameter.getTypeInfo() instanceof StructTypeInfo) {

+						originalParameterTypeInfos

+								.add(TypeInfoFactory.doubleTypeInfo);

+					} else

+						originalParameterTypeInfos.add(parameter.getTypeInfo());

+				}

+

+				GenericUDAFEvaluator eval = FunctionRegistry

+						.getGenericUDAFEvaluator(

+								aggregateDesc.getGenericUDAFName(),

+								originalParameterTypeInfos,

+								aggregateDesc.getDistinct(), false);

+

+				AggregationDesc newAggregateDesc = new AggregationDesc(

+						aggregateDesc.getGenericUDAFName(), eval,

+						aggregateDesc.getParameters(),

+						aggregateDesc.getDistinct(), aggregateDesc.getMode());

+				return newAggregateDesc;

+			} else if (funcExpr instanceof UnnestingFunctionCallExpression) {

+				/**

+				 * type inference for UDTF function

+				 */

+				UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr

+						.getFunctionInfo()).getInfo();

+				String funcName = hiveDesc.getUDTFName();

+				FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);

+				GenericUDTF udtf = fi.getGenericUDTF();

+				UDTFDesc desc = new UDTFDesc(udtf);

+				return desc;

+			} else {

+				throw new IllegalStateException(

+						"unrecognized function expression "

+								+ expr.getClass().getName());

+			}

+		} else if ((expr.getExpressionTag() == LogicalExpressionTag.VARIABLE)) {

+			/**

+			 * get type for variable in the environment

+			 */

+			VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;

+			LogicalVariable var = varExpr.getVariableReference();

+			TypeInfo typeInfo = (TypeInfo) env.getVarType(var);

+			ExprNodeDesc desc = new ExprNodeColumnDesc(typeInfo,

+					var.toString(), "", false);

+			return desc;

+		} else if ((expr.getExpressionTag() == LogicalExpressionTag.CONSTANT)) {

+			/**

+			 * get expression for constant in the environment

+			 */

+			ConstantExpression varExpr = (ConstantExpression) expr;

+			Object value = ((HivesterixConstantValue) varExpr.getValue())

+					.getObject();

+			ExprNodeDesc desc = new ExprNodeConstantDesc(value);

+			return desc;

+		} else {

+			throw new IllegalStateException("illegal expressions "

+					+ expr.getClass().getName());

+		}

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java
new file mode 100644
index 0000000..5f6a5dc
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import org.apache.hadoop.hive.ql.exec.ExprNodeFieldEvaluator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

/**
 * Copy-evaluator for a Hive struct-field access expression; all evaluation
 * logic is inherited from {@link AbstractExpressionEvaluator}, this class
 * only selects the matching Hive {@link ExprNodeFieldEvaluator}.
 */
public class FieldExpressionEvaluator extends AbstractExpressionEvaluator {

	/**
	 * @param expr
	 *            the Hive field-access expression descriptor
	 * @param oi
	 *            the input row object inspector
	 * @param output
	 *            provider of the output channel the result is written to
	 * @throws AlgebricksException
	 */
	public FieldExpressionEvaluator(ExprNodeFieldDesc expr, ObjectInspector oi,
			IDataOutputProvider output) throws AlgebricksException {
		super(new ExprNodeFieldEvaluator(expr), oi, output);
	}

}
\ No newline at end of file
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java
new file mode 100644
index 0000000..c3f3c93
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java
@@ -0,0 +1,18 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

/**
 * Copy-evaluator for a generic Hive UDF call expression; all evaluation
 * logic is inherited from {@link AbstractExpressionEvaluator}, this class
 * only selects the matching Hive {@link ExprNodeGenericFuncEvaluator}.
 */
public class FunctionExpressionEvaluator extends AbstractExpressionEvaluator {

	/**
	 * @param expr
	 *            the Hive generic-function expression descriptor
	 * @param oi
	 *            the input row object inspector
	 * @param output
	 *            provider of the output channel the result is written to
	 * @throws AlgebricksException
	 */
	public FunctionExpressionEvaluator(ExprNodeGenericFuncDesc expr,
			ObjectInspector oi, IDataOutputProvider output)
			throws AlgebricksException {
		super(new ExprNodeGenericFuncEvaluator(expr), oi, output);
	}

}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java
new file mode 100644
index 0000000..cbe5561
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import org.apache.hadoop.hive.ql.exec.ExprNodeNullEvaluator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

/**
 * Evaluator for Hive NULL-literal expressions.
 * Thin adapter that plugs Hive's {@code ExprNodeNullEvaluator} into the
 * shared {@code AbstractExpressionEvaluator} machinery.
 */
public class NullExpressionEvaluator extends AbstractExpressionEvaluator {

	/**
	 * @param expr
	 *            the null-literal expression descriptor
	 * @param oi
	 *            object inspector describing the input row
	 * @param output
	 *            provider of the DataOutput the result is written to
	 * @throws AlgebricksException
	 *             if the wrapped evaluator cannot be constructed/initialized
	 */
	public NullExpressionEvaluator(ExprNodeNullDesc expr, ObjectInspector oi,
			IDataOutputProvider output) throws AlgebricksException {
		super(new ExprNodeNullEvaluator(expr), oi, output);
	}
}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java
new file mode 100644
index 0000000..328b384
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
+
/**
 * A Hive UDAF {@code AggregationBuffer} whose internal state can additionally
 * be serialized to, and restored from, raw bytes — so partial aggregation
 * state can be shipped between Hyracks operators.
 */
public interface SerializableBuffer extends AggregationBuffer {

	// Restore this buffer's state from data[start, start + len).
	public void deSerializeAggBuffer(byte[] data, int start, int len);

	// Write this buffer's state into data[start, start + len).
	public void serializeAggBuffer(byte[] data, int start, int len);

	// Stream this buffer's state to the given output.
	public void serializeAggBuffer(DataOutput output) throws IOException;

}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java
new file mode 100644
index 0000000..de0141b
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java
@@ -0,0 +1,147 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;

+

+import java.io.DataOutput;

+import java.io.IOException;

+

+import org.apache.hadoop.hive.ql.metadata.HiveException;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.ql.udf.generic.Collector;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;

+import org.apache.hadoop.hive.serde2.SerDe;

+import org.apache.hadoop.hive.serde2.SerDeException;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+import org.apache.hadoop.io.BytesWritable;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.serde.lazy.LazyColumnar;

+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;

+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;

+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunction;

+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

+

/**
 * Runs a Hive generic UDTF (table-generating function) as a Hyracks copy
 * unnesting function. Acts as its own Hive {@code Collector}: each object the
 * UDTF emits is serialized with {@code LazySerDe} straight into the output.
 */
public class UDTFFunctionEvaluator implements ICopyUnnestingFunction, Collector {

	/**
	 * udtf function descriptor (from the Hive plan)
	 */
	private UDTFDesc func;

	/**
	 * input object inspector (built from the schema)
	 */
	private ObjectInspector inputInspector;

	/**
	 * output object inspector, as returned by udtf.initialize()
	 */
	private ObjectInspector outputInspector;

	/**
	 * object inspectors for the UDTF's arguments.
	 * NOTE(review): this array is never assigned anywhere in this class, so
	 * udtf.initialize() below receives null — TODO confirm this is intended
	 * (or that a subclass/caller populates it).
	 */
	private ObjectInspector[] udtfInputOIs;

	/**
	 * the generic udtf instance obtained from the descriptor
	 */
	private GenericUDTF udtf;

	/**
	 * data output the serialized results are written to
	 */
	private DataOutput out;

	/**
	 * the input row object (lazily deserialized columnar view of the tuple)
	 */
	private LazyColumnar cachedRowObject;

	/**
	 * cached per-column input objects fed to the UDTF
	 */
	private Object[] cachedInputObjects;

	/**
	 * serialization/deserialization for the UDTF's output objects
	 */
	private SerDe lazySerDe;

	/**
	 * indexes of the columns fed into the UDTF
	 */
	private int[] columns;

	public UDTFFunctionEvaluator(UDTFDesc desc, Schema schema, int[] cols,
			DataOutput output) {
		this.func = desc;
		this.inputInspector = schema.toObjectInspector();
		udtf = func.getGenericUDTF();
		out = output;
		columns = cols;
	}

	/**
	 * Binds this evaluator to a tuple: builds the lazy row view, initializes
	 * the UDTF, registers this object as the UDTF's collector, and caches the
	 * selected column objects.
	 */
	@Override
	public void init(IFrameTupleReference tuple) throws AlgebricksException {
		cachedInputObjects = new LazyObject[columns.length];
		try {
			cachedRowObject = (LazyColumnar) LazyFactory
					.createLazyObject(inputInspector);
			// see NOTE(review) on udtfInputOIs above
			outputInspector = udtf.initialize(udtfInputOIs);
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		}
		udtf.setCollector(this);
		lazySerDe = new LazySerDe();
		readIntoCache(tuple);
	}

	/**
	 * Feeds the cached input objects to the UDTF; emitted rows reach
	 * {@link #collect(Object)}.
	 * NOTE(review): this always returns true — confirm the caller bounds the
	 * number of step() invocations.
	 */
	@Override
	public boolean step() throws AlgebricksException {
		try {
			udtf.process(cachedInputObjects);
			return true;
		} catch (HiveException e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * bind the tuple reference to the cached row object and re-read the
	 * selected column fields
	 * 
	 * @param r
	 */
	private void readIntoCache(IFrameTupleReference r) {
		cachedRowObject.init(r);
		for (int i = 0; i < cachedInputObjects.length; i++) {
			cachedInputObjects[i] = cachedRowObject.getField(columns[i]);
		}
	}

	/**
	 * serialize one UDTF result row to the output
	 * 
	 * @param result
	 *            the evaluation result
	 * @throws IOException
	 * @throws SerDeException
	 */
	private void serializeResult(Object result) throws SerDeException,
			IOException {
		BytesWritable outputWritable = (BytesWritable) lazySerDe.serialize(
				result, outputInspector);
		out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
	}

	/**
	 * Hive Collector callback: invoked by the UDTF for each emitted row.
	 */
	@Override
	public void collect(Object input) throws HiveException {
		try {
			serializeResult(input);
		} catch (IOException e) {
			throw new HiveException(e);
		} catch (SerDeException e) {
			throw new HiveException(e);
		}
	}
}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/HyracksExecutionEngine.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/HyracksExecutionEngine.java
new file mode 100644
index 0000000..6c1ac72
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/HyracksExecutionEngine.java
@@ -0,0 +1,554 @@
+package edu.uci.ics.hivesterix.runtime.exec;

+

+import java.io.PrintWriter;

+import java.io.Serializable;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.apache.commons.logging.Log;

+import org.apache.commons.logging.LogFactory;

+import org.apache.hadoop.hive.conf.HiveConf;

+import org.apache.hadoop.hive.ql.exec.ConditionalTask;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.MapRedTask;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.exec.Task;

+import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;

+import org.apache.hadoop.hive.ql.plan.FetchWork;

+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;

+import org.apache.hadoop.hive.ql.plan.MapredLocalWork;

+import org.apache.hadoop.hive.ql.plan.MapredWork;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.hive.ql.plan.TableScanDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.HiveExpressionTypeComputer;

+import edu.uci.ics.hivesterix.logical.expression.HiveMergeAggregationExpressionFactory;

+import edu.uci.ics.hivesterix.logical.expression.HiveNullableTypeComputer;

+import edu.uci.ics.hivesterix.logical.expression.HivePartialAggregationTypeComputer;

+import edu.uci.ics.hivesterix.logical.plan.HiveAlgebricksTranslator;

+import edu.uci.ics.hivesterix.logical.plan.HiveLogicalPlanAndMetaData;

+import edu.uci.ics.hivesterix.optimizer.rulecollections.HiveRuleCollections;

+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;

+import edu.uci.ics.hivesterix.runtime.factory.evaluator.HiveExpressionRuntimeProvider;

+import edu.uci.ics.hivesterix.runtime.factory.nullwriter.HiveNullWriterFactory;

+import edu.uci.ics.hivesterix.runtime.inspector.HiveBinaryBooleanInspectorFactory;

+import edu.uci.ics.hivesterix.runtime.inspector.HiveBinaryIntegerInspectorFactory;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveConnectorPolicyAssignmentPolicy;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveConnectorPolicyAssignmentPolicy.Policy;

+import edu.uci.ics.hivesterix.runtime.provider.HiveBinaryComparatorFactoryProvider;

+import edu.uci.ics.hivesterix.runtime.provider.HiveBinaryHashFunctionFactoryProvider;

+import edu.uci.ics.hivesterix.runtime.provider.HiveBinaryHashFunctionFamilyProvider;

+import edu.uci.ics.hivesterix.runtime.provider.HiveNormalizedKeyComputerFactoryProvider;

+import edu.uci.ics.hivesterix.runtime.provider.HivePrinterFactoryProvider;

+import edu.uci.ics.hivesterix.runtime.provider.HiveSerializerDeserializerProvider;

+import edu.uci.ics.hivesterix.runtime.provider.HiveTypeTraitProvider;

+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;

+import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder.DefaultOptimizationContextFactory;

+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompiler;

+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompilerFactory;

+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialFixpointRuleController;

+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialOnceRuleController;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlanAndMetadata;

+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;

+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;

+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;

+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;

+import edu.uci.ics.hyracks.api.client.HyracksConnection;

+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

+import edu.uci.ics.hyracks.api.job.JobId;

+import edu.uci.ics.hyracks.api.job.JobSpecification;

+

+@SuppressWarnings({ "rawtypes", "unchecked" })

@SuppressWarnings({ "rawtypes", "unchecked" })
/**
 * Execution engine that takes a compiled Hive MapReduce plan, rewrites its
 * map-reduce task DAG into a single operator DAG, translates that into an
 * Algebricks logical plan, optimizes it, and runs the resulting Hyracks job.
 *
 * Lifecycle: {@link #compileJob(List)} builds and optimizes the plan;
 * {@link #executeJob()} generates the Hyracks job and submits it.
 */
public class HyracksExecutionEngine implements IExecutionEngine {

    private static final Log LOG = LogFactory.getLog(HyracksExecutionEngine.class.getName());

    // private static final String[] locConstraints = {}

    // Rule sets shared by all engine instances; populated once below.
    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> DEFAULT_LOGICAL_REWRITES = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> DEFAULT_PHYSICAL_REWRITES = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
    static {
        // Order matters: normalization → join inference → field loading →
        // operator pushdown → data exchange → consolidation, then the
        // physical rewrites and job-gen preparation.
        SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
        SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
                HiveRuleCollections.NORMALIZATION));
        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
                HiveRuleCollections.COND_PUSHDOWN_AND_JOIN_INFERENCE));
        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
                HiveRuleCollections.LOAD_FIELDS));
        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
                HiveRuleCollections.OP_PUSHDOWN));
        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
                HiveRuleCollections.DATA_EXCHANGE));
        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
                HiveRuleCollections.CONSOLIDATION));

        DEFAULT_PHYSICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
                HiveRuleCollections.PHYSICAL_PLAN_REWRITES));
        DEFAULT_PHYSICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
                HiveRuleCollections.prepareJobGenRules));
    }

    /**
     * static configurations for compiler
     */
    private HeuristicCompilerFactoryBuilder builder;

    /**
     * compiler produced by compileJob(); consumed by codeGen()
     */
    private ICompiler compiler;

    /**
     * physical optimization config (frame limits for external group-by/sort)
     */
    private PhysicalOptimizationConfig physicalOptimizationConfig;

    /**
     * final ending (sink) operators; closed after job completion
     */
    private List<Operator> leaveOps = new ArrayList<Operator>();

    /**
     * tasks that are already visited during DAG articulation
     */
    private Map<Task<? extends Serializable>, Boolean> tasksVisited = new HashMap<Task<? extends Serializable>, Boolean>();

    /**
     * hyracks job spec built by codeGen()
     */
    private JobSpecification jobSpec;

    /**
     * hive configuration
     */
    private HiveConf conf;

    /**
     * optional plan printer; when set, the optimized plan is written to it
     */
    private PrintWriter planPrinter;

    public HyracksExecutionEngine(HiveConf conf) {
        this.conf = conf;
        init(conf);
    }

    public HyracksExecutionEngine(HiveConf conf, PrintWriter planPrinter) {
        this.conf = conf;
        this.planPrinter = planPrinter;
        init(conf);
    }

    /**
     * Configures the compiler factory builder: rewrite rule sets, type
     * computers, and frame budgets derived from Hive configuration
     * (hive.algebricks.* keys with hard-coded byte defaults).
     */
    private void init(HiveConf conf) {
        builder = new HeuristicCompilerFactoryBuilder(DefaultOptimizationContextFactory.INSTANCE);
        builder.setLogicalRewrites(DEFAULT_LOGICAL_REWRITES);
        builder.setPhysicalRewrites(DEFAULT_PHYSICAL_REWRITES);
        builder.setIMergeAggregationExpressionFactory(HiveMergeAggregationExpressionFactory.INSTANCE);
        builder.setExpressionTypeComputer(HiveExpressionTypeComputer.INSTANCE);
        builder.setNullableTypeComputer(HiveNullableTypeComputer.INSTANCE);

        // defaults: 256MB group-by, 512MB sort, 32KB frames
        long memSizeExternalGby = conf.getLong("hive.algebricks.groupby.external.memory", 268435456);
        long memSizeExternalSort = conf.getLong("hive.algebricks.sort.memory", 536870912);
        int frameSize = conf.getInt("hive.algebricks.framesize", 32768);

        physicalOptimizationConfig = new PhysicalOptimizationConfig();
        int frameLimitExtGby = (int) (memSizeExternalGby / frameSize);
        physicalOptimizationConfig.setMaxFramesExternalGroupBy(frameLimitExtGby);
        int frameLimitExtSort = (int) (memSizeExternalSort / frameSize);
        physicalOptimizationConfig.setMaxFramesExternalSort(frameLimitExtSort);
        builder.setPhysicalOptimizationConfig(physicalOptimizationConfig);
    }

    /**
     * Converts Hive's root tasks into a single operator DAG, translates it to
     * an Algebricks logical plan, optimizes it, and (optionally) prints it.
     * MapRedTasks consumed here are removed from rootTasks.
     *
     * @return 0 on success, 1 on any failure (exception is printed, not
     *         rethrown — callers rely on the int contract)
     */
    @Override
    public int compileJob(List<Task<? extends Serializable>> rootTasks) {
        // clean up state from any previous compilation
        leaveOps.clear();
        tasksVisited.clear();
        jobSpec = null;

        HashMap<String, PartitionDesc> aliasToPath = new HashMap<String, PartitionDesc>();
        List<Operator> rootOps = generateRootOperatorDAG(rootTasks, aliasToPath);

        // get all leave Ops
        getLeaves(rootOps, leaveOps);

        HiveAlgebricksTranslator translator = new HiveAlgebricksTranslator();
        try {
            translator.translate(rootOps, null, aliasToPath);

            ILogicalPlan plan = translator.genLogicalPlan();

            // only proceed when the translated plan is non-trivial
            if (plan.getRoots() != null && plan.getRoots().size() > 0 && plan.getRoots().get(0).getValue() != null) {
                translator.printOperators();
                ILogicalPlanAndMetadata planAndMetadata = new HiveLogicalPlanAndMetaData(plan,
                        translator.getMetadataProvider());

                ICompilerFactory compilerFactory = builder.create();
                compiler = compilerFactory.createCompiler(planAndMetadata.getPlan(),
                        planAndMetadata.getMetadataProvider(), translator.getVariableCounter());

                // run optimization and re-writing rules for Hive plan
                compiler.optimize();

                // print optimized plan
                LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
                StringBuilder buffer = new StringBuilder();
                PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
                String planStr = buffer.toString();
                System.out.println(planStr);

                if (planPrinter != null)
                    planPrinter.print(planStr);
            }
        } catch (Exception e) {
            e.printStackTrace();
            return 1;
        }

        return 0;
    }

    /**
     * Generates the Hyracks job specification from the optimized plan:
     * installs all runtime providers, creates the job, and applies the
     * connector policy from hive.hyracks.connectorpolicy (default PIPELINING).
     * Requires compileJob() to have set {@link #compiler}.
     */
    private void codeGen() throws AlgebricksException {
        // number of cpu cores in the cluster
        builder.setClusterLocations(new AlgebricksAbsolutePartitionConstraint(ConfUtil.getNCs()));
        // builder.setClusterTopology(ConfUtil.getClusterTopology());
        builder.setBinaryBooleanInspectorFactory(HiveBinaryBooleanInspectorFactory.INSTANCE);
        builder.setBinaryIntegerInspectorFactory(HiveBinaryIntegerInspectorFactory.INSTANCE);
        builder.setComparatorFactoryProvider(HiveBinaryComparatorFactoryProvider.INSTANCE);
        builder.setExpressionRuntimeProvider(HiveExpressionRuntimeProvider.INSTANCE);
        builder.setHashFunctionFactoryProvider(HiveBinaryHashFunctionFactoryProvider.INSTANCE);
        builder.setPrinterProvider(HivePrinterFactoryProvider.INSTANCE);
        builder.setSerializerDeserializerProvider(HiveSerializerDeserializerProvider.INSTANCE);
        builder.setNullWriterFactory(HiveNullWriterFactory.INSTANCE);
        builder.setNormalizedKeyComputerFactoryProvider(HiveNormalizedKeyComputerFactoryProvider.INSTANCE);
        builder.setPartialAggregationTypeComputer(HivePartialAggregationTypeComputer.INSTANCE);
        builder.setTypeTraitProvider(HiveTypeTraitProvider.INSTANCE);
        builder.setHashFunctionFamilyProvider(HiveBinaryHashFunctionFamilyProvider.INSTANCE);

        jobSpec = compiler.createJob(null);

        // set the policy
        String policyStr = conf.get("hive.hyracks.connectorpolicy");
        if (policyStr == null)
            policyStr = "PIPELINING";
        Policy policyValue = Policy.valueOf(policyStr);
        jobSpec.setConnectorPolicyAssignmentPolicy(new HiveConnectorPolicyAssignmentPolicy(policyValue));
        jobSpec.setUseConnectorPolicyForScheduling(false);
    }

    /**
     * Generates and runs the Hyracks job for the last compiled plan.
     *
     * @return 0 on success, 1 on failure (exception printed, not rethrown)
     */
    @Override
    public int executeJob() {
        try {
            codeGen();
            executeHyraxJob(jobSpec);
        } catch (Exception e) {
            e.printStackTrace();
            return 1;
        }
        return 0;
    }

    /**
     * Walks rootTasks in reverse, stitching each MapRedTask's operator tree
     * into one DAG: map children of later tasks are attached under the leaves
     * accumulated so far (table scans become new roots instead). Consumed
     * MapRedTasks are removed from rootTasks; alias→partition mappings are
     * collected into aliasToPath.
     *
     * @return the root operators of the combined DAG
     */
    private List<Operator> generateRootOperatorDAG(List<Task<? extends Serializable>> rootTasks,
            HashMap<String, PartitionDesc> aliasToPath) {

        List<Operator> rootOps = new ArrayList<Operator>();
        List<Task<? extends Serializable>> toDelete = new ArrayList<Task<? extends Serializable>>();
        tasksVisited.clear();

        for (int i = rootTasks.size() - 1; i >= 0; i--) {
            /**
             * list of map-reduce tasks
             */
            Task<? extends Serializable> task = rootTasks.get(i);
            // System.out.println("!" + task.getName());

            if (task instanceof MapRedTask) {
                List<Operator> mapRootOps = articulateMapReduceOperators(task, rootOps, aliasToPath, rootTasks);
                if (i == 0)
                    rootOps.addAll(mapRootOps);
                else {
                    List<Operator> leaves = new ArrayList<Operator>();
                    getLeaves(rootOps, leaves);

                    List<Operator> mapChildren = new ArrayList<Operator>();
                    for (Operator childMap : mapRootOps) {
                        if (childMap instanceof TableScanOperator) {
                            TableScanDesc topDesc = (TableScanDesc) childMap.getConf();
                            if (topDesc == null)
                                mapChildren.add(childMap);
                            else {
                                // a real table scan starts a new root
                                rootOps.add(childMap);
                            }
                        } else
                            mapChildren.add(childMap);
                    }

                    // wire the new map children under the current leaves
                    if (mapChildren.size() > 0) {
                        for (Operator leaf : leaves)
                            leaf.setChildOperators(mapChildren);
                        for (Operator child : mapChildren)
                            child.setParentOperators(leaves);
                    }
                }

                MapredWork mr = (MapredWork) task.getWork();
                HashMap<String, PartitionDesc> map = mr.getAliasToPartnInfo();

                addAliasToPartition(aliasToPath, map);
                toDelete.add(task);
            }
        }

        for (Task<? extends Serializable> task : toDelete)
            rootTasks.remove(task);

        return rootOps;
    }

    /**
     * Flattens Hive's colon-joined alias keys ("a:b") into individual
     * alias→PartitionDesc entries.
     */
    private void addAliasToPartition(HashMap<String, PartitionDesc> aliasToPath, HashMap<String, PartitionDesc> map) {
        Iterator<String> keys = map.keySet().iterator();
        while (keys.hasNext()) {
            String key = keys.next();
            PartitionDesc part = map.get(key);
            String[] names = key.split(":");
            for (String name : names) {
                aliasToPath.put(name, part);
            }
        }
    }

    /**
     * Recursively converts one MapRedTask (and its child tasks) into a
     * connected operator tree: map-side roots → map leaves → reducer →
     * children of child tasks. Non-MapRed tasks are pushed back onto
     * rootTasks; ConditionalTasks are resolved to their first MapRed branch
     * (scanned from the end). Revisited tasks short-circuit via tasksVisited.
     *
     * @return the map-side root operators of this task, or null if the task
     *         is not (and does not contain) a MapRedTask
     */
    private List<Operator> articulateMapReduceOperators(Task task, List<Operator> rootOps,
            HashMap<String, PartitionDesc> aliasToPath, List<Task<? extends Serializable>> rootTasks) {
        // System.out.println("!"+task.getName());
        if (!(task instanceof MapRedTask)) {
            if (!(task instanceof ConditionalTask)) {
                rootTasks.add(task);
                return null;
            } else {
                // remove map-reduce branches in condition task
                ConditionalTask condition = (ConditionalTask) task;
                List<Task<? extends Serializable>> branches = condition.getListTasks();
                for (int i = branches.size() - 1; i >= 0; i--) {
                    Task branch = branches.get(i);
                    if (branch instanceof MapRedTask) {
                        return articulateMapReduceOperators(branch, rootOps, aliasToPath, rootTasks);
                    }
                }
                rootTasks.add(task);
                return null;
            }
        }

        MapredWork mr = (MapredWork) task.getWork();
        HashMap<String, PartitionDesc> map = mr.getAliasToPartnInfo();

        // put all aliasToParitionDesc mapping into the map
        addAliasToPartition(aliasToPath, map);

        MapRedTask mrtask = (MapRedTask) task;
        MapredWork work = (MapredWork) mrtask.getWork();
        HashMap<String, Operator<? extends Serializable>> operators = work.getAliasToWork();

        Set entries = operators.entrySet();
        Iterator<Entry<String, Operator>> iterator = entries.iterator();
        List<Operator> mapRootOps = new ArrayList<Operator>();

        // get map root operators
        while (iterator.hasNext()) {
            Operator next = iterator.next().getValue();
            if (!mapRootOps.contains(next)) {
                // clear that only for the case of union
                mapRootOps.add(next);
            }
        }

        // get map local work (map-side join small tables etc.)
        MapredLocalWork localWork = work.getMapLocalWork();
        if (localWork != null) {
            HashMap<String, Operator<? extends Serializable>> localOperators = localWork.getAliasToWork();

            Set localEntries = localOperators.entrySet();
            Iterator<Entry<String, Operator>> localIterator = localEntries.iterator();
            while (localIterator.hasNext()) {
                mapRootOps.add(localIterator.next().getValue());
            }

            HashMap<String, FetchWork> localFetch = localWork.getAliasToFetchWork();
            Set localFetchEntries = localFetch.entrySet();
            Iterator<Entry<String, FetchWork>> localFetchIterator = localFetchEntries.iterator();
            while (localFetchIterator.hasNext()) {
                Entry<String, FetchWork> fetchMap = localFetchIterator.next();
                FetchWork fetch = fetchMap.getValue();
                String alias = fetchMap.getKey();
                List<PartitionDesc> dirPart = fetch.getPartDesc();

                // temporary hack: put the first partitionDesc into the map
                aliasToPath.put(alias, dirPart.get(0));
            }
        }

        // already articulated: return the roots without re-wiring
        Boolean visited = tasksVisited.get(task);
        if (visited != null && visited.booleanValue() == true) {
            return mapRootOps;
        }

        // do that only for union operator
        for (Operator op : mapRootOps)
            if (op.getParentOperators() != null)
                op.getParentOperators().clear();

        List<Operator> mapLeaves = new ArrayList<Operator>();
        downToLeaves(mapRootOps, mapLeaves);
        List<Operator> reduceOps = new ArrayList<Operator>();

        if (work.getReducer() != null)
            reduceOps.add(work.getReducer());

        // connect map leaves to the reducer (if any)
        for (Operator mapLeaf : mapLeaves) {
            mapLeaf.setChildOperators(reduceOps);
        }

        for (Operator reduceOp : reduceOps) {
            if (reduceOp != null)
                reduceOp.setParentOperators(mapLeaves);
        }

        List<Operator> leafs = new ArrayList<Operator>();
        if (reduceOps.size() > 0) {
            downToLeaves(reduceOps, leafs);
        } else {
            leafs = mapLeaves;
        }

        // recurse into child tasks and attach their map operators
        List<Operator> mapChildren = new ArrayList<Operator>();
        if (task.getChildTasks() != null && task.getChildTasks().size() > 0) {
            for (Object child : task.getChildTasks()) {
                List<Operator> childMapOps = articulateMapReduceOperators((Task) child, rootOps, aliasToPath, rootTasks);
                if (childMapOps == null)
                    continue;

                for (Operator childMap : childMapOps) {
                    if (childMap instanceof TableScanOperator) {
                        TableScanDesc topDesc = (TableScanDesc) childMap.getConf();
                        if (topDesc == null)
                            mapChildren.add(childMap);
                        else {
                            rootOps.add(childMap);
                        }
                    } else {
                        // if not table scan, add the child
                        mapChildren.add(childMap);
                    }
                }
            }

            // pair up leafs[i] with mapChildren[i]
            // NOTE(review): this assumes leafs and mapChildren have equal
            // sizes — confirm against the plans this handles
            if (mapChildren.size() > 0) {
                int i = 0;
                for (Operator leaf : leafs) {
                    if (leaf.getChildOperators() == null || leaf.getChildOperators().size() == 0)
                        leaf.setChildOperators(new ArrayList<Operator>());
                    leaf.getChildOperators().add(mapChildren.get(i));
                    i++;
                }
                i = 0;
                for (Operator child : mapChildren) {
                    if (child.getParentOperators() == null || child.getParentOperators().size() == 0)
                        child.setParentOperators(new ArrayList<Operator>());
                    child.getParentOperators().add(leafs.get(i));
                    i++;
                }
            }
        }

        // mark this task as visited
        this.tasksVisited.put(task, true);
        return mapRootOps;
    }

    /**
     * down to leaf nodes (depth-first; skips nulls, de-duplicates)
     * 
     * @param ops
     * @param leaves
     */
    private void downToLeaves(List<Operator> ops, List<Operator> leaves) {

        // Operator currentOp;
        for (Operator op : ops) {
            if (op != null && op.getChildOperators() != null && op.getChildOperators().size() > 0) {
                downToLeaves(op.getChildOperators(), leaves);
            } else {
                if (op != null && leaves.indexOf(op) < 0)
                    leaves.add(op);
            }
        }
    }

    /**
     * Collects all childless operators reachable from roots into
     * currentLeaves (no de-duplication, unlike downToLeaves).
     */
    private void getLeaves(List<Operator> roots, List<Operator> currentLeaves) {
        for (Operator op : roots) {
            List<Operator> children = op.getChildOperators();
            if (children == null || children.size() <= 0) {
                currentLeaves.add(op);
            } else {
                getLeaves(children, currentLeaves);
            }
        }
    }

    /**
     * Submits the job to the Hyracks cluster named by hive.hyracks.host/port/
     * app, blocks until completion, then closes each sink operator.
     */
    private void executeHyraxJob(JobSpecification job) throws Exception {
        String ipAddress = conf.get("hive.hyracks.host");
        int port = Integer.parseInt(conf.get("hive.hyracks.port"));
        String applicationName = conf.get("hive.hyracks.app");
        //System.out.println("connect to " + ipAddress + " " + port);

        IHyracksClientConnection hcc = new HyracksConnection(ipAddress, port);

        //System.out.println("get connected");
        long start = System.currentTimeMillis();
        JobId jobId = hcc.startJob(applicationName, job);
        hcc.waitForCompletion(jobId);

        //System.out.println("job finished: " + jobId.toString());
        // call all leave nodes to end
        for (Operator leaf : leaveOps) {
            jobClose(leaf);
        }

        long end = System.currentTimeMillis();
        // timing line: start, end, elapsed millis
        System.err.println(start + " " + end + " " + (end - start));
    }

    /**
     * mv to final directory on hdfs (not real final)
     * 
     * @param leaf
     *            a FileSinkOperator sink of the finished job
     * @throws Exception
     */
    private void jobClose(Operator leaf) throws Exception {
        FileSinkOperator fsOp = (FileSinkOperator) leaf;
        FileSinkDesc desc = fsOp.getConf();
        boolean isNativeTable = !desc.getTableInfo().isNonNative();
        if ((conf != null) && isNativeTable) {
            String specPath = desc.getDirName();
            DynamicPartitionCtx dpCtx = desc.getDynPartCtx();
            // for 0.7.0
            fsOp.mvFileToFinalPath(specPath, conf, true, LOG, dpCtx);
            // for 0.8.0
            // Utilities.mvFileToFinalPath(specPath, conf, true, LOG, dpCtx,
            // desc);
        }
    }
}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java
new file mode 100644
index 0000000..8f4c471
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hivesterix.runtime.exec;

+

+import java.io.Serializable;

+import java.util.List;

+

+import org.apache.hadoop.hive.ql.exec.Task;

+

+public interface IExecutionEngine {

+

+	/**

+	 * compile the job

+	 * 

+	 * @param rootTasks

+	 *            : Hive MapReduce plan

+	 * @return 0 pass, 1 fail

+	 */

+	public int compileJob(List<Task<? extends Serializable>> rootTasks);

+

+	/**

+	 * execute the job with latest compiled plan

+	 * 

+	 * @return

+	 */

+	public int executeJob();

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..9c2d463
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveByteBinaryAscComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveByteBinaryAscComparatorFactory INSTANCE = new HiveByteBinaryAscComparatorFactory();

+

+	private HiveByteBinaryAscComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private byte left;

+			private byte right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = b1[s1];

+				right = b2[s2];

+				if (left > right)

+					return 1;

+				else if (left == right)

+					return 0;

+				else

+					return -1;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..ee71655
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveByteBinaryDescComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveByteBinaryDescComparatorFactory INSTANCE = new HiveByteBinaryDescComparatorFactory();

+

+	private HiveByteBinaryDescComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private byte left;

+			private byte right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = b1[s1];

+				right = b2[s2];

+				if (left > right)

+					return -1;

+				else if (left == right)

+					return 0;

+				else

+					return 1;

+			}

+		};

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..739e417
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java
@@ -0,0 +1,39 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveDoubleBinaryAscComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveDoubleBinaryAscComparatorFactory INSTANCE = new HiveDoubleBinaryAscComparatorFactory();

+

+	private HiveDoubleBinaryAscComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private double left;

+			private double right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = Double.longBitsToDouble(LazyUtils

+						.byteArrayToLong(b1, s1));

+				right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2,

+						s2));

+				if (left > right)

+					return 1;

+				else if (left == right)

+					return 0;

+				else

+					return -1;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..0424c9f
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java
@@ -0,0 +1,39 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveDoubleBinaryDescComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveDoubleBinaryDescComparatorFactory INSTANCE = new HiveDoubleBinaryDescComparatorFactory();

+

+	private HiveDoubleBinaryDescComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private double left;

+			private double right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = Double.longBitsToDouble(LazyUtils

+						.byteArrayToLong(b1, s1));

+				right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2,

+						s2));

+				if (left > right)

+					return -1;

+				else if (left == right)

+					return 0;

+				else

+					return 1;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..08542a7
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveFloatBinaryAscComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveFloatBinaryAscComparatorFactory INSTANCE = new HiveFloatBinaryAscComparatorFactory();

+

+	private HiveFloatBinaryAscComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private float left;

+			private float right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));

+				right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));

+				if (left > right)

+					return 1;

+				else if (left == right)

+					return 0;

+				else

+					return -1;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..513512e
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveFloatBinaryDescComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveFloatBinaryDescComparatorFactory INSTANCE = new HiveFloatBinaryDescComparatorFactory();

+

+	private HiveFloatBinaryDescComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private float left;

+			private float right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));

+				right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));

+				if (left > right)

+					return -1;

+				else if (left == right)

+					return 0;

+				else

+					return 1;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..947f30f
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveIntegerBinaryAscComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static final HiveIntegerBinaryAscComparatorFactory INSTANCE = new HiveIntegerBinaryAscComparatorFactory();

+

+	private HiveIntegerBinaryAscComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private VInt left = new VInt();

+			private VInt right = new VInt();

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				LazyUtils.readVInt(b1, s1, left);

+				LazyUtils.readVInt(b2, s2, right);

+

+				if (left.length != l1 || right.length != l2)

+					throw new IllegalArgumentException(

+							"length mismatch in int comparator function actual: "

+									+ left.length + "," + right.length

+									+ " expected " + l1 + "," + l2);

+

+				if (left.value > right.value)

+					return 1;

+				else if (left.value == right.value)

+					return 0;

+				else

+					return -1;

+			}

+		};

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..7614aa1
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveIntegerBinaryDescComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static final HiveIntegerBinaryDescComparatorFactory INSTANCE = new HiveIntegerBinaryDescComparatorFactory();

+

+	private HiveIntegerBinaryDescComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private VInt left = new VInt();

+			private VInt right = new VInt();

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				LazyUtils.readVInt(b1, s1, left);

+				LazyUtils.readVInt(b2, s2, right);

+				if (left.length != l1 || right.length != l2)

+					throw new IllegalArgumentException(

+							"length mismatch in int comparator function actual: "

+									+ left.length + " expected " + l1);

+				if (left.value > right.value)

+					return -1;

+				else if (left.value == right.value)

+					return 0;

+				else

+					return 1;

+			}

+		};

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..f5f3473
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveLongBinaryAscComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static final HiveLongBinaryAscComparatorFactory INSTANCE = new HiveLongBinaryAscComparatorFactory();

+

+	private HiveLongBinaryAscComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private VLong left = new VLong();

+			private VLong right = new VLong();

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				LazyUtils.readVLong(b1, s1, left);

+				LazyUtils.readVLong(b2, s2, right);

+				if (left.length != l1 || right.length != l2)

+					throw new IllegalArgumentException(

+							"length mismatch in int comparator function actual: "

+									+ left.length + " expected " + l1);

+				if (left.value > right.value)

+					return 1;

+				else if (left.value == right.value)

+					return 0;

+				else

+					return -1;

+			}

+		};

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..b878b22
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveLongBinaryDescComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static final HiveLongBinaryDescComparatorFactory INSTANCE = new HiveLongBinaryDescComparatorFactory();

+

+	private HiveLongBinaryDescComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private VLong left = new VLong();

+			private VLong right = new VLong();

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				LazyUtils.readVLong(b1, s1, left);

+				LazyUtils.readVLong(b2, s2, right);

+				if (left.length != l1 || right.length != l2)

+					throw new IllegalArgumentException(

+							"length mismatch in int comparator function actual: "

+									+ left.length + " expected " + l1);

+				if (left.value > right.value)

+					return -1;

+				else if (left.value == right.value)

+					return 0;

+				else

+					return 1;

+			}

+		};

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..8d55cdb
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveShortBinaryAscComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveShortBinaryAscComparatorFactory INSTANCE = new HiveShortBinaryAscComparatorFactory();

+

+	private HiveShortBinaryAscComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private short left;

+			private short right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = LazyUtils.byteArrayToShort(b1, s1);

+				right = LazyUtils.byteArrayToShort(b2, s2);

+				if (left > right)

+					return 1;

+				else if (left == right)

+					return 0;

+				else

+					return -1;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..4e8dde6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveShortBinaryDescComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveShortBinaryDescComparatorFactory INSTANCE = new HiveShortBinaryDescComparatorFactory();

+

+	private HiveShortBinaryDescComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private short left;

+			private short right;

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				left = LazyUtils.byteArrayToShort(b1, s1);

+				right = LazyUtils.byteArrayToShort(b2, s2);

+				if (left > right)

+					return -1;

+				else if (left == right)

+					return 0;

+				else

+					return 1;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..a334ecf
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java
@@ -0,0 +1,46 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import org.apache.hadoop.io.Text;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveStringBinaryAscComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveStringBinaryAscComparatorFactory INSTANCE = new HiveStringBinaryAscComparatorFactory();

+

+	private HiveStringBinaryAscComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private VInt leftLen = new VInt();

+			private VInt rightLen = new VInt();

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				LazyUtils.readVInt(b1, s1, leftLen);

+				LazyUtils.readVInt(b2, s2, rightLen);

+

+				if (leftLen.value + leftLen.length != l1

+						|| rightLen.value + rightLen.length != l2)

+					throw new IllegalStateException(

+							"parse string: length mismatch, expected "

+									+ (leftLen.value + leftLen.length) + ", "

+									+ (rightLen.value + rightLen.length)

+									+ " but get " + l1 + ", " + l2);

+

+				return Text.Comparator.compareBytes(b1, s1 + leftLen.length, l1

+						- leftLen.length, b2, s2 + rightLen.length, l2

+						- rightLen.length);

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..e00b58e
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java
@@ -0,0 +1,45 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;

+

+import org.apache.hadoop.io.WritableComparator;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveStringBinaryDescComparatorFactory implements

+		IBinaryComparatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveStringBinaryDescComparatorFactory INSTANCE = new HiveStringBinaryDescComparatorFactory();

+

+	private HiveStringBinaryDescComparatorFactory() {

+	}

+

+	@Override

+	public IBinaryComparator createBinaryComparator() {

+		return new IBinaryComparator() {

+			private VInt leftLen = new VInt();

+			private VInt rightLen = new VInt();

+

+			@Override

+			public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,

+					int l2) {

+				LazyUtils.readVInt(b1, s1, leftLen);

+				LazyUtils.readVInt(b2, s2, rightLen);

+

+				if (leftLen.value + leftLen.length != l1

+						|| rightLen.value + rightLen.length != l2)

+					throw new IllegalStateException(

+							"parse string: length mismatch, expected "

+									+ (leftLen.value + leftLen.length) + ", "

+									+ (rightLen.value + rightLen.length)

+									+ " but get " + l1 + ", " + l2);

+

+				return -WritableComparator.compareBytes(b1,

+						s1 + leftLen.length, l1 - leftLen.length, b2, s2

+								+ rightLen.length, l2 - rightLen.length);

+			}

+		};

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java
new file mode 100644
index 0000000..c6078ca
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java
@@ -0,0 +1,381 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+

+import org.apache.hadoop.conf.Configuration;

+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;

+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;

+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

+import org.apache.hadoop.hive.ql.exec.Utilities;

+import org.apache.hadoop.hive.ql.metadata.HiveException;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

+import org.apache.hadoop.hive.serde2.SerDe;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.evaluator.AggregationFunctionEvaluator;

+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;

+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;

+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;

+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunction;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunctionFactory;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

+public class AggregationFunctionFactory implements

+		ICopyAggregateFunctionFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	/**

+	 * list of parameters' serialization

+	 */

+	private List<String> parametersSerialization = new ArrayList<String>();

+

+	/**

+	 * the name of the udf

+	 */

+	private String genericUDAFName;

+

+	/**

+	 * aggregation mode

+	 */

+	private GenericUDAFEvaluator.Mode mode;

+

+	/**

+	 * list of type info

+	 */

+	private List<TypeInfo> types = new ArrayList<TypeInfo>();

+

+	/**

+	 * distinct or not

+	 */

+	private boolean distinct;

+

+	/**

+	 * the schema of incoming rows

+	 */

+	private Schema rowSchema;

+

+	/**

+	 * list of parameters

+	 */

+	private transient List<ExprNodeDesc> parametersOrigin;

+

+	/**

+	 * row inspector

+	 */

+	private transient ObjectInspector rowInspector = null;

+

+	/**

+	 * output object inspector

+	 */

+	private transient ObjectInspector outputInspector = null;

+

+	/**

+	 * output object inspector

+	 */

+	private transient ObjectInspector outputInspectorPartial = null;

+

+	/**

+	 * parameter inspectors

+	 */

+	private transient ObjectInspector[] parameterInspectors = null;

+

+	/**

+	 * expression desc

+	 */

+	private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();

+

+	/**

+	 * evaluators

+	 */

+	private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();

+

+	/**

+	 * cached parameter objects

+	 */

+	private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();

+

+	/**

+	 * cached row object: one per thread

+	 */

+	private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();

+

+	/**

+	 * we only use lazy serde to do serialization

+	 */

+	private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();

+

+	/**

+	 * udaf evaluators

+	 */

+	private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();

+

+	/**

+	 * udaf evaluators

+	 */

+	private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();

+

+	/**

+	 * aggregation function desc

+	 */

+	private transient AggregationDesc aggregator;

+

+	/**

+	 * 

+	 * @param aggregator

+	 *            Algebricks function call expression

+	 * @param oi

+	 *            schema

+	 */

+	public AggregationFunctionFactory(

+			AggregateFunctionCallExpression expression, Schema oi,

+			IVariableTypeEnvironment env) throws AlgebricksException {

+

+		try {

+			aggregator = (AggregationDesc) ExpressionTranslator

+					.getHiveExpression(expression, env);

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new AlgebricksException(e.getMessage());

+		}

+		init(aggregator.getParameters(), aggregator.getGenericUDAFName(),

+				aggregator.getMode(), aggregator.getDistinct(), oi);

+	}

+

+	/**

+	 * constructor of aggregation function factory

+	 * 

+	 * @param inputs

+	 * @param name

+	 * @param udafMode

+	 * @param distinct

+	 * @param oi

+	 */

+	private void init(List<ExprNodeDesc> inputs, String name,

+			GenericUDAFEvaluator.Mode udafMode, boolean distinct, Schema oi) {

+		parametersOrigin = inputs;

+		genericUDAFName = name;

+		mode = udafMode;

+		this.distinct = distinct;

+		rowSchema = oi;

+

+		for (ExprNodeDesc input : inputs) {

+			TypeInfo type = input.getTypeInfo();

+			if (type instanceof StructTypeInfo) {

+				types.add(TypeInfoFactory.doubleTypeInfo);

+			} else

+				types.add(type);

+

+			String s = Utilities.serializeExpression(input);

+			parametersSerialization.add(s);

+		}

+	}

+

+	@Override

+	public synchronized ICopyAggregateFunction createAggregateFunction(

+			IDataOutputProvider provider) throws AlgebricksException {

+		if (parametersOrigin == null) {

+			Configuration config = new Configuration();

+			config.setClassLoader(this.getClass().getClassLoader());

+			/**

+			 * in case of class.forname(...) call in hive code

+			 */

+			Thread.currentThread().setContextClassLoader(

+					this.getClass().getClassLoader());

+

+			parametersOrigin = new ArrayList<ExprNodeDesc>();

+			for (String serialization : parametersSerialization) {

+				parametersOrigin.add(Utilities.deserializeExpression(

+						serialization, config));

+			}

+		}

+

+		/**

+		 * exprs

+		 */

+		if (parameterExprs == null)

+			parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();

+

+		/**

+		 * evaluators

+		 */

+		if (evaluators == null)

+			evaluators = new HashMap<Long, ExprNodeEvaluator[]>();

+

+		/**

+		 * cached parameter objects

+		 */

+		if (cachedParameters == null)

+			cachedParameters = new HashMap<Long, Object[]>();

+

+		/**

+		 * cached row object: one per thread

+		 */

+		if (cachedRowObjects == null)

+			cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();

+

+		/**

+		 * we only use lazy serde to do serialization

+		 */

+		if (serDe == null)

+			serDe = new HashMap<Long, SerDe>();

+

+		/**

+		 * UDAF functions

+		 */

+		if (udafsComplete == null)

+			udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();

+

+		/**

+		 * UDAF functions

+		 */

+		if (udafsPartial == null)

+			udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();

+

+		if (parameterInspectors == null)

+			parameterInspectors = new ObjectInspector[parametersOrigin.size()];

+

+		if (rowInspector == null)

+			rowInspector = rowSchema.toObjectInspector();

+

+		// get current thread id

+		long threadId = Thread.currentThread().getId();

+

+		/**

+		 * expressions, expressions are thread local

+		 */

+		List<ExprNodeDesc> parameters = parameterExprs.get(threadId);

+		if (parameters == null) {

+			parameters = new ArrayList<ExprNodeDesc>();

+			for (ExprNodeDesc parameter : parametersOrigin)

+				parameters.add(parameter.clone());

+			parameterExprs.put(threadId, parameters);

+		}

+

+		/**

+		 * cached parameter objects

+		 */

+		Object[] cachedParas = cachedParameters.get(threadId);

+		if (cachedParas == null) {

+			cachedParas = new Object[parameters.size()];

+			cachedParameters.put(threadId, cachedParas);

+		}

+

+		/**

+		 * cached row object: one per thread

+		 */

+		LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects

+				.get(threadId);

+		if (cachedRowObject == null) {

+			cachedRowObject = LazyFactory.createLazyObject(rowInspector);

+			cachedRowObjects.put(threadId, cachedRowObject);

+		}

+

+		/**

+		 * we only use lazy serde to do serialization

+		 */

+		SerDe lazySer = serDe.get(threadId);

+		if (lazySer == null) {

+			lazySer = new LazySerDe();

+			serDe.put(threadId, lazySer);

+		}

+

+		/**

+		 * evaluators

+		 */

+		ExprNodeEvaluator[] evals = evaluators.get(threadId);

+		if (evals == null) {

+			evals = new ExprNodeEvaluator[parameters.size()];

+			evaluators.put(threadId, evals);

+		}

+

+		GenericUDAFEvaluator udafPartial;

+		GenericUDAFEvaluator udafComplete;

+

+		// initialize object inspectors

+		try {

+			/**

+			 * evaluators, udf, object inspectors are shared in one thread

+			 */

+			for (int i = 0; i < evals.length; i++) {

+				if (evals[i] == null) {

+					evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));

+					if (parameterInspectors[i] == null) {

+						parameterInspectors[i] = evals[i]

+								.initialize(rowInspector);

+					} else {

+						evals[i].initialize(rowInspector);

+					}

+				}

+			}

+

+			udafComplete = udafsComplete.get(threadId);

+			if (udafComplete == null) {

+				try {

+					udafComplete = FunctionRegistry.getGenericUDAFEvaluator(

+							genericUDAFName, types, distinct, false);

+				} catch (HiveException e) {

+					throw new AlgebricksException(e);

+				}

+				udafsComplete.put(threadId, udafComplete);

+				udafComplete.init(mode, parameterInspectors);

+			}

+

+			// multiple stage group by, determined by the mode parameter

+			if (outputInspector == null)

+				outputInspector = udafComplete.init(mode, parameterInspectors);

+

+			// initial partial gby udaf

+			GenericUDAFEvaluator.Mode partialMode;

+			// adjust mode for external groupby

+			if (mode == GenericUDAFEvaluator.Mode.COMPLETE)

+				partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;

+			else if (mode == GenericUDAFEvaluator.Mode.FINAL)

+				partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;

+			else

+				partialMode = mode;

+			udafPartial = udafsPartial.get(threadId);

+			if (udafPartial == null) {

+				try {

+					udafPartial = FunctionRegistry.getGenericUDAFEvaluator(

+							genericUDAFName, types, distinct, false);

+				} catch (HiveException e) {

+					throw new AlgebricksException(e);

+				}

+				udafPartial.init(partialMode, parameterInspectors);

+				udafsPartial.put(threadId, udafPartial);

+			}

+

+			// multiple stage group by, determined by the mode parameter

+			if (outputInspectorPartial == null)

+				outputInspectorPartial = udafPartial.init(partialMode,

+						parameterInspectors);

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new AlgebricksException(e);

+		}

+

+		return new AggregationFunctionEvaluator(parameters, types,

+				genericUDAFName, mode, distinct, rowInspector,

+				provider.getDataOutput(), evals, parameterInspectors,

+				cachedParas, lazySer, cachedRowObject, udafPartial,

+				udafComplete, outputInspector, outputInspectorPartial);

+	}

+

+	public String toString() {

+		return "aggregation function expression evaluator factory: "

+				+ this.genericUDAFName;

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java
new file mode 100644
index 0000000..73717a3
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java
@@ -0,0 +1,381 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hivesterix.logical.expression.Schema;
+import edu.uci.ics.hivesterix.runtime.evaluator.AggregatuibFunctionSerializableEvaluator;
+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;
+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunction;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunctionFactory;
+
+public class AggregationFunctionSerializableFactory implements
+		ICopySerializableAggregateFunctionFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	/**
+	 * list of parameters' serialization
+	 */
+	private List<String> parametersSerialization = new ArrayList<String>();
+
+	/**
+	 * the name of the udf
+	 */
+	private String genericUDAFName;
+
+	/**
+	 * aggregation mode
+	 */
+	private GenericUDAFEvaluator.Mode mode;
+
+	/**
+	 * list of type info
+	 */
+	private List<TypeInfo> types = new ArrayList<TypeInfo>();
+
+	/**
+	 * distinct or not
+	 */
+	private boolean distinct;
+
+	/**
+	 * the schema of incoming rows
+	 */
+	private Schema rowSchema;
+
+	/**
+	 * list of parameters
+	 */
+	private transient List<ExprNodeDesc> parametersOrigin;
+
+	/**
+	 * row inspector
+	 */
+	private transient ObjectInspector rowInspector = null;
+
+	/**
+	 * output object inspector
+	 */
+	private transient ObjectInspector outputInspector = null;
+
+	/**
+	 * output object inspector
+	 */
+	private transient ObjectInspector outputInspectorPartial = null;
+
+	/**
+	 * parameter inspectors
+	 */
+	private transient ObjectInspector[] parameterInspectors = null;
+
+	/**
+	 * expression desc
+	 */
+	private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+
+	/**
+	 * evaluators
+	 */
+	private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+
+	/**
+	 * cached parameter objects
+	 */
+	private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();
+
+	/**
+	 * cached row object: one per thread
+	 */
+	private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+
+	/**
+	 * we only use lazy serde to do serialization
+	 */
+	private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();
+
+	/**
+	 * udaf evaluators
+	 */
+	private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+
+	/**
+	 * udaf evaluators
+	 */
+	private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+
+	/**
+	 * aggregation function desc
+	 */
+	private transient AggregationDesc aggregator;
+
+	/**
+	 * 
+	 * @param aggregator
+	 *            Algebricks function call expression
+	 * @param oi
+	 *            schema
+	 */
+	public AggregationFunctionSerializableFactory(
+			AggregateFunctionCallExpression expression, Schema oi,
+			IVariableTypeEnvironment env) throws AlgebricksException {
+
+		try {
+			aggregator = (AggregationDesc) ExpressionTranslator
+					.getHiveExpression(expression, env);
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw new AlgebricksException(e.getMessage());
+		}
+		init(aggregator.getParameters(), aggregator.getGenericUDAFName(),
+				aggregator.getMode(), aggregator.getDistinct(), oi);
+	}
+
+	/**
+	 * constructor of aggregation function factory
+	 * 
+	 * @param inputs
+	 * @param name
+	 * @param udafMode
+	 * @param distinct
+	 * @param oi
+	 */
+	private void init(List<ExprNodeDesc> inputs, String name,
+			GenericUDAFEvaluator.Mode udafMode, boolean distinct, Schema oi) {
+		parametersOrigin = inputs;
+		genericUDAFName = name;
+		mode = udafMode;
+		this.distinct = distinct;
+		rowSchema = oi;
+
+		for (ExprNodeDesc input : inputs) {
+			TypeInfo type = input.getTypeInfo();
+			if (type instanceof StructTypeInfo) {
+				types.add(TypeInfoFactory.doubleTypeInfo);
+			} else
+				types.add(type);
+
+			String s = Utilities.serializeExpression(input);
+			parametersSerialization.add(s);
+		}
+	}
+
+	@Override
+	public synchronized ICopySerializableAggregateFunction createAggregateFunction()
+			throws AlgebricksException {
+		if (parametersOrigin == null) {
+			Configuration config = new Configuration();
+			config.setClassLoader(this.getClass().getClassLoader());
+			/**
+			 * in case of Class.forName(...) call in hive code
+			 */
+			Thread.currentThread().setContextClassLoader(
+					this.getClass().getClassLoader());
+
+			parametersOrigin = new ArrayList<ExprNodeDesc>();
+			for (String serialization : parametersSerialization) {
+				parametersOrigin.add(Utilities.deserializeExpression(
+						serialization, config));
+			}
+		}
+
+		/**
+		 * exprs
+		 */
+		if (parameterExprs == null)
+			parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+
+		/**
+		 * evaluators
+		 */
+		if (evaluators == null)
+			evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+
+		/**
+		 * cached parameter objects
+		 */
+		if (cachedParameters == null)
+			cachedParameters = new HashMap<Long, Object[]>();
+
+		/**
+		 * cached row object: one per thread
+		 */
+		if (cachedRowObjects == null)
+			cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+
+		/**
+		 * we only use lazy serde to do serialization
+		 */
+		if (serDe == null)
+			serDe = new HashMap<Long, SerDe>();
+
+		/**
+		 * UDAF functions
+		 */
+		if (udafsComplete == null)
+			udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+
+		/**
+		 * UDAF functions
+		 */
+		if (udafsPartial == null)
+			udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+
+		if (parameterInspectors == null)
+			parameterInspectors = new ObjectInspector[parametersOrigin.size()];
+
+		if (rowInspector == null)
+			rowInspector = rowSchema.toObjectInspector();
+
+		// get current thread id
+		long threadId = Thread.currentThread().getId();
+
+		/**
+		 * expressions, expressions are thread local
+		 */
+		List<ExprNodeDesc> parameters = parameterExprs.get(threadId);
+		if (parameters == null) {
+			parameters = new ArrayList<ExprNodeDesc>();
+			for (ExprNodeDesc parameter : parametersOrigin)
+				parameters.add(parameter.clone());
+			parameterExprs.put(threadId, parameters);
+		}
+
+		/**
+		 * cached parameter objects
+		 */
+		Object[] cachedParas = cachedParameters.get(threadId);
+		if (cachedParas == null) {
+			cachedParas = new Object[parameters.size()];
+			cachedParameters.put(threadId, cachedParas);
+		}
+
+		/**
+		 * cached row object: one per thread
+		 */
+		LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects
+				.get(threadId);
+		if (cachedRowObject == null) {
+			cachedRowObject = LazyFactory.createLazyObject(rowInspector);
+			cachedRowObjects.put(threadId, cachedRowObject);
+		}
+
+		/**
+		 * we only use lazy serde to do serialization
+		 */
+		SerDe lazySer = serDe.get(threadId);
+		if (lazySer == null) {
+			lazySer = new LazySerDe();
+			serDe.put(threadId, lazySer);
+		}
+
+		/**
+		 * evaluators
+		 */
+		ExprNodeEvaluator[] evals = evaluators.get(threadId);
+		if (evals == null) {
+			evals = new ExprNodeEvaluator[parameters.size()];
+			evaluators.put(threadId, evals);
+		}
+
+		GenericUDAFEvaluator udafPartial;
+		GenericUDAFEvaluator udafComplete;
+
+		// initialize object inspectors
+		try {
+			/**
+			 * evaluators, udf, object inspectors are shared in one thread
+			 */
+			for (int i = 0; i < evals.length; i++) {
+				if (evals[i] == null) {
+					evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));
+					if (parameterInspectors[i] == null) {
+						parameterInspectors[i] = evals[i]
+								.initialize(rowInspector);
+					} else {
+						evals[i].initialize(rowInspector);
+					}
+				}
+			}
+
+			udafComplete = udafsComplete.get(threadId);
+			if (udafComplete == null) {
+				try {
+					udafComplete = FunctionRegistry.getGenericUDAFEvaluator(
+							genericUDAFName, types, distinct, false);
+				} catch (HiveException e) {
+					throw new AlgebricksException(e);
+				}
+				udafsComplete.put(threadId, udafComplete);
+				udafComplete.init(mode, parameterInspectors);
+			}
+
+			// multiple stage group by, determined by the mode parameter
+			if (outputInspector == null)
+				outputInspector = udafComplete.init(mode, parameterInspectors);
+
+			// initial partial gby udaf
+			GenericUDAFEvaluator.Mode partialMode;
+			// adjust mode for external groupby
+			if (mode == GenericUDAFEvaluator.Mode.COMPLETE)
+				partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;
+			else if (mode == GenericUDAFEvaluator.Mode.FINAL)
+				partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;
+			else
+				partialMode = mode;
+			udafPartial = udafsPartial.get(threadId);
+			if (udafPartial == null) {
+				try {
+					udafPartial = FunctionRegistry.getGenericUDAFEvaluator(
+							genericUDAFName, types, distinct, false);
+				} catch (HiveException e) {
+					throw new AlgebricksException(e);
+				}
+				udafPartial.init(partialMode, parameterInspectors);
+				udafsPartial.put(threadId, udafPartial);
+			}
+
+			// multiple stage group by, determined by the mode parameter
+			if (outputInspectorPartial == null)
+				outputInspectorPartial = udafPartial.init(partialMode,
+						parameterInspectors);
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw new AlgebricksException(e);
+		}
+
+		return new AggregatuibFunctionSerializableEvaluator(parameters, types,
+				genericUDAFName, mode, distinct, rowInspector, evals,
+				parameterInspectors, cachedParas, lazySer, cachedRowObject,
+				udafPartial, udafComplete, outputInspector,
+				outputInspectorPartial);
+	}
+
+	public String toString() {
+		return "aggregation function expression evaluator factory: "
+				+ this.genericUDAFName;
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..68bf408
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java
@@ -0,0 +1,45 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.evaluator.ColumnExpressionEvaluator;

+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

+public class ColumnExpressionEvaluatorFactory implements ICopyEvaluatorFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	private ExprNodeColumnDesc expr;

+

+	private Schema inputSchema;

+

+	public ColumnExpressionEvaluatorFactory(ILogicalExpression expression,

+			Schema schema, IVariableTypeEnvironment env)

+			throws AlgebricksException {

+		try {

+			expr = (ExprNodeColumnDesc) ExpressionTranslator.getHiveExpression(

+					expression, env);

+		} catch (Exception e) {

+			throw new AlgebricksException(e.getMessage());

+		}

+		inputSchema = schema;

+	}

+

+	public ICopyEvaluator createEvaluator(IDataOutputProvider output)

+			throws AlgebricksException {

+		return new ColumnExpressionEvaluator(expr,

+				inputSchema.toObjectInspector(), output);

+	}

+

+	public String toString() {

+		return "column expression evaluator factory: " + expr.toString();

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..e0241a1
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java
@@ -0,0 +1,46 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.evaluator.ConstantExpressionEvaluator;

+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

+public class ConstantExpressionEvaluatorFactory implements

+		ICopyEvaluatorFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	private ExprNodeConstantDesc expr;

+

+	private Schema schema;

+

+	public ConstantExpressionEvaluatorFactory(ILogicalExpression expression,

+			Schema inputSchema, IVariableTypeEnvironment env)

+			throws AlgebricksException {

+		try {

+			expr = (ExprNodeConstantDesc) ExpressionTranslator

+					.getHiveExpression(expression, env);

+		} catch (Exception e) {

+			throw new AlgebricksException(e.getMessage());

+		}

+		schema = inputSchema;

+	}

+

+	public ICopyEvaluator createEvaluator(IDataOutputProvider output)

+			throws AlgebricksException {

+		return new ConstantExpressionEvaluator(expr,

+				schema.toObjectInspector(), output);

+	}

+

+	public String toString() {

+		return "constant expression evaluator factory: " + expr.toString();

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..4b5f906
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;

+import edu.uci.ics.hivesterix.runtime.evaluator.FieldExpressionEvaluator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

+public class FieldExpressionEvaluatorFactory implements ICopyEvaluatorFactory {

+	private static final long serialVersionUID = 1L;

+

+	private ExprNodeFieldDesc expr;

+

+	private Schema inputSchema;

+

+	public FieldExpressionEvaluatorFactory(ILogicalExpression expression,

+			Schema schema, IVariableTypeEnvironment env)

+			throws AlgebricksException {

+		try {

+			expr = (ExprNodeFieldDesc) ExpressionTranslator.getHiveExpression(

+					expression, env);

+		} catch (Exception e) {

+			throw new AlgebricksException(e.getMessage());

+		}

+		inputSchema = schema;

+	}

+

+	public ICopyEvaluator createEvaluator(IDataOutputProvider output)

+			throws AlgebricksException {

+		return new FieldExpressionEvaluator(expr,

+				inputSchema.toObjectInspector(), output);

+	}

+

+	public String toString() {

+		return "field access expression evaluator factory: " + expr.toString();

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java
new file mode 100644
index 0000000..387ca72
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java
@@ -0,0 +1,192 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import java.util.ArrayList;

+import java.util.Iterator;

+import java.util.List;

+

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+

+import edu.uci.ics.hivesterix.logical.expression.ExpressionConstant;

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.AggregateFunctionFactoryAdapter;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.UnnestingFunctionFactoryAdapter;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;

+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunctionFactory;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IRunningAggregateEvaluatorFactory;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory;

+

+public class HiveExpressionRuntimeProvider implements

+		IExpressionRuntimeProvider {

+

+	public static final IExpressionRuntimeProvider INSTANCE = new HiveExpressionRuntimeProvider();

+

+	@Override

+	public IAggregateEvaluatorFactory createAggregateFunctionFactory(

+			AggregateFunctionCallExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		Schema schema = this.getSchema(inputSchemas[0], env);

+		return new AggregateFunctionFactoryAdapter(

+				new AggregationFunctionFactory(expr, schema, env));

+	}

+

+	@Override

+	public ICopySerializableAggregateFunctionFactory createSerializableAggregateFunctionFactory(

+			AggregateFunctionCallExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		Schema schema = this.getSchema(inputSchemas[0], env);

+		return new AggregationFunctionSerializableFactory(expr, schema, env);

+	}

+

+	@Override

+	public IRunningAggregateEvaluatorFactory createRunningAggregateFunctionFactory(

+			StatefulFunctionCallExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public IUnnestingEvaluatorFactory createUnnestingFunctionFactory(

+			UnnestingFunctionCallExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		Schema schema = this.getSchema(inputSchemas[0], env);

+		return new UnnestingFunctionFactoryAdapter(

+				new UnnestingFunctionFactory(expr, schema, env));

+	}

+

+	public IScalarEvaluatorFactory createEvaluatorFactory(

+			ILogicalExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		switch (expr.getExpressionTag()) {

+		case VARIABLE: {

+			VariableReferenceExpression v = (VariableReferenceExpression) expr;

+			return new ScalarEvaluatorFactoryAdapter(

+					createVariableEvaluatorFactory(v, env, inputSchemas,

+							context));

+		}

+		case CONSTANT: {

+			ConstantExpression c = (ConstantExpression) expr;

+			return new ScalarEvaluatorFactoryAdapter(

+					createConstantEvaluatorFactory(c, env, inputSchemas,

+							context));

+		}

+		case FUNCTION_CALL: {

+			AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) expr;

+			FunctionIdentifier fid = fun.getFunctionIdentifier();

+

+			if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {

+				return new ScalarEvaluatorFactoryAdapter(

+						createFieldExpressionEvaluatorFactory(fun, env,

+								inputSchemas, context));

+			}

+

+			if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {

+				return new ScalarEvaluatorFactoryAdapter(

+						createNullExpressionEvaluatorFactory(fun, env,

+								inputSchemas, context));

+			}

+

+			if (fun.getKind() == FunctionKind.SCALAR) {

+				ScalarFunctionCallExpression scalar = (ScalarFunctionCallExpression) fun;

+				return new ScalarEvaluatorFactoryAdapter(

+						createScalarFunctionEvaluatorFactory(scalar, env,

+								inputSchemas, context));

+			} else {

+				throw new AlgebricksException(

+						"Cannot create evaluator for function " + fun

+								+ " of kind " + fun.getKind());

+			}

+		}

+		default: {

+			throw new IllegalStateException();

+		}

+		}

+	}

+

+	private ICopyEvaluatorFactory createVariableEvaluatorFactory(

+			VariableReferenceExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		Schema schema = this.getSchema(inputSchemas[0], env);

+		return new ColumnExpressionEvaluatorFactory(expr, schema, env);

+	}

+

+	private ICopyEvaluatorFactory createScalarFunctionEvaluatorFactory(

+			AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		List<String> names = new ArrayList<String>();

+		List<TypeInfo> types = new ArrayList<TypeInfo>();

+		for (IOperatorSchema inputSchema : inputSchemas) {

+			Schema schema = this.getSchema(inputSchema, env);

+			names.addAll(schema.getNames());

+			types.addAll(schema.getTypes());

+		}

+		Schema inputSchema = new Schema(names, types);

+		return new ScalarFunctionExpressionEvaluatorFactory(expr, inputSchema,

+				env);

+	}

+

+	private ICopyEvaluatorFactory createFieldExpressionEvaluatorFactory(

+			AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		Schema schema = this.getSchema(inputSchemas[0], env);

+		return new FieldExpressionEvaluatorFactory(expr, schema, env);

+	}

+

+	private ICopyEvaluatorFactory createNullExpressionEvaluatorFactory(

+			AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		Schema schema = this.getSchema(inputSchemas[0], env);

+		return new NullExpressionEvaluatorFactory(expr, schema, env);

+	}

+

+	private ICopyEvaluatorFactory createConstantEvaluatorFactory(

+			ConstantExpression expr, IVariableTypeEnvironment env,

+			IOperatorSchema[] inputSchemas, JobGenContext context)

+			throws AlgebricksException {

+		Schema schema = this.getSchema(inputSchemas[0], env);

+		return new ConstantExpressionEvaluatorFactory(expr, schema, env);

+	}

+

+	private Schema getSchema(IOperatorSchema inputSchema,

+			IVariableTypeEnvironment env) throws AlgebricksException {

+		List<String> names = new ArrayList<String>();

+		List<TypeInfo> types = new ArrayList<TypeInfo>();

+		Iterator<LogicalVariable> variables = inputSchema.iterator();

+		while (variables.hasNext()) {

+			LogicalVariable var = variables.next();

+			names.add(var.toString());

+			types.add((TypeInfo) env.getVarType(var));

+		}

+

+		Schema schema = new Schema(names, types);

+		return schema;

+	}

+

+}
\ No newline at end of file
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..8f516e8
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java
@@ -0,0 +1,45 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;

+import edu.uci.ics.hivesterix.runtime.evaluator.NullExpressionEvaluator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

+public class NullExpressionEvaluatorFactory implements ICopyEvaluatorFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	private ExprNodeNullDesc expr;

+

+	private Schema schema;

+

+	public NullExpressionEvaluatorFactory(ILogicalExpression expression,

+			Schema intputSchema, IVariableTypeEnvironment env)

+			throws AlgebricksException {

+		try {

+			expr = (ExprNodeNullDesc) ExpressionTranslator.getHiveExpression(

+					expression, env);

+		} catch (Exception e) {

+			throw new AlgebricksException(e.getMessage());

+		}

+		schema = intputSchema;

+	}

+

+	public ICopyEvaluator createEvaluator(IDataOutputProvider output)

+			throws AlgebricksException {

+		return new NullExpressionEvaluator(expr, schema.toObjectInspector(),

+				output);

+	}

+

+	public String toString() {

+		return "null expression evaluator factory: " + expr.toString();

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..262758e
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java
@@ -0,0 +1,77 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import org.apache.hadoop.conf.Configuration;

+import org.apache.hadoop.hive.ql.exec.Utilities;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;

+import edu.uci.ics.hivesterix.runtime.evaluator.FunctionExpressionEvaluator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

+public class ScalarFunctionExpressionEvaluatorFactory implements

+		ICopyEvaluatorFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	private transient ExprNodeGenericFuncDesc expr;

+

+	private String exprSerialization;

+

+	private Schema inputSchema;

+

+	private transient Configuration config;

+

+	public ScalarFunctionExpressionEvaluatorFactory(

+			ILogicalExpression expression, Schema schema,

+			IVariableTypeEnvironment env) throws AlgebricksException {

+		try {

+			expr = (ExprNodeGenericFuncDesc) ExpressionTranslator

+					.getHiveExpression(expression, env);

+

+			exprSerialization = Utilities.serializeExpression(expr);

+

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new AlgebricksException(e.getMessage());

+		}

+		inputSchema = schema;

+	}

+

+	public synchronized ICopyEvaluator createEvaluator(

+			IDataOutputProvider output) throws AlgebricksException {

+		if (expr == null) {

+			configClassLoader();

+			expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(

+					exprSerialization, config);

+		}

+

+		ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr

+				.clone();

+		return new FunctionExpressionEvaluator(funcDesc,

+				inputSchema.toObjectInspector(), output);

+	}

+

+	private void configClassLoader() {

+		config = new Configuration();

+		ClassLoader loader = this.getClass().getClassLoader();

+		config.setClassLoader(loader);

+		Thread.currentThread().setContextClassLoader(loader);

+	}

+

+	public String toString() {

+		if (expr == null) {

+			configClassLoader();

+			expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(

+					exprSerialization, new Configuration());

+		}

+

+		return "function expression evaluator factory: " + expr.getExprString();

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java
new file mode 100644
index 0000000..1d77737
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;

+

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.evaluator.ExpressionTranslator;

+import edu.uci.ics.hivesterix.runtime.evaluator.UDTFFunctionEvaluator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunction;

+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunctionFactory;

+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;

+

+public class UnnestingFunctionFactory implements ICopyUnnestingFunctionFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	private UDTFDesc expr;

+

+	private Schema inputSchema;

+

+	private int[] columns;

+

+	public UnnestingFunctionFactory(ILogicalExpression expression,

+			Schema schema, IVariableTypeEnvironment env)

+			throws AlgebricksException {

+		try {

+			expr = (UDTFDesc) ExpressionTranslator.getHiveExpression(

+					expression, env);

+		} catch (Exception e) {

+			throw new AlgebricksException(e.getMessage());

+		}

+		inputSchema = schema;

+	}

+

+	@Override

+	public ICopyUnnestingFunction createUnnestingFunction(

+			IDataOutputProvider provider) throws AlgebricksException {

+		return new UDTFFunctionEvaluator(expr, inputSchema, columns,

+				provider.getDataOutput());

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..fc302e1
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;

+

+public class HiveDoubleBinaryHashFunctionFactory implements

+		IBinaryHashFunctionFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveDoubleBinaryHashFunctionFactory INSTANCE = new HiveDoubleBinaryHashFunctionFactory();

+

+	private HiveDoubleBinaryHashFunctionFactory() {

+	}

+

+	@Override

+	public IBinaryHashFunction createBinaryHashFunction() {

+		// TODO Auto-generated method stub

+		return new IBinaryHashFunction() {

+			private Double value;

+

+			@Override

+			public int hash(byte[] bytes, int offset, int length) {

+				value = Double.longBitsToDouble(LazyUtils.byteArrayToLong(

+						bytes, offset));

+				return value.hashCode();

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..e1a9994
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;

+

+public class HiveIntegerBinaryHashFunctionFactory implements

+		IBinaryHashFunctionFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static IBinaryHashFunctionFactory INSTANCE = new HiveIntegerBinaryHashFunctionFactory();

+

+	private HiveIntegerBinaryHashFunctionFactory() {

+	}

+

+	@Override

+	public IBinaryHashFunction createBinaryHashFunction() {

+

+		return new IBinaryHashFunction() {

+			private VInt value = new VInt();

+

+			@Override

+			public int hash(byte[] bytes, int offset, int length) {

+				LazyUtils.readVInt(bytes, offset, value);

+				if (value.length != length)

+					throw new IllegalArgumentException(

+							"length mismatch in int hash function actual: "

+									+ length + " expected " + value.length);

+				return value.value;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..6f7c6f2
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;

+

+public class HiveLongBinaryHashFunctionFactory implements

+		IBinaryHashFunctionFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static IBinaryHashFunctionFactory INSTANCE = new HiveLongBinaryHashFunctionFactory();

+

+	private HiveLongBinaryHashFunctionFactory() {

+	}

+

+	@Override

+	public IBinaryHashFunction createBinaryHashFunction() {

+

+		return new IBinaryHashFunction() {

+			private VLong value = new VLong();

+

+			@Override

+			public int hash(byte[] bytes, int offset, int length) {

+				LazyUtils.readVLong(bytes, offset, value);

+				return (int) value.value;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..e03dde0
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java
@@ -0,0 +1,32 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;

+

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;

+

+public class HiveRawBinaryHashFunctionFactory implements

+		IBinaryHashFunctionFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static IBinaryHashFunctionFactory INSTANCE = new HiveRawBinaryHashFunctionFactory();

+

+	private HiveRawBinaryHashFunctionFactory() {

+

+	}

+

+	@Override

+	public IBinaryHashFunction createBinaryHashFunction() {

+

+		return new IBinaryHashFunction() {

+

+			@Override

+			public int hash(byte[] bytes, int offset, int length) {

+				int value = 1;

+				int end = offset + length;

+				for (int i = offset; i < end; i++)

+					value = value * 31 + (int) bytes[i];

+				return value;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..055c077
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java
@@ -0,0 +1,45 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;

+

+public class HiveStingBinaryHashFunctionFactory implements

+		IBinaryHashFunctionFactory {

+	private static final long serialVersionUID = 1L;

+

+	public static HiveStingBinaryHashFunctionFactory INSTANCE = new HiveStingBinaryHashFunctionFactory();

+

+	private HiveStingBinaryHashFunctionFactory() {

+	}

+

+	@Override

+	public IBinaryHashFunction createBinaryHashFunction() {

+		// TODO Auto-generated method stub

+		return new IBinaryHashFunction() {

+			private VInt len = new VInt();

+

+			@Override

+			public int hash(byte[] bytes, int offset, int length) {

+				LazyUtils.readVInt(bytes, offset, len);

+				if (len.value + len.length != length)

+					throw new IllegalStateException(

+							"parse string: length mismatch, expected "

+									+ (len.value + len.length) + " but get "

+									+ length);

+				return hashBytes(bytes, offset + len.length, length

+						- len.length);

+			}

+

+			public int hashBytes(byte[] bytes, int offset, int length) {

+				int value = 1;

+				int end = offset + length;

+				for (int i = offset; i < end; i++)

+					value = value * 31 + (int) bytes[i];

+				return value;

+			}

+		};

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java
new file mode 100644
index 0000000..760a614
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java
@@ -0,0 +1,63 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
/**
 * Serializable family of seeded MurmurHash3 (x86, 32-bit variant) hash
 * functions over a byte range; each seed yields an independent function.
 */
public class MurmurHash3BinaryHashFunctionFamily implements IBinaryHashFunctionFamily {

    public static final IBinaryHashFunctionFamily INSTANCE = new MurmurHash3BinaryHashFunctionFamily();

    private static final long serialVersionUID = 1L;

    private MurmurHash3BinaryHashFunctionFamily() {
    }

    // MurmurHash3 x86_32 constants: C1/C2 scramble each 4-byte block,
    // C3/C4 update the running state, C5/C6 drive the final avalanche.
    private static final int C1 = 0xcc9e2d51;
    private static final int C2 = 0x1b873593;
    private static final int C3 = 5;
    private static final int C4 = 0xe6546b64;
    private static final int C5 = 0x85ebca6b;
    private static final int C6 = 0xc2b2ae35;

    @Override
    public IBinaryHashFunction createBinaryHashFunction(final int seed) {
        return new IBinaryHashFunction() {
            @Override
            public int hash(byte[] bytes, int offset, int length) {
                int h = seed;
                int p = offset;
                int remain = length;
                // Body: mix one little-endian 4-byte block at a time.
                while (remain >= 4) {
                    int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8) | ((bytes[p + 2] & 0xff) << 16)
                            | ((bytes[p + 3] & 0xff) << 24);
                    k *= C1;
                    k = Integer.rotateLeft(k, 15);
                    k *= C2;
                    h ^= k;
                    h = Integer.rotateLeft(h, 13);
                    h = h * C3 + C4;
                    p += 4;
                    remain -= 4;
                }
                // Tail: fold the remaining 1-3 bytes into one final block.
                if (remain > 0) {
                    int k = 0;
                    for (int i = 0; remain > 0; i += 8) {
                        k ^= (bytes[p++] & 0xff) << i;
                        remain--;
                    }
                    k *= C1;
                    k = Integer.rotateLeft(k, 15);
                    k *= C2;
                    h ^= k;
                }
                // Finalization: incorporate the length, then avalanche.
                h ^= length;
                h ^= (h >>> 16);
                h *= C5;
                h ^= (h >>> 13);
                h *= C6;
                h ^= (h >>> 16);
                return h;
            }
        };
    }
}
\ No newline at end of file
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..5f03962
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
/**
 * Ascending normalized-key computer for doubles: uses the first 4 bytes of
 * the serialized value as a sort-key prefix, shifted from signed to
 * unsigned int ordering.
 */
public class HiveDoubleAscNormalizedKeyComputerFactory implements
		INormalizedKeyComputerFactory {

	private static final long serialVersionUID = 1L;

	@Override
	public INormalizedKeyComputer createNormalizedKeyComputer() {

		return new INormalizedKeyComputer() {

			@Override
			public int normalize(byte[] bytes, int start, int length) {
				// Key prefix = first 4 bytes of the serialized double.
				int header = LazyUtils.byteArrayToInt(bytes, start);
				long unsignedValue = (long) header;
				// NOTE(review): raw IEEE-754 bit patterns order-reverse for
				// negative doubles, so this prefix may mis-order negative
				// values; the full comparator must remain the authoritative
				// tie-breaker — confirm before relying on this key alone.
				// Map signed int range onto unsigned (value - MIN_VALUE).
				return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
			}
		};
	}
}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..e4587a2
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,26 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveDoubleDescNormalizedKeyComputerFactory implements
+		INormalizedKeyComputerFactory {
+
+	private static final long serialVersionUID = 1L;
+	private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveDoubleAscNormalizedKeyComputerFactory();
+
+	@Override
+	public INormalizedKeyComputer createNormalizedKeyComputer() {
+		return new INormalizedKeyComputer() {
+			private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory
+					.createNormalizedKeyComputer();
+
+			@Override
+			public int normalize(byte[] bytes, int start, int length) {
+				int nk = nmkComputer.normalize(bytes, start, length);
+				return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
+			}
+
+		};
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..2ff390a
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveIntegerAscNormalizedKeyComputerFactory implements
+		INormalizedKeyComputerFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	@Override
+	public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+		return new INormalizedKeyComputer() {
+			private VInt vint = new VInt();
+
+			@Override
+			public int normalize(byte[] bytes, int start, int length) {
+				LazyUtils.readVInt(bytes, start, vint);
+				if (vint.length != length)
+					throw new IllegalArgumentException(
+							"length mismatch in int comparator function actual: "
+									+ vint.length + " expected " + length);
+				long unsignedValue = (long) vint.value;
+				return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+			}
+		};
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..8eff1f8
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveIntegerDescNormalizedKeyComputerFactory implements
+		INormalizedKeyComputerFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	@Override
+	public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+		return new INormalizedKeyComputer() {
+			private VInt vint = new VInt();
+
+			@Override
+			public int normalize(byte[] bytes, int start, int length) {
+				LazyUtils.readVInt(bytes, start, vint);
+				if (vint.length != length)
+					throw new IllegalArgumentException(
+							"length mismatch in int comparator function actual: "
+									+ vint.length + " expected " + length);
+				long unsignedValue = (long) vint.value;
+				return (int) ((long) 0xffffffff - unsignedValue);
+			}
+		};
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..768eec2
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,65 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
/**
 * Ascending normalized-key computer for VLong-encoded longs. The 64-bit
 * value is compressed to a 32-bit key: a 2-bit tag in the top bits
 * partitions the key space (negative longs < non-negative int-range
 * values < large positive longs) and the remaining 30 bits carry a
 * shifted prefix of the value.
 */
public class HiveLongAscNormalizedKeyComputerFactory implements
		INormalizedKeyComputerFactory {

	private static final long serialVersionUID = 1L;

	@Override
	public INormalizedKeyComputer createNormalizedKeyComputer() {

		return new INormalizedKeyComputer() {
			// 2-bit tags placed in the key's top bits; their numeric order
			// (0 < 2 < 3) realizes the three-way partition above.
			// ("POSTIVE" is a historical typo, kept to avoid churn.)
			private static final int POSTIVE_LONG_MASK = (3 << 30);
			private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
			private static final int NEGATIVE_LONG_MASK = (0 << 30);
			// Scratch holder reused across calls to avoid per-tuple allocation.
			private VLong vlong = new VLong();

			@Override
			public int normalize(byte[] bytes, int start, int length) {
				LazyUtils.readVLong(bytes, start, vlong);
				// The field must consist of exactly one VLong.
				if (vlong.length != length)
					throw new IllegalArgumentException(
							"length mismatch in int comparator function actual: "
									+ vlong.length + " expected " + length);
				long value = (long) vlong.value;
				// Sign-propagating shift: highValue is the upper 32 bits.
				int highValue = (int) (value >> 32);
				if (highValue > 0) {
					/**
					 * larger than Integer.MAX
					 */
					// Key from the high word; >>= 2 (arithmetic) makes room
					// for the tag bits.
					int highNmk = getKey(highValue);
					highNmk >>= 2;
					highNmk |= POSTIVE_LONG_MASK;
					return highNmk;
				} else if (highValue == 0) {
					/**
					 * smaller than Integer.MAX but >=0
					 */
					// Value fits in the non-negative int range; key from the
					// low word directly.
					int lowNmk = (int) value;
					lowNmk >>= 2;
					lowNmk |= NON_NEGATIVE_INT_MASK;
					return lowNmk;
				} else {
					/**
					 * less than 0; TODO: have not optimized for that
					 */
					int highNmk = getKey(highValue);
					highNmk >>= 2;
					highNmk |= NEGATIVE_LONG_MASK;
					return highNmk;
				}
			}

			// Maps a signed int onto unsigned ordering (value - MIN_VALUE).
			private int getKey(int value) {
				long unsignedFirstValue = (long) value;
				int nmk = (int) ((unsignedFirstValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
				return nmk;
			}
		};
	}
}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..20ae56a
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,27 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveLongDescNormalizedKeyComputerFactory implements
+		INormalizedKeyComputerFactory {
+
+	private static final long serialVersionUID = 1L;
+	private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveIntegerAscNormalizedKeyComputerFactory();
+
+	@Override
+	public INormalizedKeyComputer createNormalizedKeyComputer() {
+		return new INormalizedKeyComputer() {
+			private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory
+					.createNormalizedKeyComputer();
+
+			@Override
+			public int normalize(byte[] bytes, int start, int length) {
+				int nk = nmkComputer.normalize(bytes, start, length);
+				return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
+			}
+
+		};
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..b16ccba
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+
+public class HiveStringAscNormalizedKeyComputerFactory implements
+		INormalizedKeyComputerFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	@Override
+	public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+		return new INormalizedKeyComputer() {
+			private VInt len = new VInt();
+
+			@Override
+			public int normalize(byte[] bytes, int start, int length) {
+				LazyUtils.readVInt(bytes, start, len);
+
+				if (len.value + len.length != length)
+					throw new IllegalStateException(
+							"parse string: length mismatch, expected "
+									+ (len.value + len.length) + " but get "
+									+ length);
+				int nk = 0;
+				int offset = start + len.length;
+				for (int i = 0; i < 2; ++i) {
+					nk <<= 16;
+					if (i < len.value) {
+						char character = UTF8StringPointable.charAt(bytes,
+								offset);
+						nk += ((int) character) & 0xffff;
+						offset += UTF8StringPointable.charSize(bytes, offset);
+					}
+				}
+				return nk;
+			}
+		};
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..e8978c6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+
+public class HiveStringDescNormalizedKeyComputerFactory implements
+		INormalizedKeyComputerFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	@Override
+	public INormalizedKeyComputer createNormalizedKeyComputer() {
+		return new INormalizedKeyComputer() {
+			private VInt len = new VInt();
+
+			@Override
+			public int normalize(byte[] bytes, int start, int length) {
+				LazyUtils.readVInt(bytes, start, len);
+				if (len.value + len.length != length)
+					throw new IllegalStateException(
+							"parse string: length mismatch, expected "
+									+ (len.value + len.length) + " but get "
+									+ length);
+				int nk = 0;
+				int offset = start + len.length;
+				for (int i = 0; i < 2; ++i) {
+					nk <<= 16;
+					if (i < len.value) {
+						nk += ((int) UTF8StringPointable.charAt(bytes, offset)) & 0xffff;
+						offset += UTF8StringPointable.charSize(bytes, offset);
+					}
+				}
+				return (int) ((long) 0xffffffff - (long) nk);
+			}
+		};
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java
new file mode 100644
index 0000000..91d08c6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hivesterix.runtime.factory.nullwriter;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class HiveNullWriterFactory implements INullWriterFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	public static HiveNullWriterFactory INSTANCE = new HiveNullWriterFactory();
+
+	@Override
+	public INullWriter createNullWriter() {
+		return new HiveNullWriter();
+	}
+}
+
+class HiveNullWriter implements INullWriter {
+
+	@Override
+	public void writeNull(DataOutput out) throws HyracksDataException {
+		// do nothing
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java
new file mode 100644
index 0000000..3d2b141
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hivesterix.runtime.inspector;

+

+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspector;

+

+public class HiveBinaryBooleanInspector implements IBinaryBooleanInspector {

+

+	HiveBinaryBooleanInspector() {

+	}

+

+	@Override

+	public boolean getBooleanValue(byte[] bytes, int offset, int length) {

+		if (length == 0)

+			return false;

+		if (length != 1)

+			throw new IllegalStateException("boolean field error: with length "

+					+ length);

+		return bytes[0] == 1;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java
new file mode 100644
index 0000000..86afbee
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java
@@ -0,0 +1,22 @@
+package edu.uci.ics.hivesterix.runtime.inspector;
+
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspector;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class HiveBinaryBooleanInspectorFactory implements
+		IBinaryBooleanInspectorFactory {
+	private static final long serialVersionUID = 1L;
+	public static HiveBinaryBooleanInspectorFactory INSTANCE = new HiveBinaryBooleanInspectorFactory();
+
+	private HiveBinaryBooleanInspectorFactory() {
+
+	}
+
+	@Override
+	public IBinaryBooleanInspector createBinaryBooleanInspector(
+			IHyracksTaskContext arg0) {
+		return new HiveBinaryBooleanInspector();
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java
new file mode 100644
index 0000000..e82e501
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java
@@ -0,0 +1,23 @@
+package edu.uci.ics.hivesterix.runtime.inspector;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;

+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspector;

+

+public class HiveBinaryIntegerInspector implements IBinaryIntegerInspector {

+	private VInt value = new VInt();

+

+	HiveBinaryIntegerInspector() {

+	}

+

+	@Override

+	public int getIntegerValue(byte[] bytes, int offset, int length) {

+		LazyUtils.readVInt(bytes, offset, value);

+		if (value.length != length)

+			throw new IllegalArgumentException(

+					"length mismatch in int hash function actual: " + length

+							+ " expected " + value.length);

+		return value.value;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java
new file mode 100644
index 0000000..b44e610
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java
@@ -0,0 +1,22 @@
+package edu.uci.ics.hivesterix.runtime.inspector;
+
+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspector;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class HiveBinaryIntegerInspectorFactory implements
+		IBinaryIntegerInspectorFactory {
+	private static final long serialVersionUID = 1L;
+	public static HiveBinaryIntegerInspectorFactory INSTANCE = new HiveBinaryIntegerInspectorFactory();
+
+	private HiveBinaryIntegerInspectorFactory() {
+
+	}
+
+	@Override
+	public IBinaryIntegerInspector createBinaryIntegerInspector(
+			IHyracksTaskContext arg0) {
+		return new HiveBinaryIntegerInspector();
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java
new file mode 100644
index 0000000..8f559e2
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java
@@ -0,0 +1,67 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.PipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedBlockingConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedPipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+
+public class HiveConnectorPolicyAssignmentPolicy implements
+		IConnectorPolicyAssignmentPolicy {
+	public enum Policy {
+		PIPELINING, SEND_SIDE_MAT_PIPELINING, SEND_SIDE_MAT_BLOCKING, SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING;
+	};
+
+	private static final long serialVersionUID = 1L;
+
+	private final IConnectorPolicy pipeliningPolicy = new PipeliningConnectorPolicy();
+	private final IConnectorPolicy sendSideMatPipeliningPolicy = new SendSideMaterializedPipeliningConnectorPolicy();
+	private final IConnectorPolicy sendSideMatBlockingPolicy = new SendSideMaterializedBlockingConnectorPolicy();
+	private final IConnectorPolicy sendSideMatReceiveSideMatBlockingPolicy = new SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy();
+	private final Policy policy;
+
+	public HiveConnectorPolicyAssignmentPolicy(Policy policy) {
+		this.policy = policy;
+	}
+
+	@Override
+	public IConnectorPolicy getConnectorPolicyAssignment(
+			IConnectorDescriptor c, int nProducers, int nConsumers,
+			int[] fanouts) {
+		if (c instanceof MToNPartitioningMergingConnectorDescriptor) {
+			// avoid deadlocks
+			switch (policy) {
+			case PIPELINING:
+			case SEND_SIDE_MAT_PIPELINING:
+				return sendSideMatPipeliningPolicy;
+			case SEND_SIDE_MAT_BLOCKING:
+				return sendSideMatBlockingPolicy;
+			case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
+				return sendSideMatReceiveSideMatBlockingPolicy;
+			default:
+				return sendSideMatPipeliningPolicy;
+			}
+		} else if (c instanceof MToNPartitioningConnectorDescriptor) {
+			// support different repartitioning policies
+			switch (policy) {
+			case PIPELINING:
+				return pipeliningPolicy;
+			case SEND_SIDE_MAT_PIPELINING:
+				return sendSideMatPipeliningPolicy;
+			case SEND_SIDE_MAT_BLOCKING:
+				return sendSideMatBlockingPolicy;
+			case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
+				return sendSideMatReceiveSideMatBlockingPolicy;
+			default:
+				return pipeliningPolicy;
+			}
+		} else {
+			// pipelining for other connectors
+			return pipeliningPolicy;
+		}
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java
new file mode 100644
index 0000000..e4fbca5
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java
@@ -0,0 +1,32 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty;

+

+public class HiveDataSink implements IDataSink {

+

+	private Object[] schema;

+

+	private Object fsOperator;

+

+	public HiveDataSink(Object sink, Object[] sourceSchema) {

+		schema = sourceSchema;

+		fsOperator = sink;

+	}

+

+	@Override

+	public Object getId() {

+		return fsOperator;

+	}

+

+	@Override

+	public Object[] getSchemaTypes() {

+		return schema;

+	}

+

+	public IPartitioningProperty getPartitioningProperty() {

+		return new RandomPartitioningProperty(new HiveDomain());

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java
new file mode 100644
index 0000000..edff056
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java
@@ -0,0 +1,48 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import java.util.List;

+

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;

+

+public class HiveDataSource<P> implements IDataSource<P> {

+

+	private P source;

+

+	private Object[] schema;

+

+	public HiveDataSource(P dataSource, Object[] sourceSchema) {

+		source = dataSource;

+		schema = sourceSchema;

+	}

+

+	@Override

+	public P getId() {

+		return source;

+	}

+

+	@Override

+	public Object[] getSchemaTypes() {

+		return schema;

+	}

+

+	@Override

+	public void computeFDs(List<LogicalVariable> scanVariables,

+			List<FunctionalDependency> fdList) {

+	}

+

+	@Override

+	public IDataSourcePropertiesProvider getPropertiesProvider() {

+		return new HiveDataSourcePartitioningProvider();

+	}

+

+	@Override

+	public String toString() {

+		PartitionDesc desc = (PartitionDesc) source;

+		return desc.getTableName();

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java
new file mode 100644
index 0000000..08dd684
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java
@@ -0,0 +1,26 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import java.util.LinkedList;

+import java.util.List;

+

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;

+

+public class HiveDataSourcePartitioningProvider implements

+		IDataSourcePropertiesProvider {

+

+	@Override

+	public IPhysicalPropertiesVector computePropertiesVector(

+			List<LogicalVariable> scanVariables) {

+		IPartitioningProperty property = new RandomPartitioningProperty(

+				new HiveDomain());

+		IPhysicalPropertiesVector vector = new StructuralPropertiesVector(

+				property, new LinkedList<ILocalStructuralProperty>());

+		return vector;

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java
new file mode 100644
index 0000000..0af253a
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;

+

+public class HiveDomain implements INodeDomain {

+

+	@Override

+	public boolean sameAs(INodeDomain domain) {

+		return true;

+	}

+

+	@Override

+	public Integer cardinality() {

+		return 0;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java
new file mode 100644
index 0000000..5782703
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java
@@ -0,0 +1,149 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import java.util.HashMap;

+import java.util.List;

+

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.HiveFunctionInfo;

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;

+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;

+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;

+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;

+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;

+import edu.uci.ics.hyracks.api.job.JobSpecification;

+

+@SuppressWarnings("rawtypes")

+public class HiveMetaDataProvider<S, T> implements IMetadataProvider<S, T> {

+

+	private Operator fileSink;

+	private Schema outputSchema;

+	private HashMap<S, IDataSource<S>> dataSourceMap;

+

+	public HiveMetaDataProvider(Operator fsOp, Schema oi,

+			HashMap<S, IDataSource<S>> map) {

+		fileSink = fsOp;

+		outputSchema = oi;

+		dataSourceMap = map;

+	}

+

+	@Override

+	public IDataSourceIndex<T, S> findDataSourceIndex(T indexId, S dataSourceId)

+			throws AlgebricksException {

+		return null;

+	}

+

+	@Override

+	public IDataSource<S> findDataSource(S id) throws AlgebricksException {

+		return dataSourceMap.get(id);

+	}

+

+	@Override

+	public boolean scannerOperatorIsLeaf(IDataSource<S> dataSource) {

+		return true;

+	}

+

+	@Override

+	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(

+			IDataSource<S> dataSource, List<LogicalVariable> scanVariables,

+			List<LogicalVariable> projectVariables, boolean projectPushed,

+			IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,

+			JobGenContext context, JobSpecification jobSpec)

+			throws AlgebricksException {

+

+		S desc = dataSource.getId();

+		HiveScanRuntimeGenerator generator = new HiveScanRuntimeGenerator(

+				(PartitionDesc) desc);

+		return generator.getRuntimeOperatorAndConstraint(dataSource,

+				scanVariables, projectVariables, projectPushed, context,

+				jobSpec);

+	}

+

+	@Override

+	public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(

+			IDataSink sink, int[] printColumns,

+			IPrinterFactory[] printerFactories, RecordDescriptor inputDesc) {

+

+		HiveWriteRuntimeGenerator generator = new HiveWriteRuntimeGenerator(

+				(FileSinkOperator) fileSink, outputSchema);

+		return generator.getWriterRuntime(inputDesc);

+	}

+

+	@Override

+	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(

+			IDataSource<S> arg0, IOperatorSchema arg1,

+			List<LogicalVariable> arg2, LogicalVariable arg3,

+			RecordDescriptor arg4, JobGenContext arg5, JobSpecification arg6)

+			throws AlgebricksException {

+		// TODO Auto-generated method stub

+		return null;

+	}

+

+	@Override

+	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(

+			IDataSource<S> arg0, IOperatorSchema arg1,

+			List<LogicalVariable> arg2, LogicalVariable arg3,

+			RecordDescriptor arg4, JobGenContext arg5, JobSpecification arg6)

+			throws AlgebricksException {

+		// TODO Auto-generated method stub

+		return null;

+	}

+

+	@Override

+	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(

+			IDataSource<S> arg0, IOperatorSchema arg1,

+			List<LogicalVariable> arg2, LogicalVariable arg3,

+			JobGenContext arg4, JobSpecification arg5)

+			throws AlgebricksException {

+		// TODO Auto-generated method stub

+		return null;

+	}

+

+	@Override

+	public IFunctionInfo lookupFunction(FunctionIdentifier arg0) {

+		return new HiveFunctionInfo(arg0, null);

+	}

+

+	@Override

+	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(

+			IDataSourceIndex<T, S> dataSource,

+			IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,

+			IVariableTypeEnvironment typeEnv,

+			List<LogicalVariable> primaryKeys,

+			List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr,

+			RecordDescriptor recordDesc, JobGenContext context,

+			JobSpecification spec) throws AlgebricksException {

+		// TODO Auto-generated method stub

+		return null;

+	}

+

+	@Override

+	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(

+			IDataSourceIndex<T, S> dataSource,

+			IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,

+			IVariableTypeEnvironment typeEnv,

+			List<LogicalVariable> primaryKeys,

+			List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr,

+			RecordDescriptor recordDesc, JobGenContext context,

+			JobSpecification spec) throws AlgebricksException {

+		// TODO Auto-generated method stub

+		return null;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java
new file mode 100644
index 0000000..83382f0
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java
@@ -0,0 +1,84 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;

+

+public class HiveOperatorSchema implements IOperatorSchema {

+

+	private final Map<LogicalVariable, Integer> varMap;

+

+	private final List<LogicalVariable> varList;

+

+	public HiveOperatorSchema() {

+		varMap = new HashMap<LogicalVariable, Integer>();

+		varList = new ArrayList<LogicalVariable>();

+	}

+

+	@Override

+	public void addAllVariables(IOperatorSchema source) {

+		for (LogicalVariable v : source) {

+			varMap.put(v, varList.size());

+			varList.add(v);

+		}

+	}

+

+	@Override

+	public void addAllNewVariables(IOperatorSchema source) {

+		for (LogicalVariable v : source) {

+			if (varMap.get(v) == null) {

+				varMap.put(v, varList.size());

+				varList.add(v);

+			}

+		}

+	}

+

+	@Override

+	public int addVariable(LogicalVariable var) {

+		int idx = varList.size();

+		varMap.put(var, idx);

+		varList.add(var);

+		return idx;

+	}

+

+	@Override

+	public void clear() {

+		varMap.clear();

+		varList.clear();

+	}

+

+	@Override

+	public int findVariable(LogicalVariable var) {

+		Integer i = varMap.get(var);

+		if (i == null) {

+			return -1;

+		}

+		return i;

+	}

+

+	@Override

+	public int getSize() {

+		return varList.size();

+	}

+

+	@Override

+	public LogicalVariable getVariable(int index) {

+		return varList.get(index);

+	}

+

+	@Override

+	public Iterator<LogicalVariable> iterator() {

+		return varList.iterator();

+	}

+

+	@Override

+	public String toString() {

+		return varMap.toString();

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java
new file mode 100644
index 0000000..9c8aee4
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java
@@ -0,0 +1,117 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import java.util.List;

+import java.util.Properties;

+

+import org.apache.hadoop.fs.Path;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.mapred.JobConf;

+

+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;

+import edu.uci.ics.hivesterix.runtime.operator.filescan.HiveFileScanOperatorDescriptor;

+import edu.uci.ics.hivesterix.runtime.operator.filescan.HiveFileSplitProvider;

+import edu.uci.ics.hivesterix.runtime.operator.filescan.HiveTupleParserFactory;

+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;

+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;

+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;

+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;

+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;

+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;

+import edu.uci.ics.hyracks.api.job.JobSpecification;

+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;

+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;

+

+@SuppressWarnings({ "rawtypes", "deprecation" })

+public class HiveScanRuntimeGenerator {

+

+	private PartitionDesc fileDesc;

+

+	private transient Path filePath;

+

+	private String filePathName;

+

+	private Properties properties;

+

+	public HiveScanRuntimeGenerator(PartitionDesc path) {

+		fileDesc = path;

+		properties = fileDesc.getProperties();

+

+		String inputPath = (String) properties.getProperty("location");

+

+		if (inputPath.startsWith("file:")) {

+			// Windows

+			String[] strs = inputPath.split(":");

+			filePathName = strs[strs.length - 1];

+		} else {

+			// Linux

+			filePathName = inputPath;

+		}

+

+		filePath = new Path(filePathName);

+	}

+

+	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRuntimeOperatorAndConstraint(

+			IDataSource dataSource, List<LogicalVariable> scanVariables,

+			List<LogicalVariable> projectVariables, boolean projectPushed,

+			JobGenContext context, JobSpecification jobSpec)

+			throws AlgebricksException {

+		// get the correct delimiter from Hive metastore or other data

+		// structures

+		IOperatorSchema propagatedSchema = new HiveOperatorSchema();

+

+		List<LogicalVariable> outputVariables = projectPushed ? projectVariables

+				: scanVariables;

+		for (LogicalVariable var : outputVariables)

+			propagatedSchema.addVariable(var);

+

+		int[] outputColumnsOffset = new int[scanVariables.size()];

+		int i = 0;

+		for (LogicalVariable var : scanVariables)

+			if (outputVariables.contains(var)) {

+				int offset = outputVariables.indexOf(var);

+				outputColumnsOffset[i++] = offset;

+			} else

+				outputColumnsOffset[i++] = -1;

+

+		Object[] schemaTypes = dataSource.getSchemaTypes();

+		// get record descriptor

+		RecordDescriptor recDescriptor = mkRecordDescriptor(propagatedSchema,

+				schemaTypes, context);

+

+		// setup the run time operator

+		JobConf conf = ConfUtil.getJobConf(fileDesc.getInputFileFormatClass(),

+				filePath);

+		int clusterSize = ConfUtil.getNCs().length;

+		IFileSplitProvider fsprovider = new HiveFileSplitProvider(conf,

+				filePathName, clusterSize);

+		ITupleParserFactory tupleParserFactory = new HiveTupleParserFactory(

+				fileDesc, conf, outputColumnsOffset);

+		HiveFileScanOperatorDescriptor opDesc = new HiveFileScanOperatorDescriptor(

+				jobSpec, fsprovider, tupleParserFactory, recDescriptor);

+

+		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(

+				opDesc, opDesc.getPartitionConstraint());

+	}

+

+	private static RecordDescriptor mkRecordDescriptor(

+			IOperatorSchema opSchema, Object[] types, JobGenContext context)

+			throws AlgebricksException {

+		ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema

+				.getSize()];

+		ISerializerDeserializerProvider sdp = context

+				.getSerializerDeserializerProvider();

+		int size = opSchema.getSize();

+		for (int i = 0; i < size; i++) {

+			Object t = types[i];

+			fields[i] = sdp.getSerializerDeserializer(t);

+			i++;

+		}

+		return new RecordDescriptor(fields);

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java
new file mode 100644
index 0000000..d372868
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;

+

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.mapred.JobConf;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;

+import edu.uci.ics.hivesterix.runtime.operator.filewrite.HivePushRuntimeFactory;

+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;

+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;

+

+@SuppressWarnings("deprecation")

+public class HiveWriteRuntimeGenerator {

+	private FileSinkOperator fileSink;

+

+	private Schema inputSchema;

+

+	public HiveWriteRuntimeGenerator(FileSinkOperator fsOp, Schema oi) {

+		fileSink = fsOp;

+		inputSchema = oi;

+	}

+

+	/**

+	 * get the write runtime

+	 * 

+	 * @param inputDesc

+	 * @return

+	 */

+	public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriterRuntime(

+			RecordDescriptor inputDesc) {

+		JobConf conf = ConfUtil.getJobConf();

+		IPushRuntimeFactory factory = new HivePushRuntimeFactory(inputDesc,

+				conf, fileSink, inputSchema);

+		Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> pair = new Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint>(

+				factory, null);

+		return pair;

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveFileSplitProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveFileSplitProvider.java
new file mode 100644
index 0000000..2f988f8
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveFileSplitProvider.java
@@ -0,0 +1,19 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;

+

+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;

+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;

+

+public abstract class AbstractHiveFileSplitProvider implements

+		IFileSplitProvider {

+	private static final long serialVersionUID = 1L;

+

+	@Override

+	public FileSplit[] getFileSplits() {

+		// TODO Auto-generated method stub

+		return null;

+	}

+

+	@SuppressWarnings("deprecation")

+	public abstract org.apache.hadoop.mapred.FileSplit[] getFileSplitArray();

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveTupleParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveTupleParser.java
new file mode 100644
index 0000000..a8addeb
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/AbstractHiveTupleParser.java
@@ -0,0 +1,29 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;

+

+import java.io.InputStream;

+

+import org.apache.hadoop.mapred.FileSplit;

+

+import edu.uci.ics.hyracks.api.comm.IFrameWriter;

+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;

+

+@SuppressWarnings("deprecation")

+public abstract class AbstractHiveTupleParser implements ITupleParser {

+

+	@Override

+	public void parse(InputStream in, IFrameWriter writer)

+			throws HyracksDataException {

+		// empty implementation

+	}

+

+	/**

+	 * method for parsing HDFS file split

+	 * 

+	 * @param split

+	 * @param writer

+	 */

+	abstract public void parse(FileSplit split, IFrameWriter writer)

+			throws HyracksDataException;

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileScanOperatorDescriptor.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileScanOperatorDescriptor.java
new file mode 100644
index 0000000..d248486
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileScanOperatorDescriptor.java
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.runtime.operator.filescan;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.mapred.FileSplit;
+
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+@SuppressWarnings("deprecation")
+public class HiveFileScanOperatorDescriptor extends
+		AbstractSingleActivityOperatorDescriptor {
+	private static final long serialVersionUID = 1L;
+
+	/**
+	 * tuple parser factory
+	 */
+	private final ITupleParserFactory tupleParserFactory;
+
+	/**
+	 * Hive file split
+	 */
+	private Partition[] parts;
+
+	/**
+	 * IFileSplitProvider
+	 */
+	private IFileSplitProvider fileSplitProvider;
+
+	/**
+	 * constrains in the form of host DNS names
+	 */
+	private String[] constraintsByHostNames;
+
+	/**
+	 * ip-to-node controller mapping
+	 */
+	private Map<String, List<String>> ncMapping;
+
+	/**
+	 * an array of NCs
+	 */
+	private String[] NCs;
+
+	/**
+	 * 
+	 * @param spec
+	 * @param fsProvider
+	 */
+	public HiveFileScanOperatorDescriptor(JobSpecification spec,
+			IFileSplitProvider fsProvider,
+			ITupleParserFactory tupleParserFactory, RecordDescriptor rDesc) {
+		super(spec, 0, 1);
+		this.tupleParserFactory = tupleParserFactory;
+		recordDescriptors[0] = rDesc;
+		fileSplitProvider = fsProvider;
+	}
+
+	/**
+	 * set partition constraint at the first time it is called the number of
+	 * partitions is obtained from HDFS name node
+	 */
+	public AlgebricksAbsolutePartitionConstraint getPartitionConstraint()
+			throws AlgebricksException {
+		FileSplit[] returnedSplits = ((AbstractHiveFileSplitProvider) fileSplitProvider)
+				.getFileSplitArray();
+		Random random = new Random(System.currentTimeMillis());
+		ncMapping = ConfUtil.getNCMapping();
+		NCs = ConfUtil.getNCs();
+
+		int size = 0;
+		for (FileSplit split : returnedSplits)
+			if (split != null)
+				size++;
+
+		FileSplit[] splits = new FileSplit[size];
+		for (int i = 0; i < returnedSplits.length; i++)
+			if (returnedSplits[i] != null)
+				splits[i] = returnedSplits[i];
+
+		System.out.println("!!! number of splits: " + splits.length);
+		constraintsByHostNames = new String[splits.length];
+		for (int i = 0; i < splits.length; i++) {
+			try {
+				String[] loc = splits[i].getLocations();
+				Collections.shuffle(Arrays.asList(loc), random);
+				if (loc.length > 0) {
+					InetAddress[] allIps = InetAddress.getAllByName(loc[0]);
+					for (InetAddress ip : allIps) {
+						if (ncMapping.get(ip.getHostAddress()) != null) {
+							List<String> ncs = ncMapping.get(ip
+									.getHostAddress());
+							int pos = random.nextInt(ncs.size());
+							constraintsByHostNames[i] = ncs.get(pos);
+						} else {
+							int pos = random.nextInt(NCs.length);
+							constraintsByHostNames[i] = NCs[pos];
+						}
+					}
+				} else {
+					int pos = random.nextInt(NCs.length);
+					constraintsByHostNames[i] = NCs[pos];
+					if (splits[i].getLength() > 0)
+						throw new IllegalStateException(
+								"non local scanner non locations!!");
+				}
+			} catch (IOException e) {
+				throw new AlgebricksException(e);
+			}
+		}
+
+		parts = new Partition[splits.length];
+		for (int i = 0; i < splits.length; i++) {
+			parts[i] = new Partition(splits[i]);
+		}
+		return new AlgebricksAbsolutePartitionConstraint(constraintsByHostNames);
+	}
+
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int nPartitions) {
+
+		final ITupleParser tp = tupleParserFactory.createTupleParser(ctx);
+		final int partitionId = partition;
+
+		return new AbstractUnaryOutputSourceOperatorNodePushable() {
+
+			@Override
+			public void initialize() throws HyracksDataException {
+				writer.open();
+				FileSplit split = parts[partitionId].toFileSplit();
+				if (split == null)
+					throw new HyracksDataException("partition " + partitionId
+							+ " is null!");
+				((AbstractHiveTupleParser) tp).parse(split, writer);
+				writer.close();
+			}
+		};
+	}
+}
\ No newline at end of file
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileSplitProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileSplitProvider.java
new file mode 100644
index 0000000..d92d353
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveFileSplitProvider.java
@@ -0,0 +1,115 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;

+

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.File;

+import java.io.FileInputStream;

+import java.io.FileOutputStream;

+import java.io.IOException;

+import java.io.OutputStreamWriter;

+import java.io.PrintWriter;

+import java.util.UUID;

+

+import org.apache.hadoop.mapred.FileSplit;

+import org.apache.hadoop.mapred.InputFormat;

+import org.apache.hadoop.mapred.JobConf;

+import org.eclipse.jetty.util.log.Log;

+

+@SuppressWarnings({ "deprecation", "rawtypes" })

+public class HiveFileSplitProvider extends AbstractHiveFileSplitProvider {

+	private static final long serialVersionUID = 1L;

+

+	private transient InputFormat format;

+	private transient JobConf conf;

+	private String confContent;

+	final private int nPartition;

+	private transient FileSplit[] splits;

+

+	public HiveFileSplitProvider(JobConf conf, String filePath, int nPartition) {

+		format = conf.getInputFormat();

+		this.conf = conf;

+		this.nPartition = nPartition;

+		writeConfContent();

+	}

+

+	private void writeConfContent() {

+		File dir = new File("hadoop-conf-tmp");

+		if (!dir.exists()) {

+			dir.mkdir();

+		}

+

+		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

+				+ System.currentTimeMillis() + ".xml";

+		try {

+			DataOutputStream out = new DataOutputStream(new FileOutputStream(

+					new File(fileName)));

+			conf.writeXml(out);

+			out.close();

+

+			DataInputStream in = new DataInputStream(new FileInputStream(

+					fileName));

+			StringBuffer buffer = new StringBuffer();

+			String line;

+			while ((line = in.readLine()) != null) {

+				buffer.append(line + "\n");

+			}

+			in.close();

+			confContent = buffer.toString();

+		} catch (Exception e) {

+			e.printStackTrace();

+		}

+	}

+

+	private void readConfContent() {

+		File dir = new File("hadoop-conf-tmp");

+		if (!dir.exists()) {

+			dir.mkdir();

+		}

+

+		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

+				+ System.currentTimeMillis() + ".xml";

+		try {

+			PrintWriter out = new PrintWriter((new OutputStreamWriter(

+					new FileOutputStream(new File(fileName)))));

+			out.write(confContent);

+			out.close();

+			conf = new JobConf(fileName);

+		} catch (Exception e) {

+			e.printStackTrace();

+		}

+	}

+

+	@Override

+	/**

+	 * get the HDFS file split

+	 */

+	public FileSplit[] getFileSplitArray() {

+		readConfContent();

+		conf.setClassLoader(this.getClass().getClassLoader());

+		format = conf.getInputFormat();

+		// int splitSize = conf.getInt("mapred.min.split.size", 0);

+

+		if (splits == null) {

+			try {

+				splits = (org.apache.hadoop.mapred.FileSplit[]) format

+						.getSplits(conf, nPartition);

+				System.out.println("hdfs split number: " + splits.length);

+			} catch (IOException e) {

+				String inputPath = conf.get("mapred.input.dir");

+				String hdfsURL = conf.get("fs.default.name");

+				String alternatePath = inputPath.replaceAll(hdfsURL, "file:");

+				conf.set("mapred.input.dir", alternatePath);

+				try {

+					splits = (org.apache.hadoop.mapred.FileSplit[]) format

+							.getSplits(conf, nPartition);

+					System.out.println("hdfs split number: " + splits.length);

+				} catch (IOException e1) {

+					e1.printStackTrace();

+					Log.debug(e1.getMessage());

+					return null;

+				}

+			}

+		}

+		return splits;

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParser.java
new file mode 100644
index 0000000..7681bd1
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParser.java
@@ -0,0 +1,233 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;

+

+import java.io.DataOutput;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.List;

+import java.util.Properties;

+

+import org.apache.hadoop.hive.serde2.SerDe;

+import org.apache.hadoop.hive.serde2.SerDeException;

+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;

+import org.apache.hadoop.hive.serde2.objectinspector.StructField;

+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

+import org.apache.hadoop.io.BytesWritable;

+import org.apache.hadoop.io.Text;

+import org.apache.hadoop.io.Writable;

+import org.apache.hadoop.mapred.FileSplit;

+import org.apache.hadoop.mapred.InputFormat;

+import org.apache.hadoop.mapred.JobConf;

+import org.apache.hadoop.mapred.RecordReader;

+import org.apache.hadoop.mapred.Reporter;

+import org.apache.hadoop.util.ReflectionUtils;

+

+import edu.uci.ics.hivesterix.serde.parser.IHiveParser;

+import edu.uci.ics.hivesterix.serde.parser.TextToBinaryTupleParser;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.api.comm.IFrameWriter;

+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;

+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;

+

/**
 * Tuple parser for any Hive input format. Reads records from an HDFS file
 * split via the format's RecordReader, deserializes them with the input
 * SerDe, re-serializes the projected fields with the output SerDe (LazySerDe
 * style), and pushes completed frames to the writer. When every field is
 * primitive and the row inspector is lazy-simple, a faster text-to-binary
 * parser is used instead of full SerDe deserialization.
 */
@SuppressWarnings({ "rawtypes", "deprecation", "unchecked" })
public class HiveTupleParser extends AbstractHiveTupleParser {

	// maps input column index -> output position; negative means "not projected"
	private int[] outputColumnsOffset;
	/**
	 * class of input format
	 */
	private InputFormat inputFormat;

	/**
	 * serialization/deserialization object for input rows
	 */
	private SerDe serDe;

	/**
	 * the input row object inspector
	 */
	private ObjectInspector objectInspector;

	/**
	 * the hadoop job conf
	 */
	private JobConf job;

	/**
	 * Hyracks context to control resource allocation (frames)
	 */
	private final IHyracksTaskContext ctx;

	/**
	 * lazy serde: format of the data flowing between operators
	 */
	private final SerDe outputSerDe;

	/**
	 * fast path parser from hive text data to binary; null when the row
	 * contains non-primitive fields
	 */
	private IHiveParser parser = null;

	/**
	 * Builds a parser for the given input format / serde classes.
	 * 
	 * @param inputFormatClass
	 *            fully qualified name of the mapred InputFormat
	 * @param serDeClass
	 *            fully qualified name of the input SerDe
	 * @param outputSerDeClass
	 *            fully qualified name of the inter-operator SerDe
	 * @param tbl
	 *            Hive table properties used to initialize the serdes
	 * @param conf
	 *            the hadoop job configuration
	 * @param ctx
	 *            Hyracks task context (frame allocation)
	 * @param outputColumnsOffset
	 *            projection map: input column index -> output position
	 * @throws AlgebricksException
	 *             if reflection or serde initialization fails
	 */
	public HiveTupleParser(String inputFormatClass, String serDeClass,
			String outputSerDeClass, Properties tbl, JobConf conf,
			final IHyracksTaskContext ctx, int[] outputColumnsOffset)
			throws AlgebricksException {
		try {
			conf.setClassLoader(this.getClass().getClassLoader());

			inputFormat = (InputFormat) ReflectionUtils.newInstance(
					Class.forName(inputFormatClass), conf);
			job = conf;

			// initialize the input serde
			serDe = (SerDe) ReflectionUtils.newInstance(
					Class.forName(serDeClass), job);
			serDe.initialize(job, tbl);

			// initialize the output serde
			outputSerDe = (SerDe) ReflectionUtils.newInstance(
					Class.forName(outputSerDeClass), job);
			outputSerDe.initialize(job, tbl);

			// object inspector of the row
			objectInspector = serDe.getObjectInspector();

			// hyracks context
			this.ctx = ctx;
			this.outputColumnsOffset = outputColumnsOffset;

			// enable the fast text parser only when every field is primitive
			if (objectInspector instanceof LazySimpleStructObjectInspector) {
				LazySimpleStructObjectInspector rowInspector = (LazySimpleStructObjectInspector) objectInspector;
				List<? extends StructField> fieldRefs = rowInspector
						.getAllStructFieldRefs();
				boolean lightWeightParsable = true;
				for (StructField fieldRef : fieldRefs) {
					Category category = fieldRef.getFieldObjectInspector()
							.getCategory();
					if (!(category == Category.PRIMITIVE)) {
						lightWeightParsable = false;
						break;
					}
				}
				if (lightWeightParsable)
					parser = new TextToBinaryTupleParser(
							this.outputColumnsOffset, this.objectInspector);
			}
		} catch (Exception e) {
			throw new AlgebricksException(e);
		}
	}

	/**
	 * parse an input HDFS file split; the result is sent to the writer
	 * one-frame-at-a-time
	 * 
	 * @param split
	 *            the HDFS file split
	 * @param writer
	 *            the writer
	 * @throws HyracksDataException
	 *             if there is sth. wrong in the ser/de
	 */
	@Override
	public void parse(FileSplit split, IFrameWriter writer)
			throws HyracksDataException {
		try {
			StructObjectInspector structInspector = (StructObjectInspector) objectInspector;

			// create the reader, key, and value
			RecordReader reader = inputFormat.getRecordReader(split, job,
					Reporter.NULL);
			Object key = reader.createKey();
			Object value = reader.createValue();

			// allocate a new frame
			ByteBuffer frame = ctx.allocateFrame();
			FrameTupleAppender appender = new FrameTupleAppender(
					ctx.getFrameSize());
			appender.reset(frame, true);

			List<? extends StructField> fieldRefs = structInspector
					.getAllStructFieldRefs();
			// count the projected output columns
			int size = 0;
			for (int i = 0; i < outputColumnsOffset.length; i++)
				if (outputColumnsOffset[i] >= 0)
					size++;

			ArrayTupleBuilder tb = new ArrayTupleBuilder(size);
			DataOutput dos = tb.getDataOutput();
			// field refs reordered into output positions
			StructField[] outputFieldRefs = new StructField[size];
			Object[] outputFields = new Object[size];
			for (int i = 0; i < outputColumnsOffset.length; i++)
				if (outputColumnsOffset[i] >= 0)
					outputFieldRefs[outputColumnsOffset[i]] = fieldRefs.get(i);

			while (reader.next(key, value)) {
				// reuse the tuple builder
				tb.reset();
				if (parser != null) {
					// fast path: parse text bytes straight to binary fields
					Text text = (Text) value;
					parser.parse(text.getBytes(), 0, text.getLength(), tb);
				} else {
					Object row = serDe.deserialize((Writable) value);
					// gather projected field data into output order
					int i = 0;
					for (StructField fieldRef : fieldRefs) {
						if (outputColumnsOffset[i] >= 0)
							outputFields[outputColumnsOffset[i]] = structInspector
									.getStructFieldData(row, fieldRef);
						i++;
					}

					// write fields to the tuple builder one by one
					i = 0;
					for (Object field : outputFields) {
						BytesWritable fieldWritable = (BytesWritable) outputSerDe
								.serialize(field, outputFieldRefs[i]
										.getFieldObjectInspector());
						dos.write(fieldWritable.getBytes(), 0,
								fieldWritable.getSize());
						tb.addFieldEndOffset();
						i++;
					}
				}

				// append to the frame; flush and retry when the frame is full
				if (!appender.append(tb.getFieldEndOffsets(),
						tb.getByteArray(), 0, tb.getSize())) {
					if (appender.getTupleCount() <= 0)
						throw new IllegalStateException(
								"zero tuples in a frame!");
					FrameUtils.flushFrame(frame, writer);
					appender.reset(frame, true);
					if (!appender.append(tb.getFieldEndOffsets(),
							tb.getByteArray(), 0, tb.getSize())) {
						// a single tuple larger than one frame cannot be emitted
						throw new IllegalStateException();
					}
				}
			}
			reader.close();
			System.gc();

			// flush the last frame
			if (appender.getTupleCount() > 0) {
				FrameUtils.flushFrame(frame, writer);
			}
		} catch (IOException e) {
			throw new HyracksDataException(e);
		} catch (SerDeException e) {
			throw new HyracksDataException(e);
		}
	}
}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParserFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParserFactory.java
new file mode 100644
index 0000000..69aa881
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveTupleParserFactory.java
@@ -0,0 +1,111 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;

+

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.File;

+import java.io.FileInputStream;

+import java.io.FileOutputStream;

+import java.io.OutputStreamWriter;

+import java.io.PrintWriter;

+import java.util.Properties;

+import java.util.UUID;

+

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.mapred.JobConf;

+

+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;

+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;

+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;

+

+@SuppressWarnings("deprecation")

+public class HiveTupleParserFactory implements ITupleParserFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	private int[] outputColumns;

+

+	private String outputSerDeClass = LazySerDe.class.getName();

+

+	private String inputSerDeClass;

+

+	private transient JobConf conf;

+

+	private Properties tbl;

+

+	private String confContent;

+

+	private String inputFormatClass;

+

+	public HiveTupleParserFactory(PartitionDesc desc, JobConf conf,

+			int[] outputColumns) {

+		this.conf = conf;

+		tbl = desc.getProperties();

+		inputFormatClass = (String) tbl.getProperty("file.inputformat");

+		inputSerDeClass = (String) tbl.getProperty("serialization.lib");

+		this.outputColumns = outputColumns;

+

+		writeConfContent();

+	}

+

+	@Override

+	public ITupleParser createTupleParser(IHyracksTaskContext ctx) {

+		readConfContent();

+		try {

+			return new HiveTupleParser(inputFormatClass, inputSerDeClass,

+					outputSerDeClass, tbl, conf, ctx, outputColumns);

+		} catch (Exception e) {

+			e.printStackTrace();

+			return null;

+		}

+	}

+

+	private void writeConfContent() {

+		File dir = new File("hadoop-conf-tmp");

+		if (!dir.exists()) {

+			dir.mkdir();

+		}

+

+		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

+				+ System.currentTimeMillis() + ".xml";

+		try {

+			DataOutputStream out = new DataOutputStream(new FileOutputStream(

+					new File(fileName)));

+			conf.writeXml(out);

+			out.close();

+

+			DataInputStream in = new DataInputStream(new FileInputStream(

+					fileName));

+			StringBuffer buffer = new StringBuffer();

+			String line;

+			while ((line = in.readLine()) != null) {

+				buffer.append(line + "\n");

+			}

+			in.close();

+			confContent = buffer.toString();

+		} catch (Exception e) {

+			e.printStackTrace();

+		}

+	}

+

+	private void readConfContent() {

+		File dir = new File("hadoop-conf-tmp");

+		if (!dir.exists()) {

+			dir.mkdir();

+		}

+

+		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

+				+ System.currentTimeMillis() + ".xml";

+		try {

+			PrintWriter out = new PrintWriter((new OutputStreamWriter(

+					new FileOutputStream(new File(fileName)))));

+			out.write(confContent);

+			out.close();

+

+			conf = new JobConf(fileName);

+		} catch (Exception e) {

+			e.printStackTrace();

+		}

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/Partition.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/Partition.java
new file mode 100644
index 0000000..1b3dcf2
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/Partition.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;

+

+import java.io.IOException;

+import java.io.Serializable;

+

+import org.apache.hadoop.fs.Path;

+import org.apache.hadoop.mapred.FileSplit;

+

/**
 * Serializable snapshot of an HDFS (mapred) FileSplit: path URI, byte
 * offset, length, and replica host names. Used to ship split information
 * inside a Hyracks job and rebuild the FileSplit on the worker side
 * (FileSplit itself is a Writable, not Java-serializable).
 * 
 * NOTE(review): field names are part of the Java-serialized form; do not
 * rename them without a compatibility plan.
 */
@SuppressWarnings("deprecation")
public class Partition implements Serializable {
	private static final long serialVersionUID = 1L;

	// split path as a URI string
	private String uri;
	// starting byte offset of the split within the file
	private long offset;
	// number of bytes in the split
	private long length;
	// host names holding replicas of this split's data
	private String[] locations;

	public Partition() {
	}

	/**
	 * Captures the given split's path, offset, length, and locations.
	 * 
	 * @throws IllegalStateException
	 *             if the split's locations cannot be read
	 */
	public Partition(FileSplit file) {
		uri = file.getPath().toUri().toString();
		offset = file.getStart();
		length = file.getLength();
		try {
			locations = file.getLocations();
		} catch (IOException e) {
			throw new IllegalStateException(e);
		}
	}

	/** Reconstructs the mapred FileSplit from the stored fields. */
	public FileSplit toFileSplit() {
		return new FileSplit(new Path(uri), offset, length, locations);
	}
}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java
new file mode 100644
index 0000000..05e79ea
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java
@@ -0,0 +1,153 @@
+package edu.uci.ics.hivesterix.runtime.operator.filewrite;

+

+import java.nio.ByteBuffer;

+

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.OperatorFactory;

+import org.apache.hadoop.hive.ql.exec.RowSchema;

+import org.apache.hadoop.hive.ql.metadata.HiveException;

+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;

+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

+import org.apache.hadoop.mapred.JobConf;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hivesterix.serde.lazy.LazyColumnar;

+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyColumnarObjectInspector;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;

+import edu.uci.ics.hyracks.api.comm.IFrameWriter;

+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;

+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;

+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;

+

+@SuppressWarnings("deprecation")

+public class HiveFileWritePushRuntime implements IPushRuntime {

+

+	/**

+	 * frame tuple accessor to access byte buffer

+	 */

+	private final FrameTupleAccessor accessor;

+

+	/**

+	 * input object inspector

+	 */

+	private final ObjectInspector inputInspector;

+

+	/**

+	 * cachedInput

+	 */

+	private final LazyColumnar cachedInput;

+

+	/**

+	 * File sink operator of Hive

+	 */

+	private final FileSinkDesc fileSink;

+

+	/**

+	 * job configuration, which contain name node and other configuration

+	 * information

+	 */

+	private JobConf conf;

+

+	/**

+	 * input object inspector

+	 */

+	private final Schema inputSchema;

+

+	/**

+	 * a copy of hive schema representation

+	 */

+	private RowSchema rowSchema;

+

+	/**

+	 * the Hive file sink operator

+	 */

+	private FileSinkOperator fsOp;

+

+	/**

+	 * cached tuple object reference

+	 */

+	private FrameTupleReference tuple = new FrameTupleReference();

+

+	/**

+	 * @param spec

+	 * @param fsProvider

+	 */

+	public HiveFileWritePushRuntime(IHyracksTaskContext context,

+			RecordDescriptor inputRecordDesc, JobConf job, FileSinkDesc fs,

+			RowSchema schema, Schema oi) {

+		fileSink = fs;

+		fileSink.setGatherStats(false);

+

+		rowSchema = schema;

+		conf = job;

+		inputSchema = oi;

+

+		accessor = new FrameTupleAccessor(context.getFrameSize(),

+				inputRecordDesc);

+		inputInspector = inputSchema.toObjectInspector();

+		cachedInput = new LazyColumnar(

+				(LazyColumnarObjectInspector) inputInspector);

+	}

+

+	@Override

+	public void open() throws HyracksDataException {

+		fsOp = (FileSinkOperator) OperatorFactory.get(fileSink, rowSchema);

+		fsOp.setChildOperators(null);

+		fsOp.setParentOperators(null);

+		conf.setClassLoader(this.getClass().getClassLoader());

+

+		ObjectInspector[] inspectors = new ObjectInspector[1];

+		inspectors[0] = inputInspector;

+		try {

+			fsOp.initialize(conf, inspectors);

+			fsOp.setExecContext(null);

+		} catch (Exception e) {

+			e.printStackTrace();

+		}

+	}

+

+	@Override

+	public void nextFrame(ByteBuffer buffer) throws HyracksDataException {

+		accessor.reset(buffer);

+		int n = accessor.getTupleCount();

+		try {

+			for (int i = 0; i < n; ++i) {

+				tuple.reset(accessor, i);

+				cachedInput.init(tuple);

+				fsOp.process(cachedInput, 0);

+			}

+		} catch (HiveException e) {

+			throw new HyracksDataException(e);

+		}

+	}

+

+	@Override

+	public void close() throws HyracksDataException {

+		try {

+			Thread.currentThread().setContextClassLoader(

+					this.getClass().getClassLoader());

+			fsOp.closeOp(false);

+		} catch (HiveException e) {

+			throw new HyracksDataException(e);

+		}

+	}

+

+	@Override

+	public void setFrameWriter(int index, IFrameWriter writer,

+			RecordDescriptor recordDesc) {

+		throw new IllegalStateException();

+	}

+

+	@Override

+	public void setInputRecordDescriptor(int index,

+			RecordDescriptor recordDescriptor) {

+	}

+

+	@Override

+	public void fail() throws HyracksDataException {

+

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java
new file mode 100644
index 0000000..43e90fa
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java
@@ -0,0 +1,113 @@
+package edu.uci.ics.hivesterix.runtime.operator.filewrite;

+

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.File;

+import java.io.FileInputStream;

+import java.io.FileOutputStream;

+import java.io.OutputStreamWriter;

+import java.io.PrintWriter;

+import java.util.UUID;

+

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.RowSchema;

+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;

+import org.apache.hadoop.mapred.JobConf;

+

+import edu.uci.ics.hivesterix.logical.expression.Schema;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;

+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;

+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;

+

+@SuppressWarnings("deprecation")

+public class HivePushRuntimeFactory implements IPushRuntimeFactory {

+

+	private static final long serialVersionUID = 1L;

+

+	private final RecordDescriptor inputRecordDesc;

+	private transient JobConf conf;

+	private final FileSinkDesc fileSink;

+	private final RowSchema outSchema;

+	private final Schema schema;

+

+	/**

+	 * the content of the configuration

+	 */

+	private String confContent;

+

+	public HivePushRuntimeFactory(RecordDescriptor inputRecordDesc,

+			JobConf conf, FileSinkOperator fsp, Schema sch) {

+		this.inputRecordDesc = inputRecordDesc;

+		this.conf = conf;

+		this.fileSink = fsp.getConf();

+		outSchema = fsp.getSchema();

+		this.schema = sch;

+

+		writeConfContent();

+	}

+

+	@Override

+	public String toString() {

+		return "file write";

+	}

+

+	@Override

+	public IPushRuntime createPushRuntime(IHyracksTaskContext context)

+			throws AlgebricksException {

+		if (conf == null)

+			readConfContent();

+

+		return new HiveFileWritePushRuntime(context, inputRecordDesc, conf,

+				fileSink, outSchema, schema);

+	}

+

+	private void readConfContent() {

+		File dir = new File("hadoop-conf-tmp");

+		if (!dir.exists()) {

+			dir.mkdir();

+		}

+

+		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

+				+ System.currentTimeMillis() + ".xml";

+		try {

+			PrintWriter out = new PrintWriter((new OutputStreamWriter(

+					new FileOutputStream(new File(fileName)))));

+			out.write(confContent);

+			out.close();

+			conf = new JobConf(fileName);

+		} catch (Exception e) {

+			e.printStackTrace();

+		}

+	}

+

+	private void writeConfContent() {

+		File dir = new File("hadoop-conf-tmp");

+		if (!dir.exists()) {

+			dir.mkdir();

+		}

+

+		String fileName = "hadoop-conf-tmp/" + UUID.randomUUID()

+				+ System.currentTimeMillis() + ".xml";

+		try {

+			DataOutputStream out = new DataOutputStream(new FileOutputStream(

+					new File(fileName)));

+			conf.writeXml(out);

+			out.close();

+

+			DataInputStream in = new DataInputStream(new FileInputStream(

+					fileName));

+			StringBuffer buffer = new StringBuffer();

+			String line;

+			while ((line = in.readLine()) != null) {

+				buffer.append(line + "\n");

+			}

+			in.close();

+			confContent = buffer.toString();

+		} catch (Exception e) {

+			e.printStackTrace();

+		}

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java
new file mode 100644
index 0000000..5a2e98c
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java
@@ -0,0 +1,77 @@
+package edu.uci.ics.hivesterix.runtime.provider;

+

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveByteBinaryAscComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveByteBinaryDescComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveDoubleBinaryAscComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveDoubleBinaryDescComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveFloatBinaryAscComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveFloatBinaryDescComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveIntegerBinaryAscComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveIntegerBinaryDescComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveLongBinaryAscComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveLongBinaryDescComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveShortBinaryAscComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveShortBinaryDescComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveStringBinaryAscComparatorFactory;

+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveStringBinaryDescComparatorFactory;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;

+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

+

+public class HiveBinaryComparatorFactoryProvider implements

+		IBinaryComparatorFactoryProvider {

+

+	public static final HiveBinaryComparatorFactoryProvider INSTANCE = new HiveBinaryComparatorFactoryProvider();

+

+	private HiveBinaryComparatorFactoryProvider() {

+	}

+

+	@Override

+	public IBinaryComparatorFactory getBinaryComparatorFactory(Object type,

+			boolean ascending) throws AlgebricksException {

+		if (type.equals(TypeInfoFactory.intTypeInfo)) {

+			if (ascending)

+				return HiveIntegerBinaryAscComparatorFactory.INSTANCE;

+			else

+				return HiveIntegerBinaryDescComparatorFactory.INSTANCE;

+

+		} else if (type.equals(TypeInfoFactory.longTypeInfo)) {

+			if (ascending)

+				return HiveLongBinaryAscComparatorFactory.INSTANCE;

+			else

+				return HiveLongBinaryDescComparatorFactory.INSTANCE;

+

+		} else if (type.equals(TypeInfoFactory.floatTypeInfo)) {

+			if (ascending)

+				return HiveFloatBinaryAscComparatorFactory.INSTANCE;

+			else

+				return HiveFloatBinaryDescComparatorFactory.INSTANCE;

+

+		} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {

+			if (ascending)

+				return HiveDoubleBinaryAscComparatorFactory.INSTANCE;

+			else

+				return HiveDoubleBinaryDescComparatorFactory.INSTANCE;

+		} else if (type.equals(TypeInfoFactory.shortTypeInfo)) {

+			if (ascending)

+				return HiveShortBinaryAscComparatorFactory.INSTANCE;

+			else

+				return HiveShortBinaryDescComparatorFactory.INSTANCE;

+		} else if (type.equals(TypeInfoFactory.stringTypeInfo)) {

+			if (ascending)

+				return HiveStringBinaryAscComparatorFactory.INSTANCE;

+			else

+				return HiveStringBinaryDescComparatorFactory.INSTANCE;

+		} else if (type.equals(TypeInfoFactory.byteTypeInfo)

+				|| type.equals(TypeInfoFactory.booleanTypeInfo)) {

+			if (ascending)

+				return HiveByteBinaryAscComparatorFactory.INSTANCE;

+			else

+				return HiveByteBinaryDescComparatorFactory.INSTANCE;

+		} else

+			throw new NotImplementedException();

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java
new file mode 100644
index 0000000..371d45b
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hivesterix.runtime.provider;

+

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveDoubleBinaryHashFunctionFactory;

+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveIntegerBinaryHashFunctionFactory;

+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveLongBinaryHashFunctionFactory;

+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveRawBinaryHashFunctionFactory;

+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveStingBinaryHashFunctionFactory;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;

+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;

+

+public class HiveBinaryHashFunctionFactoryProvider implements

+		IBinaryHashFunctionFactoryProvider {

+

+	public static final HiveBinaryHashFunctionFactoryProvider INSTANCE = new HiveBinaryHashFunctionFactoryProvider();

+

+	private HiveBinaryHashFunctionFactoryProvider() {

+	}

+

+	@Override

+	public IBinaryHashFunctionFactory getBinaryHashFunctionFactory(Object type)

+			throws AlgebricksException {

+		if (type.equals(TypeInfoFactory.intTypeInfo)) {

+			return HiveIntegerBinaryHashFunctionFactory.INSTANCE;

+		} else if (type.equals(TypeInfoFactory.longTypeInfo)) {

+			return HiveLongBinaryHashFunctionFactory.INSTANCE;

+		} else if (type.equals(TypeInfoFactory.stringTypeInfo)) {

+			return HiveStingBinaryHashFunctionFactory.INSTANCE;

+		} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {

+			return HiveDoubleBinaryHashFunctionFactory.INSTANCE;

+		} else {

+			return HiveRawBinaryHashFunctionFactory.INSTANCE;

+		}

+	}

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java
new file mode 100644
index 0000000..e7a2e79
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.MurmurHash3BinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+public class HiveBinaryHashFunctionFamilyProvider implements IBinaryHashFunctionFamilyProvider {
+
+    public static HiveBinaryHashFunctionFamilyProvider INSTANCE = new HiveBinaryHashFunctionFamilyProvider();
+
+    private HiveBinaryHashFunctionFamilyProvider() {
+
+    }
+
+    @Override
+    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException {
+        return MurmurHash3BinaryHashFunctionFamily.INSTANCE;
+    }
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java
new file mode 100644
index 0000000..9e3a8ae
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java
@@ -0,0 +1,53 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveDoubleAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveDoubleDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveIntegerAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveIntegerDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveLongAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveLongDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveStringAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveStringDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveNormalizedKeyComputerFactoryProvider implements
+		INormalizedKeyComputerFactoryProvider {
+
+	public static final HiveNormalizedKeyComputerFactoryProvider INSTANCE = new HiveNormalizedKeyComputerFactoryProvider();
+
+	private HiveNormalizedKeyComputerFactoryProvider() {
+	}
+
+	@Override
+	public INormalizedKeyComputerFactory getNormalizedKeyComputerFactory(
+			Object type, boolean ascending) {
+		if (ascending) {
+			if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+				return new HiveStringAscNormalizedKeyComputerFactory();
+			} else if (type.equals(TypeInfoFactory.intTypeInfo)) {
+				return new HiveIntegerAscNormalizedKeyComputerFactory();
+			} else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+				return new HiveLongAscNormalizedKeyComputerFactory();
+			} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+				return new HiveDoubleAscNormalizedKeyComputerFactory();
+			} else {
+				return null;
+			}
+		} else {
+			if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+				return new HiveStringDescNormalizedKeyComputerFactory();
+			} else if (type.equals(TypeInfoFactory.intTypeInfo)) {
+				return new HiveIntegerDescNormalizedKeyComputerFactory();
+			} else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+				return new HiveLongDescNormalizedKeyComputerFactory();
+			} else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+				return new HiveDoubleDescNormalizedKeyComputerFactory();
+			} else {
+				return null;
+			}
+		}
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java
new file mode 100644
index 0000000..bebb457
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.provider;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;

+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;

+

+public class HivePrinterFactoryProvider implements IPrinterFactoryProvider {

+

+	public static IPrinterFactoryProvider INSTANCE = new HivePrinterFactoryProvider();

+

+	@Override

+	public IPrinterFactory getPrinterFactory(Object type)

+			throws AlgebricksException {

+		return null;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java
new file mode 100644
index 0000000..7938de8
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java
@@ -0,0 +1,23 @@
+package edu.uci.ics.hivesterix.runtime.provider;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;

+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;

+

+public class HiveSerializerDeserializerProvider implements

+		ISerializerDeserializerProvider {

+

+	public static final HiveSerializerDeserializerProvider INSTANCE = new HiveSerializerDeserializerProvider();

+

+	private HiveSerializerDeserializerProvider() {

+	}

+

+	@SuppressWarnings("rawtypes")

+	@Override

+	public ISerializerDeserializer getSerializerDeserializer(Object type)

+			throws AlgebricksException {

+		// return ARecordSerializerDeserializer.SCHEMALESS_INSTANCE;

+		return null;

+	}

+

+}

diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java
new file mode 100644
index 0000000..2059128
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java
@@ -0,0 +1,33 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+
+public class HiveTypeTraitProvider implements ITypeTraitProvider, Serializable {
+	private static final long serialVersionUID = 1L;
+	public static HiveTypeTraitProvider INSTANCE = new HiveTypeTraitProvider();
+
+	private HiveTypeTraitProvider() {
+
+	}
+
+	@Override
+	public ITypeTraits getTypeTrait(Object arg0) {
+		return new ITypeTraits() {
+			private static final long serialVersionUID = 1L;
+
+			@Override
+			public int getFixedLength() {
+				return -1;
+			}
+
+			@Override
+			public boolean isFixedLength() {
+				return false;
+			}
+
+		};
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java
new file mode 100644
index 0000000..673416d
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+/**
+ * ByteArrayRef stores a reference to a byte array.
+ * 
+ * The LazyObject hierarchy uses a reference to a single ByteArrayRef, so that
+ * it's much faster to switch to the next row and release the reference to the
+ * old row (so that the system can do garbage collection if needed).
+ */
+public class ByteArrayRef {
+
+	/**
+	 * Stores the actual data.
+	 */
+	byte[] data;
+
+	public byte[] getData() {
+		return data;
+	}
+
+	public void setData(byte[] data) {
+		this.data = data;
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java
new file mode 100644
index 0000000..821c03d
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java
@@ -0,0 +1,236 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.RecordInfo;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyListObjectInspector;
+
+/**
+ * LazyArray is serialized as follows: start A b b b b b b end bytes[] ->
+ * |--------|---|---|---|---| ... |---|---|
+ * 
+ * Section A is the null-bytes. Suppose the list has N elements, then there are
+ * (N+7)/8 bytes used as null-bytes. Each bit corresponds to an element and it
+ * indicates whether that element is null (0) or not null (1).
+ * 
+ * After A, all b(s) represent the elements of the list. Each of them is again a
+ * LazyObject.
+ * 
+ */
+
+public class LazyArray extends LazyNonPrimitive<LazyListObjectInspector> {
+
+	/**
+	 * Whether the data is already parsed or not.
+	 */
+	boolean parsed = false;
+	/**
+	 * The length of the array. Only valid when the data is parsed.
+	 */
+	int arraySize = 0;
+
+	/**
+	 * The start positions and lengths of array elements. Only valid when the
+	 * data is parsed.
+	 */
+	int[] elementStart;
+	int[] elementLength;
+
+	/**
+	 * Whether an element is initialized or not.
+	 */
+	boolean[] elementInited;
+
+	/**
+	 * Whether an element is null or not. A length of 0 does not mean the
+	 * element is null; in particular, a 0-length string is not null.
+	 */
+	boolean[] elementIsNull;
+
+	/**
+	 * The elements of the array. Note that we call arrayElements[i].init(bytes,
+	 * begin, length) only when that element is accessed.
+	 */
+	@SuppressWarnings("rawtypes")
+	LazyObject[] arrayElements;
+
+	/**
+	 * Construct a LazyArray object with the ObjectInspector.
+	 * 
+	 * @param oi
+	 *            the oi representing the type of this LazyArray
+	 */
+	protected LazyArray(LazyListObjectInspector oi) {
+		super(oi);
+	}
+
+	/**
+	 * Set the row data for this LazyArray.
+	 * 
+	 * @see LazyObject#init(ByteArrayRef, int, int)
+	 */
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		super.init(bytes, start, length);
+		parsed = false;
+	}
+
+	/**
+	 * Enlarge the size of arrays storing information for the elements inside
+	 * the array.
+	 */
+	private void adjustArraySize(int newSize) {
+		if (elementStart == null || elementStart.length < newSize) {
+			elementStart = new int[newSize];
+			elementLength = new int[newSize];
+			elementInited = new boolean[newSize];
+			elementIsNull = new boolean[newSize];
+			arrayElements = new LazyObject[newSize];
+		}
+	}
+
+	VInt vInt = new LazyUtils.VInt();
+	RecordInfo recordInfo = new LazyUtils.RecordInfo();
+
+	/**
+	 * Parse the bytes and fill elementStart, elementLength, elementInited and
+	 * elementIsNull.
+	 */
+	private void parse() {
+
+		// get the vint that represents the array size
+		LazyUtils.readVInt(bytes, start, vInt);
+		arraySize = vInt.value;
+		if (0 == arraySize) {
+			parsed = true;
+			return;
+		}
+
+		// adjust arrays
+		adjustArraySize(arraySize);
+		// find out the null-bytes
+		int arryByteStart = start + vInt.length;
+		int nullByteCur = arryByteStart;
+		int nullByteEnd = arryByteStart + (arraySize + 7) / 8;
+		// the beginning of the real elements
+		int lastElementByteEnd = nullByteEnd;
+		// the list element object inspector
+		ObjectInspector listEleObjectInspector = ((ListObjectInspector) oi)
+				.getListElementObjectInspector();
+		// parsing elements one by one
+		for (int i = 0; i < arraySize; i++) {
+			elementIsNull[i] = true;
+			if ((bytes[nullByteCur] & (1 << (i % 8))) != 0) {
+				elementIsNull[i] = false;
+				LazyUtils.checkObjectByteInfo(listEleObjectInspector, bytes,
+						lastElementByteEnd, recordInfo);
+				elementStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+				elementLength[i] = recordInfo.elementSize;
+				lastElementByteEnd = elementStart[i] + elementLength[i];
+			}
+			// move onto the next null byte
+			if (7 == (i % 8)) {
+				nullByteCur++;
+			}
+		}
+
+		Arrays.fill(elementInited, 0, arraySize, false);
+		parsed = true;
+	}
+
+	/**
+	 * Returns the actual primitive object at the index position inside the
+	 * array represented by this LazyObject.
+	 */
+	public Object getListElementObject(int index) {
+		if (!parsed) {
+			parse();
+		}
+		if (index < 0 || index >= arraySize) {
+			return null;
+		}
+		return uncheckedGetElement(index);
+	}
+
+	/**
+	 * Get the element without checking out-of-bound index.
+	 * 
+	 * @param index
+	 *            index to the array element
+	 */
+	private Object uncheckedGetElement(int index) {
+
+		if (elementIsNull[index]) {
+			return null;
+		} else {
+			if (!elementInited[index]) {
+				elementInited[index] = true;
+				if (arrayElements[index] == null) {
+					arrayElements[index] = LazyFactory.createLazyObject((oi)
+							.getListElementObjectInspector());
+				}
+				arrayElements[index].init(bytes, elementStart[index],
+						elementLength[index]);
+			}
+		}
+		return arrayElements[index].getObject();
+	}
+
+	/**
+	 * Returns the array size.
+	 */
+	public int getListLength() {
+		if (!parsed) {
+			parse();
+		}
+		return arraySize;
+	}
+
+	/**
+	 * cachedList is reused every time getList is called. Different
+	 * LazyArray instances cannot share the same cachedList.
+	 */
+	ArrayList<Object> cachedList;
+
+	/**
+	 * Returns the List of actual primitive objects. Returns null for null
+	 * array.
+	 */
+	public List<Object> getList() {
+		if (!parsed) {
+			parse();
+		}
+		if (cachedList == null) {
+			cachedList = new ArrayList<Object>(arraySize);
+		} else {
+			cachedList.clear();
+		}
+		for (int index = 0; index < arraySize; index++) {
+			cachedList.add(uncheckedGetElement(index));
+		}
+		return cachedList;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java
new file mode 100644
index 0000000..83b6254
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.BooleanWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyBooleanObjectInspector;
+
+/**
+ * LazyObject for storing a value of boolean.
+ * 
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project.
+ * 
+ * As with the specification, this implementation relied on code laid out in <a
+ * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
+ * Delight, (Addison Wesley, 2002)</a> as well as <a
+ * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ * 
+ */
+public class LazyBoolean extends
+		LazyPrimitive<LazyBooleanObjectInspector, BooleanWritable> {
+
+	public LazyBoolean(LazyBooleanObjectInspector oi) {
+		super(oi);
+		data = new BooleanWritable();
+	}
+
+	public LazyBoolean(LazyBoolean copy) {
+		super(copy);
+		data = new BooleanWritable(copy.data.get());
+	}
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		if (length == 0) {
+			isNull = true;
+			return;
+		} else
+			isNull = false;
+
+		// a temporary hack
+		assert (1 == length);
+		byte val = bytes[start];
+		if (val == 0) {
+			data.set(false);
+		} else if (val == 1) {
+			data.set(true);
+		}
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java
new file mode 100644
index 0000000..264015b
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.ByteWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyByteObjectInspector;
+
+/**
+ * LazyObject for storing a value of Byte.
+ * 
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project.
+ * 
+ * As with the specification, this implementation relied on code laid out in <a
+ * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
+ * Delight, (Addison Wesley, 2002)</a> as well as <a
+ * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ * 
+ */
+public class LazyByte extends
+		LazyPrimitive<LazyByteObjectInspector, ByteWritable> {
+
+	public LazyByte(LazyByteObjectInspector oi) {
+		super(oi);
+		data = new ByteWritable();
+	}
+
+	public LazyByte(LazyByte copy) {
+		super(copy);
+		data = new ByteWritable(copy.data.get());
+	}
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		if (length == 0) {
+			isNull = true;
+			return;
+		} else
+			isNull = false;
+
+		assert (1 == length);
+		data.set(bytes[start]);
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java
new file mode 100644
index 0000000..a25ae49
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyColumnarObjectInspector;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyObject for storing a struct. The field of a struct can be primitive or
+ * non-primitive.
+ * 
+ * LazyColumnar does not deal with the case of a NULL struct. That is handled by
+ * the parent LazyObject.
+ */
+@SuppressWarnings("rawtypes")
+public class LazyColumnar extends LazyNonPrimitive<LazyColumnarObjectInspector> {
+
+	/**
+	 * IFrameTupleReference: the backend of the struct
+	 */
+	IFrameTupleReference tuple;
+
+	/**
+	 * Whether the data is already parsed or not.
+	 */
+	boolean reset;
+
+	/**
+	 * The fields of the struct.
+	 */
+	LazyObject[] fields;
+
+	/**
+	 * Whether init() has been called on the field or not.
+	 */
+	boolean[] fieldVisited;
+
+	/**
+	 * whether it is the first time initialization
+	 */
+	boolean start = true;
+
+	/**
+	 * Construct a LazyStruct object with the ObjectInspector.
+	 */
+	public LazyColumnar(LazyColumnarObjectInspector oi) {
+		super(oi);
+	}
+
+	/**
+	 * Set the row data for this LazyColumnar.
+	 * 
+	 * @see LazyObject#init(ByteArrayRef, int, int)
+	 */
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		super.init(bytes, start, length);
+		reset = false;
+	}
+
+	/**
+	 * Parse the byte[] and fill each field.
+	 */
+	private void parse() {
+
+		if (start) {
+			// initialize field array and reusable objects
+			List<? extends StructField> fieldRefs = ((StructObjectInspector) oi)
+					.getAllStructFieldRefs();
+
+			fields = new LazyObject[fieldRefs.size()];
+			for (int i = 0; i < fields.length; i++) {
+				fields[i] = LazyFactory.createLazyObject(fieldRefs.get(i)
+						.getFieldObjectInspector());
+			}
+			fieldVisited = new boolean[fields.length];
+			start = false;
+		}
+
+		Arrays.fill(fieldVisited, false);
+		reset = true;
+	}
+
+	/**
+	 * Get one field out of the struct.
+	 * 
+	 * If the field is a primitive field, return the actual object. Otherwise
+	 * return the LazyObject. This is because PrimitiveObjectInspector does not
+	 * have control over the object used by the user - the user simply directly
+	 * use the Object instead of going through Object
+	 * PrimitiveObjectInspector.get(Object).
+	 * 
+	 * @param fieldID
+	 *            The field ID
+	 * @return The field as a LazyObject
+	 */
+	public Object getField(int fieldID) {
+		if (!reset) {
+			parse();
+		}
+		return uncheckedGetField(fieldID);
+	}
+
+	/**
+	 * Get the field out of the row without checking parsed. This is called by
+	 * both getField and getFieldsAsList.
+	 * 
+	 * @param fieldID
+	 *            The id of the field starting from 0.
+	 * @param nullSequence
+	 *            The sequence representing NULL value.
+	 * @return The value of the field
+	 */
+	private Object uncheckedGetField(int fieldID) {
+		// get the buffer
+		byte[] buffer = tuple.getFieldData(fieldID);
+		// get the offset of the field
+		int s1 = tuple.getFieldStart(fieldID);
+		int l1 = tuple.getFieldLength(fieldID);
+
+		if (!fieldVisited[fieldID]) {
+			fieldVisited[fieldID] = true;
+			fields[fieldID].init(buffer, s1, l1);
+		}
+		// if (fields[fieldID].getObject() == null) {
+		// throw new IllegalStateException("illegal field " + fieldID);
+		// }
+		return fields[fieldID].getObject();
+	}
+
+	ArrayList<Object> cachedList;
+
+	/**
+	 * Get the values of the fields as an ArrayList.
+	 * 
+	 * @return The values of the fields as an ArrayList.
+	 */
+	public ArrayList<Object> getFieldsAsList() {
+		if (!reset) {
+			parse();
+		}
+		if (cachedList == null) {
+			cachedList = new ArrayList<Object>();
+		} else {
+			cachedList.clear();
+		}
+		for (int i = 0; i < fields.length; i++) {
+			cachedList.add(uncheckedGetField(i));
+		}
+		return cachedList;
+	}
+
+	@Override
+	public Object getObject() {
+		return this;
+	}
+
+	protected boolean getParsed() {
+		return reset;
+	}
+
+	protected void setParsed(boolean parsed) {
+		this.reset = parsed;
+	}
+
+	protected LazyObject[] getFields() {
+		return fields;
+	}
+
+	protected void setFields(LazyObject[] fields) {
+		this.fields = fields;
+	}
+
+	protected boolean[] getFieldInited() {
+		return fieldVisited;
+	}
+
+	protected void setFieldInited(boolean[] fieldInited) {
+		this.fieldVisited = fieldInited;
+	}
+
+	/**
+	 * rebind a frametuplereference to the struct
+	 */
+	public void init(IFrameTupleReference r) {
+		this.tuple = r;
+		reset = false;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java
new file mode 100644
index 0000000..d687aa1
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.DoubleWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
+
+/**
+ * LazyObject for storing a value of Double.
+ * 
+ * The value is serialized as exactly 8 bytes holding the IEEE-754 bit pattern
+ * of the double (decoded via {@link Double#longBitsToDouble}); a zero-length
+ * field denotes null.
+ */
+public class LazyDouble extends
+		LazyPrimitive<LazyDoubleObjectInspector, DoubleWritable> {
+
+	public LazyDouble(LazyDoubleObjectInspector oi) {
+		super(oi);
+		data = new DoubleWritable();
+	}
+
+	/** Copy constructor: duplicates the source's current value. */
+	public LazyDouble(LazyDouble copy) {
+		super(copy);
+		data = new DoubleWritable(copy.data.get());
+	}
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		// A zero-length field encodes null.
+		if (length == 0) {
+			isNull = true;
+			return;
+		} else
+			isNull = false;
+		// Doubles are fixed-width: 8 bytes of raw long bits.
+		assert (8 == length);
+		data.set(Double.longBitsToDouble(LazyUtils
+				.byteArrayToLong(bytes, start)));
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java
new file mode 100644
index 0000000..e7593e4
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyColumnarObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyListObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyMapObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyStructObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyBooleanObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyByteObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyFloatObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyIntObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyLongObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyShortObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyStringObjectInspector;
+
+/**
+ * LazyFactory.
+ * 
+ * Static factory that instantiates the LazyObject subclass matching a given
+ * Hive ObjectInspector (primitive, map, list, or struct/columnar).
+ */
+public final class LazyFactory {
+
+	/**
+	 * Create a lazy binary primitive class given the type name.
+	 * 
+	 * @param oi
+	 *            the primitive object inspector describing the type
+	 * @throws RuntimeException
+	 *             if the primitive category has no lazy implementation
+	 */
+	public static LazyPrimitive<?, ?> createLazyPrimitiveClass(
+			PrimitiveObjectInspector oi) {
+		PrimitiveCategory p = oi.getPrimitiveCategory();
+		switch (p) {
+		case BOOLEAN:
+			return new LazyBoolean((LazyBooleanObjectInspector) oi);
+		case BYTE:
+			return new LazyByte((LazyByteObjectInspector) oi);
+		case SHORT:
+			return new LazyShort((LazyShortObjectInspector) oi);
+		case INT:
+			return new LazyInteger((LazyIntObjectInspector) oi);
+		case LONG:
+			return new LazyLong((LazyLongObjectInspector) oi);
+		case FLOAT:
+			return new LazyFloat((LazyFloatObjectInspector) oi);
+		case DOUBLE:
+			return new LazyDouble((LazyDoubleObjectInspector) oi);
+		case STRING:
+			return new LazyString((LazyStringObjectInspector) oi);
+		default:
+			throw new RuntimeException("Internal error: no LazyObject for " + p);
+		}
+	}
+
+	/**
+	 * Create a hierarchical LazyObject based on the given typeInfo.
+	 * 
+	 * @param oi
+	 *            the object inspector describing the (possibly nested) type
+	 * @throws RuntimeException
+	 *             if the category is not one of PRIMITIVE/MAP/LIST/STRUCT
+	 */
+	public static LazyObject<? extends ObjectInspector> createLazyObject(
+			ObjectInspector oi) {
+		ObjectInspector.Category c = oi.getCategory();
+		switch (c) {
+		case PRIMITIVE:
+			return createLazyPrimitiveClass((PrimitiveObjectInspector) oi);
+		case MAP:
+			return new LazyMap((LazyMapObjectInspector) oi);
+		case LIST:
+			return new LazyArray((LazyListObjectInspector) oi);
+		case STRUCT: // check whether it is a top-level struct
+			// The inspector's concrete type decides between a nested struct
+			// (LazyStruct) and a columnar top-level row (LazyColumnar).
+			if (oi instanceof LazyStructObjectInspector)
+				return new LazyStruct((LazyStructObjectInspector) oi);
+			else
+				return new LazyColumnar((LazyColumnarObjectInspector) oi);
+		default:
+			throw new RuntimeException("Hive LazySerDe Internal error.");
+		}
+	}
+
+	private LazyFactory() {
+		// prevent instantiation
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java
new file mode 100644
index 0000000..303cc67
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.FloatWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyFloatObjectInspector;
+
+/**
+ * LazyObject for storing a value of Float.
+ * 
+ * The value is serialized as exactly 4 bytes holding the IEEE-754 bit pattern
+ * of the float (decoded via {@link Float#intBitsToFloat}); a zero-length
+ * field denotes null.
+ */
+public class LazyFloat extends
+		LazyPrimitive<LazyFloatObjectInspector, FloatWritable> {
+
+	public LazyFloat(LazyFloatObjectInspector oi) {
+		super(oi);
+		data = new FloatWritable();
+	}
+
+	/** Copy constructor: duplicates the source's current value. */
+	public LazyFloat(LazyFloat copy) {
+		super(copy);
+		data = new FloatWritable(copy.data.get());
+	}
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		// A zero-length field encodes null.
+		if (length == 0) {
+			isNull = true;
+			return;
+		} else
+			isNull = false;
+
+		// Floats are fixed-width: 4 bytes of raw int bits.
+		assert (4 == length);
+		data.set(Float.intBitsToFloat(LazyUtils.byteArrayToInt(bytes, start)));
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java
new file mode 100644
index 0000000..c908c40
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.IntWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyIntObjectInspector;
+
+/**
+ * LazyObject for storing a value of Integer.
+ * 
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project.
+ * 
+ * As with the specification, this implementation relied on code laid out in <a
+ * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
+ * Delight, (Addison Wesley, 2002)</a> as well as <a
+ * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ * 
+ * The value is stored as a variable-length integer (VInt); a zero-length
+ * field denotes null.
+ */
+public class LazyInteger extends
+		LazyPrimitive<LazyIntObjectInspector, IntWritable> {
+
+	public LazyInteger(LazyIntObjectInspector oi) {
+		super(oi);
+		data = new IntWritable();
+	}
+
+	/** Copy constructor: duplicates the source's current value. */
+	public LazyInteger(LazyInteger copy) {
+		super(copy);
+		data = new IntWritable(copy.data.get());
+	}
+
+	/**
+	 * The reusable vInt for decoding the integer.
+	 */
+	VInt vInt = new LazyUtils.VInt();
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		// A zero-length field encodes null.
+		if (length == 0) {
+			isNull = true;
+			return;
+		} else
+			isNull = false;
+
+		// The decoded VInt must consume exactly the declared field length;
+		// a mismatch means the serialized data is corrupt.
+		LazyUtils.readVInt(bytes, start, vInt);
+		assert (length == vInt.length);
+		if (length != vInt.length)
+			throw new IllegalStateException(
+					"parse int: length mismatch, expected " + vInt.length
+							+ " but get " + length);
+		data.set(vInt.value);
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java
new file mode 100644
index 0000000..38097e6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyLongObjectInspector;
+
+/**
+ * LazyObject for storing a value of Long.
+ * 
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project.
+ * 
+ * As with the specification, this implementation relied on code laid out in <a
+ * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
+ * Delight, (Addison Wesley, 2002)</a> as well as <a
+ * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ * 
+ * The value is stored as a variable-length long (VLong); a zero-length field
+ * denotes null.
+ */
+public class LazyLong extends
+		LazyPrimitive<LazyLongObjectInspector, LongWritable> {
+
+	public LazyLong(LazyLongObjectInspector oi) {
+		super(oi);
+		data = new LongWritable();
+	}
+
+	/** Copy constructor: duplicates the source's current value. */
+	public LazyLong(LazyLong copy) {
+		super(copy);
+		data = new LongWritable(copy.data.get());
+	}
+
+	/**
+	 * The reusable vLong for decoding the long.
+	 */
+	VLong vLong = new LazyUtils.VLong();
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		// A zero-length field encodes null.
+		if (length == 0) {
+			isNull = true;
+			return;
+		} else
+			isNull = false;
+
+		// The decoded VLong must consume exactly the declared field length;
+		// a mismatch means the serialized data is corrupt.
+		LazyUtils.readVLong(bytes, start, vLong);
+		assert (length == vLong.length);
+		if (length != vLong.length)
+			throw new IllegalStateException("parse long: length mismatch");
+		data.set(vLong.value);
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java
new file mode 100644
index 0000000..56bc41b
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java
@@ -0,0 +1,337 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.RecordInfo;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyMapObjectInspector;
+
+/**
+ * LazyMap is serialized as follows: start A b c b c b c end bytes[] ->
+ * |--------|---|---|---|---| ... |---|---|
+ * 
+ * Section A is the null-bytes. Suppose the map has N key-value pairs, then
+ * there are (N*2+7)/8 bytes used as null-bytes. Each bit corresponds to a key
+ * or a value and it indicates whether that key or value is null (0) or not null
+ * (1).
+ * 
+ * After A, all the bytes are actual serialized data of the map, which are
+ * key-value pairs. b represent the keys and c represent the values. Each of
+ * them is again a LazyObject.
+ * 
+ */
+
+@SuppressWarnings("rawtypes")
+public class LazyMap extends LazyNonPrimitive<LazyMapObjectInspector> {
+
+	private static Log LOG = LogFactory.getLog(LazyMap.class.getName());
+
+	/**
+	 * Whether the data is already parsed or not.
+	 */
+	boolean parsed;
+
+	/**
+	 * The size of the map. Only valid when the data is parsed. -1 when the map
+	 * is NULL.
+	 */
+	int mapSize = 0;
+
+	/**
+	 * The beginning position and length of key[i] and value[i]. Only valid when
+	 * the data is parsed.
+	 */
+	int[] keyStart;
+	int[] keyLength;
+	int[] valueStart;
+	int[] valueLength;
+	/**
+	 * Whether valueObjects[i]/keyObjects[i] is initialized or not.
+	 */
+	boolean[] keyInited;
+	boolean[] valueInited;
+
+	/**
+	 * Whether valueObjects[i]/keyObjects[i] is null or not This could not be
+	 * inferred from the length of the object. In particular, a 0-length string
+	 * is not null.
+	 */
+	boolean[] keyIsNull;
+	boolean[] valueIsNull;
+
+	/**
+	 * The keys are stored in an array of LazyPrimitives.
+	 */
+	LazyPrimitive<?, ?>[] keyObjects;
+	/**
+	 * The values are stored in an array of LazyObjects. value[index] will start
+	 * from KeyEnd[index] + 1, and ends before KeyStart[index+1] - 1.
+	 */
+	LazyObject[] valueObjects;
+
+	protected LazyMap(LazyMapObjectInspector oi) {
+		super(oi);
+	}
+
+	/**
+	 * Set the row data for this LazyMap.
+	 * 
+	 * @see LazyObject#init(ByteArrayRef, int, int)
+	 */
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		super.init(bytes, start, length);
+		// Defer parsing until the first element access.
+		parsed = false;
+	}
+
+	/**
+	 * Adjust the size of arrays: keyStart, keyLength valueStart, valueLength
+	 * keyInited, keyIsNull valueInited, valueIsNull.
+	 * 
+	 * The arrays only ever grow; they are reused across rows to avoid
+	 * reallocation.
+	 */
+	protected void adjustArraySize(int newSize) {
+		if (keyStart == null || keyStart.length < newSize) {
+			keyStart = new int[newSize];
+			keyLength = new int[newSize];
+			valueStart = new int[newSize];
+			valueLength = new int[newSize];
+			keyInited = new boolean[newSize];
+			keyIsNull = new boolean[newSize];
+			valueInited = new boolean[newSize];
+			valueIsNull = new boolean[newSize];
+			keyObjects = new LazyPrimitive<?, ?>[newSize];
+			valueObjects = new LazyObject[newSize];
+		}
+	}
+
+	// Set once the first null map key is seen, so the warning is only logged
+	// a single time per LazyMap instance.
+	boolean nullMapKey = false;
+	VInt vInt = new LazyUtils.VInt();
+	RecordInfo recordInfo = new LazyUtils.RecordInfo();
+
+	/**
+	 * Parse the byte[] and fill keyStart, keyLength, keyIsNull valueStart,
+	 * valueLength and valueIsNull.
+	 */
+	private void parse() {
+
+		// get the VInt that represents the map size
+		LazyUtils.readVInt(bytes, start, vInt);
+		mapSize = vInt.value;
+		if (0 == mapSize) {
+			parsed = true;
+			return;
+		}
+
+		// adjust arrays
+		adjustArraySize(mapSize);
+
+		// find out the null-bytes: two null-bits per entry (one for the key,
+		// one for the value), so (mapSize * 2 + 7) / 8 bytes in total
+		int mapByteStart = start + vInt.length;
+		int nullByteCur = mapByteStart;
+		int nullByteEnd = mapByteStart + (mapSize * 2 + 7) / 8;
+		int lastElementByteEnd = nullByteEnd;
+
+		// parsing the keys and values one by one
+		for (int i = 0; i < mapSize; i++) {
+			// parse a key: null-bit set means not-null
+			keyIsNull[i] = true;
+			if ((bytes[nullByteCur] & (1 << ((i * 2) % 8))) != 0) {
+				keyIsNull[i] = false;
+				LazyUtils.checkObjectByteInfo(
+						((MapObjectInspector) oi).getMapKeyObjectInspector(),
+						bytes, lastElementByteEnd, recordInfo);
+				keyStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+				keyLength[i] = recordInfo.elementSize;
+				lastElementByteEnd = keyStart[i] + keyLength[i];
+			} else if (!nullMapKey) {
+				nullMapKey = true;
+				LOG.warn("Null map key encountered! Ignoring similar problems.");
+			}
+
+			// parse a value: null-bit set means not-null
+			valueIsNull[i] = true;
+			if ((bytes[nullByteCur] & (1 << ((i * 2 + 1) % 8))) != 0) {
+				valueIsNull[i] = false;
+				LazyUtils.checkObjectByteInfo(
+						((MapObjectInspector) oi).getMapValueObjectInspector(),
+						bytes, lastElementByteEnd, recordInfo);
+				valueStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+				valueLength[i] = recordInfo.elementSize;
+				lastElementByteEnd = valueStart[i] + valueLength[i];
+			}
+
+			// move onto the next null byte (4 entries x 2 bits fill a byte)
+			if (3 == (i % 4)) {
+				nullByteCur++;
+			}
+		}
+
+		// Invalidate cached key/value objects from a previous row.
+		Arrays.fill(keyInited, 0, mapSize, false);
+		Arrays.fill(valueInited, 0, mapSize, false);
+		parsed = true;
+	}
+
+	/**
+	 * Get the value object with the index without checking parsed.
+	 * 
+	 * @param index
+	 *            The index into the array starting from 0
+	 */
+	private LazyObject uncheckedGetValue(int index) {
+		if (valueIsNull[index]) {
+			return null;
+		}
+		// Lazily create and initialize the value object at most once per row.
+		if (!valueInited[index]) {
+			valueInited[index] = true;
+			if (valueObjects[index] == null) {
+				valueObjects[index] = LazyFactory
+						.createLazyObject(((MapObjectInspector) oi)
+								.getMapValueObjectInspector());
+			}
+			valueObjects[index].init(bytes, valueStart[index],
+					valueLength[index]);
+		}
+		return valueObjects[index];
+	}
+
+	/**
+	 * Get the value in the map for the key.
+	 * 
+	 * If there are multiple matches (which is possible in the serialized
+	 * format), only the first one is returned.
+	 * 
+	 * The most efficient way to get the value for the key is to serialize the
+	 * key and then try to find it in the array. We do linear search because in
+	 * most cases, user only wants to get one or two values out of the map, and
+	 * the cost of building up a HashMap is substantially higher.
+	 * 
+	 * @param key
+	 *            The key object that we are looking for.
+	 * @return The corresponding value object, or NULL if not found
+	 */
+	public Object getMapValueElement(Object key) {
+		if (!parsed) {
+			parse();
+		}
+		// search for the key
+		for (int i = 0; i < mapSize; i++) {
+			LazyPrimitive<?, ?> lazyKeyI = uncheckedGetKey(i);
+			if (lazyKeyI == null) {
+				continue;
+			}
+			// getWritableObject() will convert LazyPrimitive to actual
+			// primitive
+			// writable objects.
+			Object keyI = lazyKeyI.getWritableObject();
+			if (keyI == null) {
+				continue;
+			}
+			if (keyI.equals(key)) {
+				// Got a match, return the value
+				LazyObject v = uncheckedGetValue(i);
+				return v == null ? v : v.getObject();
+			}
+		}
+		return null;
+	}
+
+	/**
+	 * Get the key object with the index without checking parsed.
+	 * 
+	 * @param index
+	 *            The index into the array starting from 0
+	 */
+	private LazyPrimitive<?, ?> uncheckedGetKey(int index) {
+		if (keyIsNull[index]) {
+			return null;
+		}
+		// Lazily create and initialize the key object at most once per row.
+		if (!keyInited[index]) {
+			keyInited[index] = true;
+			if (keyObjects[index] == null) {
+				// Keys are always primitive
+				keyObjects[index] = LazyFactory
+						.createLazyPrimitiveClass((PrimitiveObjectInspector) ((MapObjectInspector) oi)
+								.getMapKeyObjectInspector());
+			}
+			keyObjects[index].init(bytes, keyStart[index], keyLength[index]);
+		}
+		return keyObjects[index];
+	}
+
+	/**
+	 * cachedMap is reused for different calls to getMap(). But each LazyMap has
+	 * a separate cachedMap so we won't overwrite the data by accident.
+	 */
+	LinkedHashMap<Object, Object> cachedMap;
+
+	/**
+	 * Return the map object representing this LazyMap. Note that the keyObjects
+	 * will be Writable primitive objects.
+	 * 
+	 * @return the map object
+	 */
+	public Map<Object, Object> getMap() {
+		if (!parsed) {
+			parse();
+		}
+		if (cachedMap == null) {
+			// Use LinkedHashMap to provide deterministic order
+			cachedMap = new LinkedHashMap<Object, Object>();
+		} else {
+			cachedMap.clear();
+		}
+
+		// go through each element of the map
+		for (int i = 0; i < mapSize; i++) {
+			LazyPrimitive<?, ?> lazyKey = uncheckedGetKey(i);
+			if (lazyKey == null) {
+				continue;
+			}
+			Object key = lazyKey.getObject();
+			// do not overwrite if there are duplicate keys
+			if (key != null && !cachedMap.containsKey(key)) {
+				LazyObject lazyValue = uncheckedGetValue(i);
+				Object value = (lazyValue == null ? null : lazyValue
+						.getObject());
+				cachedMap.put(key, value);
+			}
+		}
+		return cachedMap;
+	}
+
+	/**
+	 * Get the size of the map represented by this LazyMap.
+	 * 
+	 * @return The size of the map
+	 */
+	public int getMapSize() {
+		if (!parsed) {
+			parse();
+		}
+		return mapSize;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java
new file mode 100644
index 0000000..b151f2d
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyNonPrimitive is the base class for lazily-deserialized complex
+ * (non-primitive) objects backed by a byte range inside a byte[].
+ */
+public abstract class LazyNonPrimitive<OI extends ObjectInspector> extends
+		LazyObject<OI> {
+
+	// The backing buffer and the [start, start + length) window that holds
+	// this object's serialized bytes.
+	protected byte[] bytes;
+	protected int start;
+	protected int length;
+
+	/**
+	 * Create a LazyNonPrimitive object with the specified ObjectInspector.
+	 * 
+	 * @param oi
+	 *            The ObjectInspector would have to have a hierarchy of
+	 *            LazyObjectInspectors with the leaf nodes being
+	 *            WritableObjectInspectors. It's used both for accessing the
+	 *            type hierarchy of the complex object, as well as getting meta
+	 *            information (separator, nullSequence, etc) when parsing the
+	 *            lazy object.
+	 */
+	protected LazyNonPrimitive(OI oi) {
+		super(oi);
+		bytes = null;
+		start = 0;
+		length = 0;
+	}
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		if (bytes == null) {
+			throw new RuntimeException("bytes cannot be null!");
+		}
+		this.bytes = bytes;
+		this.start = start;
+		this.length = length;
+		// Sanity-check the window (only active when assertions are enabled).
+		assert start >= 0;
+		assert start + length <= bytes.length;
+	}
+
+	@Override
+	public Object getObject() {
+		// Non-primitive lazy objects return themselves; elements are read
+		// lazily through subclass accessors.
+		return this;
+	}
+
+	@Override
+	public int hashCode() {
+		return LazyUtils.hashBytes(bytes, start, length);
+	}
+
+	@Override
+	public void init(IFrameTupleReference tuple) {
+		// No-op: non-primitive objects are bound through the byte[] overload
+		// of init() above -- presumably by the enclosing columnar object;
+		// TODO confirm.
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java
new file mode 100644
index 0000000..9aaaa88
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyObject stores an object in a range of bytes in a byte[].
+ * 
+ * A LazyObject can represent any primitive object or hierarchical object like
+ * array, map or struct. Deserialization is deferred until the value is
+ * actually requested via {@link #getObject()}.
+ */
+public abstract class LazyObject<OI extends ObjectInspector> {
+
+	// The object inspector describing this object's type and metadata.
+	OI oi;
+
+	/**
+	 * Create a LazyObject.
+	 * 
+	 * @param oi
+	 *            Derived classes can access meta information about this Lazy
+	 *            Object (e.g, separator, nullSequence, escaper) from it.
+	 */
+	protected LazyObject(OI oi) {
+		this.oi = oi;
+	}
+
+	/**
+	 * Set the data for this LazyObject. We take ByteArrayRef instead of byte[]
+	 * so that we will be able to drop the reference to byte[] by a single
+	 * assignment. The ByteArrayRef object can be reused across multiple rows.
+	 * 
+	 * @param bytes
+	 *            The wrapper of the byte[].
+	 * @param start
+	 *            The start position inside the bytes.
+	 * @param length
+	 *            The length of the data, starting from "start"
+	 * @see ByteArrayRef
+	 */
+	public abstract void init(byte[] bytes, int start, int length);
+
+	/**
+	 * Bind this object to a Hyracks frame tuple instead of a raw byte range.
+	 * 
+	 * @param tuple
+	 *            the frame tuple to read from
+	 */
+	public abstract void init(IFrameTupleReference tuple);
+
+	/**
+	 * If the LazyObject is a primitive Object, then deserialize it and return
+	 * the actual primitive Object. Otherwise (array, map, struct), return this.
+	 */
+	public abstract Object getObject();
+
+	@Override
+	public abstract int hashCode();
+
+	/** @return the object inspector bound to this lazy object */
+	protected OI getInspector() {
+		return oi;
+	}
+
+	/** Replace the object inspector bound to this lazy object. */
+	protected void setInspector(OI oi) {
+		this.oi = oi;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java
new file mode 100644
index 0000000..888e5b2
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyPrimitive stores a primitive Object in a LazyObject.
+ * 
+ * Subclasses decode a single Writable value from bytes on init(); a null
+ * value is tracked explicitly via the isNull flag.
+ */
+public abstract class LazyPrimitive<OI extends ObjectInspector, T extends Writable>
+		extends LazyObject<OI> {
+
+	LazyPrimitive(OI oi) {
+		super(oi);
+	}
+
+	/** Copy constructor: shares the inspector and copies the null flag. */
+	LazyPrimitive(LazyPrimitive<OI, T> copy) {
+		super(copy.oi);
+		isNull = copy.isNull;
+	}
+
+	// The decoded Writable value; only meaningful when isNull is false.
+	T data;
+	boolean isNull = false;
+
+	/**
+	 * Returns the primitive object represented by this LazyObject. This is
+	 * useful because it can make sure we have "null" for null objects.
+	 */
+	@Override
+	public Object getObject() {
+		return isNull ? null : this;
+	}
+
+	/** @return the underlying Writable value, or null if this value is null */
+	public T getWritableObject() {
+		return isNull ? null : data;
+	}
+
+	@Override
+	public String toString() {
+		return isNull ? "null" : data.toString();
+	}
+
+	@Override
+	public int hashCode() {
+		return isNull ? 0 : data.hashCode();
+	}
+
+	@Override
+	public void init(IFrameTupleReference tuple) {
+		// No-op: primitives are bound through the byte[] overload of init(),
+		// presumably by the enclosing complex object -- TODO confirm.
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java
new file mode 100644
index 0000000..4d0dff6
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java
@@ -0,0 +1,477 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * The LazySerDe class combines the lazy property of LazySimpleSerDe class and
+ * the binary property of BinarySortable class. Lazy means a field is not
+ * deserialized until required. Binary means a field is serialized in binary
+ * compact format.
+ */
+public class LazySerDe implements SerDe {
+
+	public static final Log LOG = LogFactory.getLog(LazySerDe.class.getName());
+
+	public LazySerDe() {
+	}
+
+	// Column names/types parsed from the table properties in initialize().
+	List<String> columnNames;
+	List<TypeInfo> columnTypes;
+
+	// Struct type describing one row, built from columnNames/columnTypes.
+	TypeInfo rowTypeInfo;
+	ObjectInspector cachedObjectInspector;
+
+	// The reusable object for storing row data; re-initialized per record.
+	LazyColumnar cachedLazyStruct;
+
+	/**
+	 * Initialize the SerDe with configuration and table information.
+	 *
+	 * Reads the comma-separated column-name list and the column-type string
+	 * from the table properties, then builds the cached row object inspector
+	 * and the reusable lazy row object.
+	 */
+	@Override
+	public void initialize(Configuration conf, Properties tbl)
+			throws SerDeException {
+		// Get column names and types
+		String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
+		String columnTypeProperty = tbl
+				.getProperty(Constants.LIST_COLUMN_TYPES);
+		if (columnNameProperty.length() == 0) {
+			columnNames = new ArrayList<String>();
+		} else {
+			columnNames = Arrays.asList(columnNameProperty.split(","));
+		}
+		if (columnTypeProperty.length() == 0) {
+			columnTypes = new ArrayList<TypeInfo>();
+		} else {
+			columnTypes = TypeInfoUtils
+					.getTypeInfosFromTypeString(columnTypeProperty);
+		}
+		assert (columnNames.size() == columnTypes.size());
+		// Create row related objects
+		rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames,
+				columnTypes);
+		// Create the object inspector and the lazy binary struct object
+		cachedObjectInspector = LazyUtils.getLazyObjectInspectorFromTypeInfo(
+				rowTypeInfo, true);
+		cachedLazyStruct = (LazyColumnar) LazyFactory
+				.createLazyObject(cachedObjectInspector);
+		// output debug info
+		LOG.debug("LazySerDe initialized with: columnNames=" + columnNames
+				+ " columnTypes=" + columnTypes);
+	}
+
+	/**
+	 * Returns the ObjectInspector for the row.
+	 */
+	@Override
+	public ObjectInspector getObjectInspector() throws SerDeException {
+		return cachedObjectInspector;
+	}
+
+	/**
+	 * Returns the Writable Class after serialization.
+	 */
+	@Override
+	public Class<? extends Writable> getSerializedClass() {
+		return BytesWritable.class;
+	}
+
+	// The wrapper for byte array; lazily allocated on first deserialize().
+	ByteArrayRef byteArrayRef;
+
+	/**
+	 * Deserialize a table record to a Lazy struct.
+	 *
+	 * Accepts either a BytesWritable or a Text record; a zero-length record
+	 * deserializes to null. The same cachedLazyStruct instance is re-used
+	 * across calls, so callers must consume the row before the next call.
+	 *
+	 * @throws SerDeException
+	 *             if the field is neither BytesWritable nor Text
+	 */
+	@SuppressWarnings("deprecation")
+	@Override
+	public Object deserialize(Writable field) throws SerDeException {
+		if (byteArrayRef == null) {
+			byteArrayRef = new ByteArrayRef();
+		}
+		if (field instanceof BytesWritable) {
+			BytesWritable b = (BytesWritable) field;
+			if (b.getSize() == 0) {
+				return null;
+			}
+			// For backward-compatibility with hadoop 0.17
+			byteArrayRef.setData(b.get());
+			cachedLazyStruct.init(byteArrayRef.getData(), 0, b.getSize());
+		} else if (field instanceof Text) {
+			Text t = (Text) field;
+			if (t.getLength() == 0) {
+				return null;
+			}
+			byteArrayRef.setData(t.getBytes());
+			cachedLazyStruct.init(byteArrayRef.getData(), 0, t.getLength());
+		} else {
+			throw new SerDeException(getClass().toString()
+					+ ": expects either BytesWritable or Text object!");
+		}
+		return cachedLazyStruct;
+	}
+
+	/**
+	 * The reusable output buffer and serialize byte buffer.
+	 */
+	BytesWritable serializeBytesWritable = new BytesWritable();
+	ByteStream.Output serializeByteStream = new ByteStream.Output();
+
+	/**
+	 * Serialize an object to a byte buffer in a binary compact way.
+	 *
+	 * The returned BytesWritable is re-used across calls.
+	 */
+	@Override
+	public Writable serialize(Object obj, ObjectInspector objInspector)
+			throws SerDeException {
+		// make sure it is a struct record or not
+		serializeByteStream.reset();
+
+		if (objInspector.getCategory() != Category.STRUCT) {
+			// serialize the primitive object
+			serialize(serializeByteStream, obj, objInspector);
+		} else {
+			// serialize the row as a struct
+			serializeStruct(serializeByteStream, obj,
+					(StructObjectInspector) objInspector);
+		}
+		// return the serialized bytes
+		serializeBytesWritable.set(serializeByteStream.getData(), 0,
+				serializeByteStream.getCount());
+		return serializeBytesWritable;
+	}
+
+	// Tracks whether a null map key has already been warned about, so the
+	// warning is only emitted once per SerDe instance.
+	boolean nullMapKey = false;
+
+	/**
+	 * Serialize a struct object without writing the byte size. This function is
+	 * shared by both row serialization and struct serialization.
+	 * 
+	 * @param byteStream
+	 *            the byte stream storing the serialization data
+	 * @param obj
+	 *            the struct object to serialize
+	 * @param soi
+	 *            the struct object inspector
+	 */
+	private void serializeStruct(Output byteStream, Object obj,
+			StructObjectInspector soi) {
+		// do nothing for null struct
+		if (null == obj) {
+			return;
+		}
+		/*
+		 * Interleave serializing one null byte and 8 struct fields in each
+		 * round, in order to support data deserialization with different table
+		 * schemas
+		 */
+		List<? extends StructField> fields = soi.getAllStructFieldRefs();
+		int size = fields.size();
+		int lasti = 0;
+		byte nullByte = 0;
+		for (int i = 0; i < size; i++) {
+			// set bit to 1 if a field is not null
+			if (null != soi.getStructFieldData(obj, fields.get(i))) {
+				nullByte |= 1 << (i % 8);
+			}
+			// write the null byte every eight elements or
+			// if this is the last element and serialize the
+			// corresponding 8 struct fields at the same time
+			if (7 == i % 8 || i == size - 1) {
+				// FIX: write to the byteStream parameter instead of the shared
+				// serializeByteStream field. The old code ignored the
+				// parameter, which would corrupt output if this method were
+				// ever invoked with any other stream.
+				byteStream.write(nullByte);
+				for (int j = lasti; j <= i; j++) {
+					serialize(byteStream, soi.getStructFieldData(obj,
+							fields.get(j)), fields.get(j)
+							.getFieldObjectInspector());
+				}
+				lasti = i + 1;
+				nullByte = 0;
+			}
+		}
+	}
+
+	/**
+	 * A recursive function that serialize an object to a byte buffer based on
+	 * its object inspector.
+	 * 
+	 * @param byteStream
+	 *            the byte stream storing the serialization data
+	 * @param obj
+	 *            the object to serialize
+	 * @param objInspector
+	 *            the object inspector
+	 */
+	private void serialize(Output byteStream, Object obj,
+			ObjectInspector objInspector) {
+
+		// do nothing for null object
+		if (null == obj) {
+			return;
+		}
+
+		switch (objInspector.getCategory()) {
+		case PRIMITIVE: {
+			PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
+			switch (poi.getPrimitiveCategory()) {
+			case VOID: {
+				return;
+			}
+			case BOOLEAN: {
+				// one byte: 1 for true, 0 for false
+				boolean v = ((BooleanObjectInspector) poi).get(obj);
+				byteStream.write((byte) (v ? 1 : 0));
+				return;
+			}
+			case BYTE: {
+				ByteObjectInspector boi = (ByteObjectInspector) poi;
+				byte v = boi.get(obj);
+				byteStream.write(v);
+				return;
+			}
+			case SHORT: {
+				// two bytes, big-endian
+				ShortObjectInspector spoi = (ShortObjectInspector) poi;
+				short v = spoi.get(obj);
+				byteStream.write((byte) (v >> 8));
+				byteStream.write((byte) (v));
+				return;
+			}
+			case INT: {
+				// variable-length zigzag-style encoding via LazyUtils
+				IntObjectInspector ioi = (IntObjectInspector) poi;
+				int v = ioi.get(obj);
+				LazyUtils.writeVInt(byteStream, v);
+				return;
+			}
+			case LONG: {
+				LongObjectInspector loi = (LongObjectInspector) poi;
+				long v = loi.get(obj);
+				LazyUtils.writeVLong(byteStream, v);
+				return;
+			}
+			case FLOAT: {
+				// four bytes, big-endian IEEE-754 bits
+				FloatObjectInspector foi = (FloatObjectInspector) poi;
+				int v = Float.floatToIntBits(foi.get(obj));
+				byteStream.write((byte) (v >> 24));
+				byteStream.write((byte) (v >> 16));
+				byteStream.write((byte) (v >> 8));
+				byteStream.write((byte) (v));
+				return;
+			}
+			case DOUBLE: {
+				// eight bytes, big-endian IEEE-754 bits
+				DoubleObjectInspector doi = (DoubleObjectInspector) poi;
+				long v = Double.doubleToLongBits(doi.get(obj));
+				byteStream.write((byte) (v >> 56));
+				byteStream.write((byte) (v >> 48));
+				byteStream.write((byte) (v >> 40));
+				byteStream.write((byte) (v >> 32));
+				byteStream.write((byte) (v >> 24));
+				byteStream.write((byte) (v >> 16));
+				byteStream.write((byte) (v >> 8));
+				byteStream.write((byte) (v));
+				return;
+			}
+			case STRING: {
+				StringObjectInspector soi = (StringObjectInspector) poi;
+				Text t = soi.getPrimitiveWritableObject(obj);
+				/* write byte size of the string which is a vint */
+				int length = t.getLength();
+				LazyUtils.writeVInt(byteStream, length);
+				/* write string itself */
+				byte[] data = t.getBytes();
+				byteStream.write(data, 0, length);
+				return;
+			}
+			default: {
+				throw new RuntimeException("Unrecognized type: "
+						+ poi.getPrimitiveCategory());
+			}
+			}
+		}
+		case LIST: {
+			ListObjectInspector loi = (ListObjectInspector) objInspector;
+			ObjectInspector eoi = loi.getListElementObjectInspector();
+
+			// 1/ reserve spaces for the byte size of the list
+			// which is a integer and takes four bytes
+			int byteSizeStart = byteStream.getCount();
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			int listStart = byteStream.getCount();
+
+			// 2/ write the size of the list as a VInt
+			int size = loi.getListLength(obj);
+			LazyUtils.writeVInt(byteStream, size);
+
+			// 3/ write the null bytes
+			byte nullByte = 0;
+			for (int eid = 0; eid < size; eid++) {
+				// set the bit to 1 if an element is not null
+				if (null != loi.getListElement(obj, eid)) {
+					nullByte |= 1 << (eid % 8);
+				}
+				// store the byte every eight elements or
+				// if this is the last element
+				if (7 == eid % 8 || eid == size - 1) {
+					byteStream.write(nullByte);
+					nullByte = 0;
+				}
+			}
+
+			// 4/ write element by element from the list
+			for (int eid = 0; eid < size; eid++) {
+				serialize(byteStream, loi.getListElement(obj, eid), eoi);
+			}
+
+			// 5/ update the list byte size (back-patch the reserved 4 bytes,
+			// big-endian)
+			int listEnd = byteStream.getCount();
+			int listSize = listEnd - listStart;
+			byte[] bytes = byteStream.getData();
+			bytes[byteSizeStart] = (byte) (listSize >> 24);
+			bytes[byteSizeStart + 1] = (byte) (listSize >> 16);
+			bytes[byteSizeStart + 2] = (byte) (listSize >> 8);
+			bytes[byteSizeStart + 3] = (byte) (listSize);
+
+			return;
+		}
+		case MAP: {
+			MapObjectInspector moi = (MapObjectInspector) objInspector;
+			ObjectInspector koi = moi.getMapKeyObjectInspector();
+			ObjectInspector voi = moi.getMapValueObjectInspector();
+			Map<?, ?> map = moi.getMap(obj);
+
+			// 1/ reserve spaces for the byte size of the map
+			// which is a integer and takes four bytes
+			int byteSizeStart = byteStream.getCount();
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			int mapStart = byteStream.getCount();
+
+			// 2/ write the size of the map which is a VInt
+			int size = map.size();
+			LazyUtils.writeVInt(byteStream, size);
+
+			// 3/ write the null bytes: two bits per entry (key bit, value
+			// bit), flushed every four key-value pairs or at the end
+			int b = 0;
+			byte nullByte = 0;
+			for (Map.Entry<?, ?> entry : map.entrySet()) {
+				// set the bit to 1 if a key is not null
+				if (null != entry.getKey()) {
+					nullByte |= 1 << (b % 8);
+				} else if (!nullMapKey) {
+					nullMapKey = true;
+					LOG.warn("Null map key encountered! Ignoring similar problems.");
+				}
+				b++;
+				// set the bit to 1 if a value is not null
+				if (null != entry.getValue()) {
+					nullByte |= 1 << (b % 8);
+				}
+				b++;
+				// write the byte to stream every 4 key-value pairs
+				// or if this is the last key-value pair
+				if (0 == b % 8 || b == size * 2) {
+					byteStream.write(nullByte);
+					nullByte = 0;
+				}
+			}
+
+			// 4/ write key-value pairs one by one
+			for (Map.Entry<?, ?> entry : map.entrySet()) {
+				serialize(byteStream, entry.getKey(), koi);
+				serialize(byteStream, entry.getValue(), voi);
+			}
+
+			// 5/ update the byte size of the map (back-patch, big-endian)
+			int mapEnd = byteStream.getCount();
+			int mapSize = mapEnd - mapStart;
+			byte[] bytes = byteStream.getData();
+			bytes[byteSizeStart] = (byte) (mapSize >> 24);
+			bytes[byteSizeStart + 1] = (byte) (mapSize >> 16);
+			bytes[byteSizeStart + 2] = (byte) (mapSize >> 8);
+			bytes[byteSizeStart + 3] = (byte) (mapSize);
+
+			return;
+		}
+		case STRUCT: {
+			// 1/ reserve spaces for the byte size of the struct
+			// which is a integer and takes four bytes
+			int byteSizeStart = byteStream.getCount();
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			byteStream.write((byte) 0);
+			int structStart = byteStream.getCount();
+
+			// 2/ serialize the struct
+			serializeStruct(byteStream, obj,
+					(StructObjectInspector) objInspector);
+
+			// 3/ update the byte size of the struct (back-patch, big-endian)
+			int structEnd = byteStream.getCount();
+			int structSize = structEnd - structStart;
+			byte[] bytes = byteStream.getData();
+			bytes[byteSizeStart] = (byte) (structSize >> 24);
+			bytes[byteSizeStart + 1] = (byte) (structSize >> 16);
+			bytes[byteSizeStart + 2] = (byte) (structSize >> 8);
+			bytes[byteSizeStart + 3] = (byte) (structSize);
+
+			return;
+		}
+		default: {
+			throw new RuntimeException("Unrecognized type: "
+					+ objInspector.getCategory());
+		}
+		}
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java
new file mode 100644
index 0000000..7484b72
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyShortObjectInspector;
+
+/**
+ * LazyObject for storing a value of Short.
+ * 
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project.
+ * 
+ * As with the specification, this implementation relied on code laid out in <a
+ * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
+ * Delight, (Addison Wesley, 2002)</a> as well as <a
+ * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ * 
+ */
+public class LazyShort extends
+		LazyPrimitive<LazyShortObjectInspector, ShortWritable> {
+
+	public LazyShort(LazyShortObjectInspector oi) {
+		super(oi);
+		data = new ShortWritable();
+	}
+
+	public LazyShort(LazyShort copy) {
+		super(copy);
+		data = new ShortWritable(copy.data.get());
+	}
+
+	/**
+	 * Parse a serialized short field. A zero-length field denotes null;
+	 * otherwise exactly two bytes are expected.
+	 */
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		isNull = (length == 0);
+		if (isNull) {
+			return;
+		}
+		assert (2 == length);
+		data.set(LazyUtils.byteArrayToShort(bytes, start));
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java
new file mode 100644
index 0000000..c13533b
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyStringObjectInspector;
+
+/**
+ * LazyObject for storing a value of String.
+ */
+public class LazyString extends LazyPrimitive<LazyStringObjectInspector, Text> {
+
+	public LazyString(LazyStringObjectInspector oi) {
+		super(oi);
+		data = new Text();
+	}
+
+	public LazyString(LazyString copy) {
+		super(copy);
+		data = new Text(copy.data);
+	}
+
+	VInt vInt = new LazyUtils.VInt();
+
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		if (length == 0) {
+			isNull = true;
+			return;
+		} else
+			isNull = false;
+
+		// get the byte length of the string
+		LazyUtils.readVInt(bytes, start, vInt);
+		if (vInt.value + vInt.length != length)
+			throw new IllegalStateException(
+					"parse string: length mismatch, expected "
+							+ (vInt.value + vInt.length) + " but get " + length);
+		assert (length - vInt.length > -1);
+		data.set(bytes, start + vInt.length, length - vInt.length);
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java
new file mode 100644
index 0000000..61cc335
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java
@@ -0,0 +1,244 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.RecordInfo;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyStructObjectInspector;
+
+/**
+ * LazyStruct is serialized as follows: start A B A B A B end bytes[] ->
+ * |-----|---------|--- ... ---|-----|---------|
+ * 
+ * Section A is one null-byte, corresponding to eight struct fields in Section
+ * B. Each bit indicates whether the corresponding field is null (0) or not null
+ * (1). Each field is a LazyObject.
+ * 
+ * Following B, there is another section A and B. This pattern repeats until the
+ * all struct fields are serialized.
+ */
+public class LazyStruct extends LazyNonPrimitive<LazyStructObjectInspector> {
+
+	private static Log LOG = LogFactory.getLog(LazyStruct.class.getName());
+
+	/**
+	 * Whether the data is already parsed or not.
+	 */
+	boolean parsed;
+
+	/**
+	 * The fields of the struct.
+	 */
+	@SuppressWarnings("rawtypes")
+	LazyObject[] fields;
+
+	/**
+	 * Whether a field is initialized or not.
+	 */
+	boolean[] fieldInited;
+
+	/**
+	 * Whether a field is null or not. Because length is 0 does not means the
+	 * field is null. In particular, a 0-length string is not null.
+	 */
+	boolean[] fieldIsNull;
+
+	/**
+	 * The start positions and lengths of struct fields. Only valid when the
+	 * data is parsed.
+	 */
+	int[] fieldStart;
+	int[] fieldLength;
+
+	/**
+	 * Construct a LazyStruct object with an ObjectInspector.
+	 */
+	protected LazyStruct(LazyStructObjectInspector oi) {
+		super(oi);
+	}
+
+	// Record the byte range and defer actual parsing until a field is read.
+	@Override
+	public void init(byte[] bytes, int start, int length) {
+		super.init(bytes, start, length);
+		parsed = false;
+	}
+
+	// Scratch holder for per-field offset/size info, re-used across parse().
+	RecordInfo recordInfo = new LazyUtils.RecordInfo();
+	// One-shot warning flags so log noise is bounded per instance.
+	boolean missingFieldWarned = false;
+	boolean extraFieldWarned = false;
+
+	/**
+	 * Parse the byte[] and fill fieldStart, fieldLength, fieldInited and
+	 * fieldIsNull.
+	 */
+	private void parse() {
+
+		List<? extends StructField> fieldRefs = ((StructObjectInspector) oi)
+				.getAllStructFieldRefs();
+
+		// Lazily allocate the per-field arrays on first parse; they are
+		// re-used for every subsequent row.
+		if (fields == null) {
+			fields = new LazyObject[fieldRefs.size()];
+			for (int i = 0; i < fields.length; i++) {
+				ObjectInspector insp = fieldRefs.get(i)
+						.getFieldObjectInspector();
+				fields[i] = insp == null ? null : LazyFactory
+						.createLazyObject(insp);
+			}
+			fieldInited = new boolean[fields.length];
+			fieldIsNull = new boolean[fields.length];
+			fieldStart = new int[fields.length];
+			fieldLength = new int[fields.length];
+		}
+
+		/**
+		 * Please note that one null byte is followed by eight fields, then more
+		 * null byte and fields.
+		 */
+
+		int fieldId = 0;
+		int structByteEnd = start + length;
+
+		// The first byte is the null byte for fields 0..7; the cursor
+		// lastFieldByteEnd tracks the next unread position in bytes[].
+		byte nullByte = bytes[start];
+		int lastFieldByteEnd = start + 1;
+		// Go through all bytes in the byte[]
+		for (int i = 0; i < fields.length; i++) {
+			fieldIsNull[i] = true;
+			if ((nullByte & (1 << (i % 8))) != 0) {
+				fieldIsNull[i] = false;
+				// Ask LazyUtils for this field's header offset and payload
+				// size, then advance the cursor past it.
+				LazyUtils.checkObjectByteInfo(fieldRefs.get(i)
+						.getFieldObjectInspector(), bytes, lastFieldByteEnd,
+						recordInfo);
+				fieldStart[i] = lastFieldByteEnd + recordInfo.elementOffset;
+				fieldLength[i] = recordInfo.elementSize;
+				lastFieldByteEnd = fieldStart[i] + fieldLength[i];
+			}
+
+			// count how many fields are there
+			if (lastFieldByteEnd <= structByteEnd) {
+				fieldId++;
+			}
+			// next byte is a null byte if there are more bytes to go
+			if (7 == (i % 8)) {
+				if (lastFieldByteEnd < structByteEnd) {
+					nullByte = bytes[lastFieldByteEnd];
+					lastFieldByteEnd++;
+				} else {
+					// otherwise all null afterwards
+					// NOTE(review): the cursor is still advanced here even
+					// though no byte is read — presumably to keep the
+					// missing-field accounting below consistent; confirm.
+					nullByte = 0;
+					lastFieldByteEnd++;
+				}
+			}
+		}
+
+		// Extra bytes at the end?
+		if (!extraFieldWarned && lastFieldByteEnd < structByteEnd) {
+			extraFieldWarned = true;
+			LOG.warn("Extra bytes detected at the end of the row! Ignoring similar "
+					+ "problems.");
+		}
+
+		// Missing fields?
+		if (!missingFieldWarned && lastFieldByteEnd > structByteEnd) {
+			missingFieldWarned = true;
+			LOG.warn("Missing fields! Expected " + fields.length
+					+ " fields but " + "only got " + fieldId
+					+ "! Ignoring similar problems.");
+		}
+
+		// All field offsets are known but no field has been materialized yet.
+		Arrays.fill(fieldInited, false);
+		parsed = true;
+	}
+
+	/**
+	 * Get one field out of the struct.
+	 * 
+	 * If the field is a primitive field, return the actual object. Otherwise
+	 * return the LazyObject. This is because PrimitiveObjectInspector does not
+	 * have control over the object used by the user - the user simply directly
+	 * use the Object instead of going through Object
+	 * PrimitiveObjectInspector.get(Object).
+	 * 
+	 * @param fieldID
+	 *            The field ID
+	 * @return The field as a LazyObject
+	 */
+	public Object getField(int fieldID) {
+		if (!parsed) {
+			parse();
+		}
+		return uncheckedGetField(fieldID);
+	}
+
+	/**
+	 * Get the field out of the row without checking parsed. This is called by
+	 * both getField and getFieldsAsList.
+	 * 
+	 * @param fieldID
+	 *            The id of the field starting from 0.
+	 * @return The value of the field
+	 */
+	private Object uncheckedGetField(int fieldID) {
+		// Test the length first so in most cases we avoid doing a byte[]
+		// comparison.
+		if (fieldIsNull[fieldID]) {
+			return null;
+		}
+		// Materialize the field from its recorded byte range at most once.
+		if (!fieldInited[fieldID]) {
+			fieldInited[fieldID] = true;
+			fields[fieldID].init(bytes, fieldStart[fieldID],
+					fieldLength[fieldID]);
+		}
+		return fields[fieldID].getObject();
+	}
+
+	// Re-used list returned by getFieldsAsList(); invalidated by the next call.
+	ArrayList<Object> cachedList;
+
+	/**
+	 * Get the values of the fields as an ArrayList.
+	 * 
+	 * @return The values of the fields as an ArrayList.
+	 */
+	public ArrayList<Object> getFieldsAsList() {
+		if (!parsed) {
+			parse();
+		}
+		if (cachedList == null) {
+			cachedList = new ArrayList<Object>();
+		} else {
+			cachedList.clear();
+		}
+		for (int i = 0; i < fields.length; i++) {
+			cachedList.add(uncheckedGetField(i));
+		}
+		return cachedList;
+	}
+
+	@Override
+	public Object getObject() {
+		return this;
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java
new file mode 100644
index 0000000..2d0406c
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java
@@ -0,0 +1,529 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.WritableUtils;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyObjectInspectorFactory;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * LazyUtils.
+ * 
+ * Static helpers shared by the lazy SerDe: raw byte-array decoding,
+ * zero-compressed (variable-length) integer encoding/decoding, and
+ * construction plus caching of lazy object inspectors.
+ */
+public final class LazyUtils {
+
+	/**
+	 * Convert the byte array to an int starting from the given offset. Refer to
+	 * code by aeden on DZone Snippets:
+	 * 
+	 * @param b
+	 *            the byte array
+	 * @param offset
+	 *            the array offset
+	 * @return the integer
+	 */
+	public static int byteArrayToInt(byte[] b, int offset) {
+		int value = 0;
+		for (int i = 0; i < 4; i++) {
+			// big-endian: most significant byte first
+			int shift = (4 - 1 - i) * 8;
+			value += (b[i + offset] & 0x000000FF) << shift;
+		}
+		return value;
+	}
+
+	/**
+	 * Convert the byte array to a long starting from the given offset.
+	 * 
+	 * @param b
+	 *            the byte array
+	 * @param offset
+	 *            the array offset
+	 * @return the long
+	 */
+	public static long byteArrayToLong(byte[] b, int offset) {
+		long value = 0;
+		for (int i = 0; i < 8; i++) {
+			// big-endian: most significant byte first
+			int shift = (8 - 1 - i) * 8;
+			value += ((long) (b[i + offset] & 0x00000000000000FF)) << shift;
+		}
+		return value;
+	}
+
+	/**
+	 * Convert the byte array to a short starting from the given offset.
+	 * 
+	 * @param b
+	 *            the byte array
+	 * @param offset
+	 *            the array offset
+	 * @return the short
+	 */
+	public static short byteArrayToShort(byte[] b, int offset) {
+		short value = 0;
+		value += (b[offset] & 0x000000FF) << 8;
+		value += (b[offset + 1] & 0x000000FF);
+		return value;
+	}
+
+	/**
+	 * Record is the unit that data is serialized in. A record includes two
+	 * parts. The first part stores the size of the element and the second part
+	 * stores the real element. size element record ->
+	 * |----|-------------------------|
+	 * 
+	 * A RecordInfo stores two information of a record, the size of the "size"
+	 * part which is the element offset and the size of the element part which
+	 * is element size.
+	 */
+	public static class RecordInfo {
+		public RecordInfo() {
+			elementOffset = 0;
+			elementSize = 0;
+		}
+
+		public byte elementOffset;
+		public int elementSize;
+
+		@Override
+		public String toString() {
+			return "(" + elementOffset + ", " + elementSize + ")";
+		}
+	}
+
+	// NOTE(review): shared scratch buffer used by checkObjectByteInfo. This
+	// makes checkObjectByteInfo non-thread-safe; confirm it is never invoked
+	// from multiple threads concurrently.
+	static VInt vInt = new LazyUtils.VInt();
+
+	/**
+	 * Check a particular field and set its size and offset in bytes based on
+	 * the field type and the bytes arrays.
+	 * 
+	 * For void, boolean, byte, short, int, long, float and double, there is no
+	 * offset and the size is fixed. For string, map, list, struct, the first
+	 * four bytes are used to store the size. So the offset is 4 and the size is
+	 * computed by concatenating the first four bytes together. The first four
+	 * bytes are defined with respect to the offset in the bytes arrays.
+	 * 
+	 * @param objectInspector
+	 *            object inspector of the field
+	 * @param bytes
+	 *            bytes arrays store the table row
+	 * @param offset
+	 *            offset of this field
+	 * @param recordInfo
+	 *            modify this byteinfo object and return it
+	 */
+	public static void checkObjectByteInfo(ObjectInspector objectInspector,
+			byte[] bytes, int offset, RecordInfo recordInfo) {
+		Category category = objectInspector.getCategory();
+		switch (category) {
+		case PRIMITIVE:
+			PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) objectInspector)
+					.getPrimitiveCategory();
+			switch (primitiveCategory) {
+			case VOID:
+				recordInfo.elementOffset = 0;
+				recordInfo.elementSize = 0;
+				break;
+			case BOOLEAN:
+			case BYTE:
+				recordInfo.elementOffset = 0;
+				recordInfo.elementSize = 1;
+				break;
+			case SHORT:
+				recordInfo.elementOffset = 0;
+				recordInfo.elementSize = 2;
+				break;
+			case FLOAT:
+				recordInfo.elementOffset = 0;
+				recordInfo.elementSize = 4;
+				break;
+			case DOUBLE:
+				recordInfo.elementOffset = 0;
+				recordInfo.elementSize = 8;
+				break;
+			case INT:
+				// ints are stored as vints; the first byte encodes the size
+				recordInfo.elementOffset = 0;
+				recordInfo.elementSize = WritableUtils
+						.decodeVIntSize(bytes[offset]);
+				break;
+			case LONG:
+				// longs are stored as vints; the first byte encodes the size
+				recordInfo.elementOffset = 0;
+				recordInfo.elementSize = WritableUtils
+						.decodeVIntSize(bytes[offset]);
+				break;
+			case STRING:
+				// using vint instead of 4 bytes
+				LazyUtils.readVInt(bytes, offset, vInt);
+				recordInfo.elementOffset = vInt.length;
+				recordInfo.elementSize = vInt.value;
+				break;
+			default: {
+				throw new RuntimeException("Unrecognized primitive type: "
+						+ primitiveCategory);
+			}
+			}
+			break;
+		case LIST:
+		case MAP:
+		case STRUCT:
+			recordInfo.elementOffset = 4;
+			recordInfo.elementSize = LazyUtils.byteArrayToInt(bytes, offset);
+			break;
+		default: {
+			throw new RuntimeException("Unrecognized non-primitive type: "
+					+ category);
+		}
+		}
+	}
+
+	/**
+	 * A zero-compressed encoded long.
+	 */
+	public static class VLong {
+		public VLong() {
+			value = 0;
+			length = 0;
+		}
+
+		public long value;
+		public byte length;
+	};
+
+	/**
+	 * Reads a zero-compressed encoded long from a byte array and returns it.
+	 * 
+	 * @param bytes
+	 *            the byte array
+	 * @param offset
+	 *            offset of the array to read from
+	 * @param vlong
+	 *            storing the deserialized long and its size in byte
+	 */
+	public static void readVLong(byte[] bytes, int offset, VLong vlong) {
+		byte firstByte = bytes[offset];
+		vlong.length = (byte) WritableUtils.decodeVIntSize(firstByte);
+		if (vlong.length == 1) {
+			// single-byte encoding: the byte is the value itself
+			vlong.value = firstByte;
+			return;
+		}
+		long i = 0;
+		for (int idx = 0; idx < vlong.length - 1; idx++) {
+			byte b = bytes[offset + 1 + idx];
+			i = i << 8;
+			i = i | (b & 0xFF);
+		}
+		// negative values were stored as their one's complement
+		vlong.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
+	}
+
+	/**
+	 * A zero-compressed encoded integer.
+	 */
+	public static class VInt implements Serializable {
+		private static final long serialVersionUID = 1L;
+
+		public VInt() {
+			value = 0;
+			length = 0;
+		}
+
+		public int value;
+		public byte length;
+	};
+
+	/**
+	 * Reads a zero-compressed encoded int from a byte array and returns it.
+	 * 
+	 * @param bytes
+	 *            the byte array
+	 * @param offset
+	 *            offset of the array to read from
+	 * @param vInt
+	 *            storing the deserialized int and its size in byte
+	 */
+	public static void readVInt(byte[] bytes, int offset, VInt vInt) {
+		byte firstByte = bytes[offset];
+		vInt.length = (byte) WritableUtils.decodeVIntSize(firstByte);
+		if (vInt.length == 1) {
+			// single-byte encoding: the byte is the value itself
+			vInt.value = firstByte;
+			return;
+		}
+		int i = 0;
+		for (int idx = 0; idx < vInt.length - 1; idx++) {
+			byte b = bytes[offset + 1 + idx];
+			i = i << 8;
+			i = i | (b & 0xFF);
+		}
+		// negative values were stored as their one's complement
+		vInt.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1) : i);
+	}
+
+	/**
+	 * Writes a zero-compressed encoded int to a byte array.
+	 * 
+	 * @param byteStream
+	 *            the byte array/stream
+	 * @param i
+	 *            the int
+	 */
+	public static void writeVInt(Output byteStream, int i) {
+		writeVLong(byteStream, i);
+	}
+
+	/**
+	 * Write a zero-compressed encoded long to a byte array.
+	 * 
+	 * @param byteStream
+	 *            the byte array/stream
+	 * @param l
+	 *            the long
+	 */
+	public static void writeVLong(Output byteStream, long l) {
+		if (l >= -112 && l <= 127) {
+			// small values fit into a single byte
+			byteStream.write((byte) l);
+			return;
+		}
+
+		int len = -112;
+		if (l < 0) {
+			l ^= -1L; // take one's complement'
+			len = -120;
+		}
+
+		long tmp = l;
+		while (tmp != 0) {
+			tmp = tmp >> 8;
+			len--;
+		}
+
+		byteStream.write((byte) len);
+
+		len = (len < -120) ? -(len + 120) : -(len + 112);
+
+		for (int idx = len; idx != 0; idx--) {
+			int shiftbits = (idx - 1) * 8;
+			long mask = 0xFFL << shiftbits;
+			byteStream.write((byte) ((l & mask) >> shiftbits));
+		}
+	}
+
+	static Map<TypeInfo, ObjectInspector> cachedLazyObjectInspector = new ConcurrentHashMap<TypeInfo, ObjectInspector>();
+
+	/**
+	 * Returns the lazy binary object inspector that can be used to inspect an
+	 * lazy binary object of that typeInfo
+	 * 
+	 * For primitive types, we use the standard writable object inspector.
+	 */
+	public static ObjectInspector getLazyObjectInspectorFromTypeInfo(
+			TypeInfo typeInfo, boolean topLevel) {
+		if (typeInfo == null)
+			throw new IllegalStateException("illegal type null ");
+		ObjectInspector result = cachedLazyObjectInspector.get(typeInfo);
+		if (result == null) {
+			switch (typeInfo.getCategory()) {
+			case PRIMITIVE: {
+				result = PrimitiveObjectInspectorFactory
+						.getPrimitiveLazyObjectInspector(((PrimitiveTypeInfo) typeInfo)
+								.getPrimitiveCategory());
+				break;
+			}
+			case LIST: {
+				ObjectInspector elementObjectInspector = getLazyObjectInspectorFromTypeInfo(
+						((ListTypeInfo) typeInfo).getListElementTypeInfo(),
+						false);
+				result = LazyObjectInspectorFactory
+						.getLazyListObjectInspector(elementObjectInspector);
+				break;
+			}
+			case MAP: {
+				MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
+				ObjectInspector keyObjectInspector = getLazyObjectInspectorFromTypeInfo(
+						mapTypeInfo.getMapKeyTypeInfo(), false);
+				ObjectInspector valueObjectInspector = getLazyObjectInspectorFromTypeInfo(
+						mapTypeInfo.getMapValueTypeInfo(), false);
+				result = LazyObjectInspectorFactory.getLazyMapObjectInspector(
+						keyObjectInspector, valueObjectInspector);
+				break;
+			}
+			case STRUCT: {
+				StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
+				List<String> fieldNames = structTypeInfo
+						.getAllStructFieldNames();
+				List<TypeInfo> fieldTypeInfos = structTypeInfo
+						.getAllStructFieldTypeInfos();
+				List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(
+						fieldTypeInfos.size());
+
+				for (int i = 0; i < fieldTypeInfos.size(); i++) {
+					fieldObjectInspectors
+							.add(getLazyObjectInspectorFromTypeInfo(
+									fieldTypeInfos.get(i), false));
+				}
+
+				// if it is top level then create columnar
+				if (topLevel)
+					result = LazyObjectInspectorFactory
+							.getLazyColumnarObjectInspector(fieldNames,
+									fieldObjectInspectors);
+				// if it is not top level then create struct
+				else
+					result = LazyObjectInspectorFactory
+							.getLazyStructObjectInspector(fieldNames,
+									fieldObjectInspectors);
+
+				break;
+			}
+			default: {
+				// Bug fix: the old code set result = null here and fell
+				// through to the cache put below, which throws an obscure
+				// NullPointerException (ConcurrentHashMap rejects null
+				// values). Fail fast with a descriptive message instead.
+				throw new IllegalStateException("unsupported type category: "
+						+ typeInfo.getCategory());
+			}
+			}
+			cachedLazyObjectInspector.put(typeInfo, result);
+		}
+		return result;
+	}
+
+	/**
+	 * get top-level lazy object inspector
+	 * 
+	 * @param fieldNames
+	 * @param fieldTypeInfos
+	 * @return
+	 */
+	public static ObjectInspector getLazyObjectInspector(
+			List<String> fieldNames, List<TypeInfo> fieldTypeInfos) {
+		List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(
+				fieldTypeInfos.size());
+		for (int i = 0; i < fieldTypeInfos.size(); i++) {
+			fieldObjectInspectors.add(getLazyObjectInspectorFromTypeInfo(
+					fieldTypeInfos.get(i), false));
+		}
+
+		return LazyObjectInspectorFactory.getLazyColumnarObjectInspector(
+				fieldNames, fieldObjectInspectors);
+	}
+
+	private LazyUtils() {
+		// prevent instantiation
+	}
+
+	/**
+	 * Returns -1 if the first byte sequence is lexicographically less than the
+	 * second; returns +1 if the second byte sequence is lexicographically less
+	 * than the first; otherwise return 0.
+	 */
+	public static int compare(byte[] b1, int start1, int length1, byte[] b2,
+			int start2, int length2) {
+
+		int min = Math.min(length1, length2);
+
+		for (int i = 0; i < min; i++) {
+			if (b1[start1 + i] == b2[start2 + i]) {
+				continue;
+			}
+			if (b1[start1 + i] < b2[start2 + i]) {
+				return -1;
+			} else {
+				return 1;
+			}
+		}
+
+		// a proper prefix sorts before the longer sequence
+		if (length1 < length2) {
+			return -1;
+		}
+		if (length1 > length2) {
+			return 1;
+		}
+		return 0;
+	}
+
+	/**
+	 * Compute a hash over the {@code len} bytes of {@code data} beginning at
+	 * {@code start}.
+	 * 
+	 * Bug fix: the loop bound used to be {@code i < len}, which treated
+	 * {@code len} as an end index and silently hashed the wrong range
+	 * whenever {@code start > 0}.
+	 */
+	public static int hashBytes(byte[] data, int start, int len) {
+		int hash = 1;
+		for (int i = start; i < start + len; i++) {
+			hash = (31 * hash) + data[i];
+		}
+		return hash;
+	}
+
+	/**
+	 * Writes a zero-compressed encoded int to a byte array.
+	 * 
+	 * @param byteStream
+	 *            the byte array/stream
+	 * @param i
+	 *            the int
+	 */
+	public static void writeVInt(DataOutput byteStream, int i)
+			throws IOException {
+		writeVLong(byteStream, i);
+	}
+
+	/**
+	 * Write a zero-compressed encoded long to a byte array.
+	 * 
+	 * @param byteStream
+	 *            the byte array/stream
+	 * @param l
+	 *            the long
+	 */
+	public static void writeVLong(DataOutput byteStream, long l)
+			throws IOException {
+		if (l >= -112 && l <= 127) {
+			// small values fit into a single byte
+			byteStream.write((byte) l);
+			return;
+		}
+
+		int len = -112;
+		if (l < 0) {
+			l ^= -1L; // take one's complement'
+			len = -120;
+		}
+
+		long tmp = l;
+		while (tmp != 0) {
+			tmp = tmp >> 8;
+			len--;
+		}
+
+		byteStream.write((byte) len);
+
+		len = (len < -120) ? -(len + 120) : -(len + 112);
+
+		for (int idx = len; idx != 0; idx--) {
+			int shiftbits = (idx - 1) * 8;
+			long mask = 0xFFL << shiftbits;
+			byteStream.write((byte) ((l & mask) >> shiftbits));
+		}
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java
new file mode 100644
index 0000000..b20f185
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyColumnar;
+
+/**
+ * ObjectInspector for LazyColumnar.
+ * 
+ * @see LazyColumnar
+ */
+public class LazyColumnarObjectInspector extends StandardStructObjectInspector
+		implements Serializable {
+
+	private static final long serialVersionUID = 1L;
+
+	public LazyColumnarObjectInspector(List<String> structFieldNames,
+			List<ObjectInspector> structFieldObjectInspectors) {
+		super(structFieldNames, structFieldObjectInspectors);
+	}
+
+	public LazyColumnarObjectInspector(List<StructField> fields) {
+		super(fields);
+	}
+
+	/**
+	 * Extract one column from a LazyColumnar row; null data yields null.
+	 */
+	@Override
+	public Object getStructFieldData(Object data, StructField fieldRef) {
+		if (data == null) {
+			return null;
+		}
+		LazyColumnar struct = (LazyColumnar) data;
+		MyField f = (MyField) fieldRef;
+
+		int fieldID = f.getFieldID();
+		assert (fieldID >= 0 && fieldID < fields.size());
+
+		Object column = struct.getField(fieldID);
+		return column;
+	}
+
+	@Override
+	public List<Object> getStructFieldsDataAsList(Object data) {
+		if (data == null) {
+			return null;
+		}
+		LazyColumnar struct = (LazyColumnar) data;
+		return struct.getFieldsAsList();
+	}
+
+	/** Debug representation: "name:type" pairs for every field. */
+	@Override
+	public String toString() {
+		// StringBuilder avoids quadratic String concatenation in the loop;
+		// the produced text is identical to the old concatenation.
+		StringBuilder str = new StringBuilder();
+		for (MyField f : fields) {
+			str.append(f.getFieldName()).append(":")
+					.append(f.getFieldObjectInspector().getTypeName())
+					.append("  ");
+		}
+		return str.toString();
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java
new file mode 100644
index 0000000..dc4e85b
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyArray;
+
+/**
+ * ObjectInspector for LazyList.
+ * 
+ * Every accessor delegates to the underlying LazyArray; a null input maps to
+ * the standard "absent" result (null list, null element, length -1).
+ */
+public class LazyListObjectInspector extends StandardListObjectInspector {
+
+	protected LazyListObjectInspector(ObjectInspector listElementObjectInspector) {
+		super(listElementObjectInspector);
+	}
+
+	@Override
+	public List<?> getList(Object data) {
+		return data == null ? null : ((LazyArray) data).getList();
+	}
+
+	@Override
+	public Object getListElement(Object data, int index) {
+		return data == null ? null : ((LazyArray) data)
+				.getListElementObject(index);
+	}
+
+	@Override
+	public int getListLength(Object data) {
+		// -1 signals "no data", per the list inspector convention.
+		return data == null ? -1 : ((LazyArray) data).getListLength();
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java
new file mode 100644
index 0000000..a3be142
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyMap;
+
+/**
+ * ObjectInspector for LazyMap.
+ * 
+ * @see LazyMap
+ */
+public class LazyMapObjectInspector extends StandardMapObjectInspector {
+
+	protected LazyMapObjectInspector(ObjectInspector mapKeyObjectInspector,
+			ObjectInspector mapValueObjectInspector) {
+		super(mapKeyObjectInspector, mapValueObjectInspector);
+	}
+
+	@Override
+	public Map<?, ?> getMap(Object data) {
+		if (data == null) {
+			return null;
+		}
+		return ((LazyMap) data).getMap();
+	}
+
+	@Override
+	public int getMapSize(Object data) {
+		// -1 signals "no data", per the map inspector convention.
+		if (data == null) {
+			return -1;
+		}
+		return ((LazyMap) data).getMapSize();
+	}
+
+	/**
+	 * Look up the value stored under the given key.
+	 * 
+	 * Bug fix: the null-data branch used to return the Integer -1 (a
+	 * copy-paste from getMapSize). An Object-returning lookup on absent data
+	 * must return null.
+	 */
+	@Override
+	public Object getMapValueElement(Object data, Object key) {
+		if (data == null) {
+			return null;
+		}
+		return ((LazyMap) data).getMapValueElement(key);
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java
new file mode 100644
index 0000000..439b130
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java
@@ -0,0 +1,93 @@
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+/**
+ * ObjectInspectorFactory is the primary way to create new ObjectInspector
+ * instances.
+ * 
+ * SerDe classes should call the static functions in this library to create an
+ * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
+ * 
+ * The reason of having caches here is that ObjectInspectors do not have an
+ * internal state - so ObjectInspectors with the same construction parameters
+ * should result in exactly the same ObjectInspector.
+ */
+
+public final class LazyObjectInspectorFactory {
+
+	static ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector> cachedLazyColumnarObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector>();
+
+	static ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector> cachedLazyStructObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector>();
+
+	static ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazyListObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector>();
+
+	static ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazyMapObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector>();
+
+	/**
+	 * Get (or create and cache) the columnar inspector for the given field
+	 * names and field inspectors.
+	 * 
+	 * The old check-then-put sequence could hand two concurrent callers two
+	 * different inspector instances for the same signature; putIfAbsent
+	 * guarantees a single canonical instance per signature.
+	 */
+	public static LazyColumnarObjectInspector getLazyColumnarObjectInspector(
+			List<String> structFieldNames,
+			List<ObjectInspector> structFieldObjectInspectors) {
+		ArrayList<Object> signature = new ArrayList<Object>();
+		signature.add(structFieldNames);
+		signature.add(structFieldObjectInspectors);
+		LazyColumnarObjectInspector result = cachedLazyColumnarObjectInspector
+				.get(signature);
+		if (result == null) {
+			result = new LazyColumnarObjectInspector(structFieldNames,
+					structFieldObjectInspectors);
+			LazyColumnarObjectInspector prev = cachedLazyColumnarObjectInspector
+					.putIfAbsent(signature, result);
+			if (prev != null) {
+				result = prev;
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Get (or create and cache) the struct inspector for the given field
+	 * names and field inspectors.
+	 */
+	public static LazyStructObjectInspector getLazyStructObjectInspector(
+			List<String> structFieldNames,
+			List<ObjectInspector> structFieldObjectInspectors) {
+		ArrayList<Object> signature = new ArrayList<Object>();
+		signature.add(structFieldNames);
+		signature.add(structFieldObjectInspectors);
+		LazyStructObjectInspector result = cachedLazyStructObjectInspector
+				.get(signature);
+		if (result == null) {
+			result = new LazyStructObjectInspector(structFieldNames,
+					structFieldObjectInspectors);
+			LazyStructObjectInspector prev = cachedLazyStructObjectInspector
+					.putIfAbsent(signature, result);
+			if (prev != null) {
+				result = prev;
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Get (or create and cache) the list inspector for the given element
+	 * inspector.
+	 */
+	public static LazyListObjectInspector getLazyListObjectInspector(
+			ObjectInspector listElementInspector) {
+		ArrayList<Object> signature = new ArrayList<Object>();
+		signature.add(listElementInspector);
+		LazyListObjectInspector result = cachedLazyListObjectInspector
+				.get(signature);
+		if (result == null) {
+			result = new LazyListObjectInspector(listElementInspector);
+			LazyListObjectInspector prev = cachedLazyListObjectInspector
+					.putIfAbsent(signature, result);
+			if (prev != null) {
+				result = prev;
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Get (or create and cache) the map inspector for the given key and value
+	 * inspectors.
+	 */
+	public static LazyMapObjectInspector getLazyMapObjectInspector(
+			ObjectInspector keyInspector, ObjectInspector valueInspector) {
+		ArrayList<Object> signature = new ArrayList<Object>();
+		signature.add(keyInspector);
+		signature.add(valueInspector);
+		LazyMapObjectInspector result = cachedLazyMapObjectInspector
+				.get(signature);
+		if (result == null) {
+			result = new LazyMapObjectInspector(keyInspector, valueInspector);
+			LazyMapObjectInspector prev = cachedLazyMapObjectInspector
+					.putIfAbsent(signature, result);
+			if (prev != null) {
+				result = prev;
+			}
+		}
+		return result;
+	}
+
+	private LazyObjectInspectorFactory() {
+		// prevent instantiation
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java
new file mode 100644
index 0000000..1a50233
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyStruct;
+
+/**
+ * ObjectInspector for LazyStruct.
+ * 
+ * @see LazyStruct
+ */
+public class LazyStructObjectInspector extends StandardStructObjectInspector {
+
+	protected LazyStructObjectInspector(List<String> structFieldNames,
+			List<ObjectInspector> structFieldObjectInspectors) {
+		super(structFieldNames, structFieldObjectInspectors);
+	}
+
+	protected LazyStructObjectInspector(List<StructField> fields) {
+		super(fields);
+	}
+
+	@Override
+	public Object getStructFieldData(Object data, StructField fieldRef) {
+		if (data == null) {
+			return null;
+		}
+		// Resolve the field index and delegate to the lazy struct.
+		MyField field = (MyField) fieldRef;
+		int id = field.getFieldID();
+		assert (id >= 0 && id < fields.size());
+		return ((LazyStruct) data).getField(id);
+	}
+
+	@Override
+	public List<Object> getStructFieldsDataAsList(Object data) {
+		return data == null ? null : ((LazyStruct) data).getFieldsAsList();
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
new file mode 100644
index 0000000..7ef8bdd
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyPrimitive;
+
+/**
+ * An AbstractPrimitiveLazyObjectInspector for a LazyPrimitive object.
+ */
+public abstract class AbstractPrimitiveLazyObjectInspector<T extends Writable>
+		extends AbstractPrimitiveObjectInspector {
+
+	protected AbstractPrimitiveLazyObjectInspector(PrimitiveTypeEntry typeEntry) {
+		super(typeEntry);
+	}
+
+	/**
+	 * Returns the writable wrapped by the given LazyPrimitive, or null when
+	 * the input itself is null.
+	 * 
+	 * (Removed a leftover debug System.out.println on the null path; the
+	 * null case is handled by the return expression.)
+	 */
+	@SuppressWarnings("unchecked")
+	@Override
+	public T getPrimitiveWritableObject(Object o) {
+		return o == null ? null : ((LazyPrimitive<?, T>) o).getWritableObject();
+	}
+
+	@Override
+	public boolean preferWritable() {
+		// Lazy objects already hold a writable; avoid the Java-object copy.
+		return true;
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
new file mode 100644
index 0000000..472dce0
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.BooleanWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyBoolean;
+
+/**
+ * A LazyBooleanObjectInspector inspects a LazyBoolean object, exposing its
+ * underlying BooleanWritable.
+ */
+public class LazyBooleanObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<BooleanWritable> implements
+		BooleanObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyBooleanObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.booleanTypeEntry);
+	}
+
+	@Override
+	public boolean get(Object o) {
+		return getPrimitiveWritableObject(o).get();
+	}
+
+	// Copies via the LazyBoolean copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyBoolean((LazyBoolean) o);
+	}
+
+	// Boxes the primitive value; null maps to null.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		return o == null ? null : Boolean.valueOf(get(o));
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java
new file mode 100644
index 0000000..e631fc7
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.ByteWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyByte;
+
+/**
+ * A LazyByteObjectInspector inspects a LazyByte object, exposing its
+ * underlying ByteWritable.
+ */
+public class LazyByteObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<ByteWritable> implements
+		ByteObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyByteObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.byteTypeEntry);
+	}
+
+	@Override
+	public byte get(Object o) {
+		return getPrimitiveWritableObject(o).get();
+	}
+
+	// Copies via the LazyByte copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyByte((LazyByte) o);
+	}
+
+	// Boxes the primitive value; null maps to null.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		return o == null ? null : Byte.valueOf(get(o));
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
new file mode 100644
index 0000000..1257f11
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.DoubleWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyDouble;
+
+/**
+ * A LazyDoubleObjectInspector inspects a LazyDouble object, exposing its
+ * underlying DoubleWritable.
+ */
+public class LazyDoubleObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<DoubleWritable> implements
+		DoubleObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyDoubleObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.doubleTypeEntry);
+	}
+
+	@Override
+	public double get(Object o) {
+		return getPrimitiveWritableObject(o).get();
+	}
+
+	// Copies via the LazyDouble copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyDouble((LazyDouble) o);
+	}
+
+	// Boxes the primitive value; null maps to null.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		return o == null ? null : Double.valueOf(get(o));
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
new file mode 100644
index 0000000..c66a06f
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.FloatWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyFloat;
+
+/**
+ * A LazyFloatObjectInspector inspects a LazyFloat object, exposing its
+ * underlying FloatWritable.
+ */
+public class LazyFloatObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<FloatWritable> implements
+		FloatObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyFloatObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.floatTypeEntry);
+	}
+
+	@Override
+	public float get(Object o) {
+		return getPrimitiveWritableObject(o).get();
+	}
+
+	// Copies via the LazyFloat copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyFloat((LazyFloat) o);
+	}
+
+	// Boxes the primitive value; null maps to null.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		return o == null ? null : Float.valueOf(get(o));
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java
new file mode 100644
index 0000000..b2159e0
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.IntWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyInteger;
+
+/**
+ * A LazyIntObjectInspector inspects a LazyInteger object, exposing its
+ * underlying IntWritable.
+ */
+public class LazyIntObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<IntWritable> implements
+		IntObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyIntObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.intTypeEntry);
+	}
+
+	@Override
+	public int get(Object o) {
+		return getPrimitiveWritableObject(o).get();
+	}
+
+	// Copies via the LazyInteger copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyInteger((LazyInteger) o);
+	}
+
+	// Boxes the primitive value; null maps to null.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		return o == null ? null : Integer.valueOf(get(o));
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java
new file mode 100644
index 0000000..1fc2d53
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyLong;
+
+/**
+ * A LazyLongObjectInspector inspects a LazyLong object, exposing its
+ * underlying LongWritable.
+ */
+public class LazyLongObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<LongWritable> implements
+		LongObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyLongObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.longTypeEntry);
+	}
+
+	@Override
+	public long get(Object o) {
+		return getPrimitiveWritableObject(o).get();
+	}
+
+	// Copies via the LazyLong copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyLong((LazyLong) o);
+	}
+
+	// Boxes the primitive value; null maps to null.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		return o == null ? null : Long.valueOf(get(o));
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
new file mode 100644
index 0000000..134dc5a
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import java.util.ArrayList;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+/**
+ * LazyPrimitiveObjectInspectorFactory is the primary way to create new
+ * ObjectInspector instances.
+ * 
+ * SerDe classes should call the static functions in this library to create an
+ * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
+ * 
+ * The reason of having caches here is that ObjectInspector is because
+ * ObjectInspectors do not have an internal state - so ObjectInspectors with the
+ * same construction parameters should result in exactly the same
+ * ObjectInspector.
+ */
+public final class LazyPrimitiveObjectInspectorFactory {
+
+	public static final LazyBooleanObjectInspector LAZY_BOOLEAN_OBJECT_INSPECTOR = new LazyBooleanObjectInspector();
+	public static final LazyByteObjectInspector LAZY_BYTE_OBJECT_INSPECTOR = new LazyByteObjectInspector();
+	public static final LazyShortObjectInspector LAZY_SHORT_OBJECT_INSPECTOR = new LazyShortObjectInspector();
+	public static final LazyIntObjectInspector LAZY_INT_OBJECT_INSPECTOR = new LazyIntObjectInspector();
+	public static final LazyLongObjectInspector LAZY_LONG_OBJECT_INSPECTOR = new LazyLongObjectInspector();
+	public static final LazyFloatObjectInspector LAZY_FLOAT_OBJECT_INSPECTOR = new LazyFloatObjectInspector();
+	public static final LazyDoubleObjectInspector LAZY_DOUBLE_OBJECT_INSPECTOR = new LazyDoubleObjectInspector();
+	public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR = new LazyVoidObjectInspector();
+
+	static ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector>();
+
+	public static LazyStringObjectInspector getLazyStringObjectInspector(
+			boolean escaped, byte escapeChar) {
+		ArrayList<Object> signature = new ArrayList<Object>();
+		signature.add(Boolean.valueOf(escaped));
+		signature.add(Byte.valueOf(escapeChar));
+		LazyStringObjectInspector result = cachedLazyStringObjectInspector
+				.get(signature);
+		if (result == null) {
+			result = new LazyStringObjectInspector(escaped, escapeChar);
+			cachedLazyStringObjectInspector.put(signature, result);
+		}
+		return result;
+	}
+
+	public static AbstractPrimitiveLazyObjectInspector<?> getLazyObjectInspector(
+			PrimitiveCategory primitiveCategory, boolean escaped,
+			byte escapeChar) {
+
+		switch (primitiveCategory) {
+		case BOOLEAN:
+			return LAZY_BOOLEAN_OBJECT_INSPECTOR;
+		case BYTE:
+			return LAZY_BYTE_OBJECT_INSPECTOR;
+		case SHORT:
+			return LAZY_SHORT_OBJECT_INSPECTOR;
+		case INT:
+			return LAZY_INT_OBJECT_INSPECTOR;
+		case LONG:
+			return LAZY_LONG_OBJECT_INSPECTOR;
+		case FLOAT:
+			return LAZY_FLOAT_OBJECT_INSPECTOR;
+		case DOUBLE:
+			return LAZY_DOUBLE_OBJECT_INSPECTOR;
+		case STRING:
+			return getLazyStringObjectInspector(escaped, escapeChar);
+		case VOID:
+			return LAZY_VOID_OBJECT_INSPECTOR;
+		default:
+			throw new RuntimeException(
+					"Internal error: Cannot find ObjectInspector " + " for "
+							+ primitiveCategory);
+		}
+	}
+
+	private LazyPrimitiveObjectInspectorFactory() {
+		// prevent instantiation
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java
new file mode 100644
index 0000000..cb06dfd
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyShort;
+
+/**
+ * A LazyShortObjectInspector inspects a LazyShort object, exposing its
+ * underlying ShortWritable.
+ */
+public class LazyShortObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<ShortWritable> implements
+		ShortObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyShortObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.shortTypeEntry);
+	}
+
+	@Override
+	public short get(Object o) {
+		return getPrimitiveWritableObject(o).get();
+	}
+
+	// Copies via the LazyShort copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyShort((LazyShort) o);
+	}
+
+	// Boxes the primitive value; null maps to null.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		return o == null ? null : Short.valueOf(get(o));
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java
new file mode 100644
index 0000000..5832f34
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyString;
+
+/**
+ * A LazyStringObjectInspector inspects a LazyString object, exposing its
+ * underlying Text.
+ */
+public class LazyStringObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<Text> implements
+		StringObjectInspector {
+
+	// Escaping settings fixed at construction by the factory; they form the
+	// cache signature in LazyPrimitiveObjectInspectorFactory.
+	boolean escaped;
+	byte escapeChar;
+
+	LazyStringObjectInspector(boolean escaped, byte escapeChar) {
+		super(PrimitiveObjectInspectorUtils.stringTypeEntry);
+		this.escaped = escaped;
+		this.escapeChar = escapeChar;
+	}
+
+	// Copies via the LazyString copy constructor; null maps to null.
+	@Override
+	public Object copyObject(Object o) {
+		return o == null ? null : new LazyString((LazyString) o);
+	}
+
+	// Overrides the generic superclass implementation with a
+	// LazyString-specific cast.
+	@Override
+	public Text getPrimitiveWritableObject(Object o) {
+		return o == null ? null : ((LazyString) o).getWritableObject();
+	}
+
+	// Materializes the Text as a java.lang.String; null maps to null.
+	@Override
+	public String getPrimitiveJavaObject(Object o) {
+		return o == null ? null : ((LazyString) o).getWritableObject()
+				.toString();
+	}
+
+	public boolean isEscaped() {
+		return escaped;
+	}
+
+	public byte getEscapeChar() {
+		return escapeChar;
+	}
+
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
new file mode 100644
index 0000000..a30f1af
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
+import org.apache.hadoop.io.NullWritable;
+
+/**
+ * A LazyVoidObjectInspector inspects a NullWritable object.
+ */
+public class LazyVoidObjectInspector extends
+		AbstractPrimitiveLazyObjectInspector<NullWritable> implements
+		VoidObjectInspector {
+
+	// Package-private: instances are obtained via the factory singletons.
+	LazyVoidObjectInspector() {
+		super(PrimitiveObjectInspectorUtils.voidTypeEntry);
+	}
+
+	// The void value carries no state, so the reference itself is the copy.
+	@Override
+	public Object copyObject(Object o) {
+		return o;
+	}
+
+	// Void has no Java-level value; requesting one is a caller bug.
+	@Override
+	public Object getPrimitiveJavaObject(Object o) {
+		throw new RuntimeException("Internal error: cannot create Void object.");
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
new file mode 100644
index 0000000..e70bdb9
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+/**
+ * PrimitiveObjectInspectorFactory is the primary way to create new
+ * PrimitiveObjectInspector instances.
+ * 
+ * The reason for having caches here is that ObjectInspectors do not have
+ * an internal state - so ObjectInspectors with the
+ * same construction parameters should result in exactly the same
+ * ObjectInspector.
+ */
+public final class PrimitiveObjectInspectorFactory {
+
+	public static final LazyBooleanObjectInspector LazyBooleanObjectInspector = new LazyBooleanObjectInspector();
+	public static final LazyByteObjectInspector LazyByteObjectInspector = new LazyByteObjectInspector();
+	public static final LazyShortObjectInspector LazyShortObjectInspector = new LazyShortObjectInspector();
+	public static final LazyIntObjectInspector LazyIntObjectInspector = new LazyIntObjectInspector();
+	public static final LazyLongObjectInspector LazyLongObjectInspector = new LazyLongObjectInspector();
+	public static final LazyFloatObjectInspector LazyFloatObjectInspector = new LazyFloatObjectInspector();
+	public static final LazyDoubleObjectInspector LazyDoubleObjectInspector = new LazyDoubleObjectInspector();
+	public static final LazyStringObjectInspector LazyStringObjectInspector = new LazyStringObjectInspector(
+			false, (byte) '\\');
+	public static final LazyVoidObjectInspector LazyVoidObjectInspector = new LazyVoidObjectInspector();
+
+	private static HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>> cachedPrimitiveLazyInspectorCache = new HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>>();
+
+	static {
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BOOLEAN,
+				LazyBooleanObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BYTE,
+				LazyByteObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.SHORT,
+				LazyShortObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.INT,
+				LazyIntObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.LONG,
+				LazyLongObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.FLOAT,
+				LazyFloatObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.DOUBLE,
+				LazyDoubleObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.STRING,
+				LazyStringObjectInspector);
+		cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.VOID,
+				LazyVoidObjectInspector);
+	}
+
+	/**
+	 * Returns the cached lazy ObjectInspector for the PrimitiveCategory.
+	 * 
+	 * @param primitiveCategory
+	 */
+	public static AbstractPrimitiveLazyObjectInspector<?> getPrimitiveLazyObjectInspector(
+			PrimitiveCategory primitiveCategory) {
+		AbstractPrimitiveLazyObjectInspector<?> result = cachedPrimitiveLazyInspectorCache
+				.get(primitiveCategory);
+		if (result == null) {
+			throw new RuntimeException(
+					"Internal error: Cannot find ObjectInspector for "
+							+ primitiveCategory);
+		}
+		return result;
+	}
+
+	private PrimitiveObjectInspectorFactory() {
+		// prevent instantiation
+	}
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
new file mode 100644
index 0000000..aeea68f
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.serde.parser;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public interface IHiveParser {
+	/**
+	 * Parses one Hive row from its serialized text form into binary fields
+	 * appended to the given tuple builder.
+	 * 
+	 * @param data byte array containing the serialized row
+	 * @param start offset of the row within {@code data}
+	 * @param length number of bytes belonging to the row
+	 * @param tb tuple builder that receives the parsed binary fields
+	 * @throws IOException if the row cannot be parsed or written
+	 */
+	public void parse(byte[] data, int start, int length, ArrayTupleBuilder tb)
+			throws IOException;
+}
diff --git a/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
new file mode 100644
index 0000000..3aeb058
--- /dev/null
+++ b/hivesterix/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
@@ -0,0 +1,184 @@
+package edu.uci.ics.hivesterix.serde.parser;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
+import org.apache.hadoop.hive.serde2.lazy.LazyLong;
+import org.apache.hadoop.hive.serde2.lazy.LazyShort;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class TextToBinaryTupleParser implements IHiveParser {
+	private int[] invertedIndex;
+	private int[] fieldEnds;
+	private int lastNecessaryFieldIndex;
+	private LazySimpleStructObjectInspector inputObjectInspector;
+	private List<? extends StructField> fieldRefs;
+
+	public TextToBinaryTupleParser(int[] outputColumnsOffset,
+			ObjectInspector structInspector) {
+		int size = 0;
+		for (int i = 0; i < outputColumnsOffset.length; i++)
+			if (outputColumnsOffset[i] >= 0)
+				size++;
+		invertedIndex = new int[size];
+		for (int i = 0; i < outputColumnsOffset.length; i++)
+			if (outputColumnsOffset[i] >= 0) {
+				invertedIndex[outputColumnsOffset[i]] = i;
+				lastNecessaryFieldIndex = i;
+			}
+		fieldEnds = new int[outputColumnsOffset.length];
+		for (int i = 0; i < fieldEnds.length; i++)
+			fieldEnds[i] = 0;
+		inputObjectInspector = (LazySimpleStructObjectInspector) structInspector;
+		fieldRefs = inputObjectInspector.getAllStructFieldRefs();
+	}
+
+	@Override
+	public void parse(byte[] bytes, int start, int length, ArrayTupleBuilder tb)
+			throws IOException {
+		byte separator = inputObjectInspector.getSeparator();
+		boolean lastColumnTakesRest = inputObjectInspector
+				.getLastColumnTakesRest();
+		boolean isEscaped = inputObjectInspector.isEscaped();
+		byte escapeChar = inputObjectInspector.getEscapeChar();
+		DataOutput output = tb.getDataOutput();
+
+		int structByteEnd = start + length - 1;
+		int fieldId = 0;
+		int fieldByteEnd = start;
+
+		// Go through all bytes in the byte[]
+		while (fieldByteEnd <= structByteEnd
+				&& fieldId <= lastNecessaryFieldIndex) {
+			if (fieldByteEnd == structByteEnd
+					|| bytes[fieldByteEnd] == separator) {
+				// Reached the end of a field?
+				if (lastColumnTakesRest && fieldId == fieldEnds.length - 1) {
+					fieldByteEnd = structByteEnd;
+				}
+				fieldEnds[fieldId] = fieldByteEnd;
+				if (fieldId == fieldEnds.length - 1
+						|| fieldByteEnd == structByteEnd) {
+					// for the case of null fields
+					for (int i = fieldId; i < fieldEnds.length; i++) {
+						fieldEnds[i] = fieldByteEnd;
+					}
+					break;
+				}
+				fieldByteEnd++;
+				fieldId++;
+			} else {
+				if (isEscaped && bytes[fieldByteEnd] == escapeChar
+						&& fieldByteEnd + 1 < structByteEnd) {
+					// ignore the char after escape_char
+					fieldByteEnd += 2;
+				} else {
+					fieldByteEnd++;
+				}
+			}
+		}
+
+		for (int i = 0; i < invertedIndex.length; i++) {
+			int index = invertedIndex[i];
+			StructField fieldRef = fieldRefs.get(index);
+			ObjectInspector inspector = fieldRef.getFieldObjectInspector();
+			Category category = inspector.getCategory();
+			int fieldStart = index == 0 ? 0 : fieldEnds[index - 1] + 1;
+			int fieldEnd = fieldEnds[index];
+			if (bytes[fieldEnd] == separator)
+				fieldEnd--;
+			int fieldLen = fieldEnd - fieldStart + 1;
+			switch (category) {
+			case PRIMITIVE:
+				PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
+				switch (poi.getPrimitiveCategory()) {
+				case VOID: {
+					break;
+				}
+				case BOOLEAN: {
+					output.write(bytes[fieldStart]);
+					break;
+				}
+				case BYTE: {
+					output.write(bytes[fieldStart]);
+					break;
+				}
+				case SHORT: {
+					short v = LazyShort.parseShort(bytes, fieldStart, fieldLen);
+					output.write((byte) (v >> 8));
+					output.write((byte) (v));
+					break;
+				}
+				case INT: {
+					int v = LazyInteger.parseInt(bytes, fieldStart, fieldLen);
+					LazyUtils.writeVInt(output, v);
+					break;
+				}
+				case LONG: {
+					long v = LazyLong.parseLong(bytes, fieldStart, fieldLen);
+					LazyUtils.writeVLong(output, v);
+					break;
+				}
+				case FLOAT: {
+					float value = Float.parseFloat(Text.decode(bytes,
+							fieldStart, fieldLen));
+					int v = Float.floatToIntBits(value);
+					output.write((byte) (v >> 24));
+					output.write((byte) (v >> 16));
+					output.write((byte) (v >> 8));
+					output.write((byte) (v));
+					break;
+				}
+				case DOUBLE: {
+					try {
+						double value = Double.parseDouble(Text.decode(bytes,
+								fieldStart, fieldLen));
+						long v = Double.doubleToLongBits(value);
+						output.write((byte) (v >> 56));
+						output.write((byte) (v >> 48));
+						output.write((byte) (v >> 40));
+						output.write((byte) (v >> 32));
+						output.write((byte) (v >> 24));
+						output.write((byte) (v >> 16));
+						output.write((byte) (v >> 8));
+						output.write((byte) (v));
+					} catch (NumberFormatException e) {
+						throw e;
+					}
+					break;
+				}
+				case STRING: {
+					LazyUtils.writeVInt(output, fieldLen);
+					output.write(bytes, fieldStart, fieldLen);
+					break;
+				}
+				default: {
+					throw new RuntimeException("Unrecognized type: "
+							+ poi.getPrimitiveCategory());
+				}
+				}
+				break;
+			case STRUCT:
+				throw new NotImplementedException("Unrecognized type: struct ");
+			case LIST:
+				throw new NotImplementedException("Unrecognized type: struct ");
+			case MAP:
+				throw new NotImplementedException("Unrecognized type: struct ");
+			case UNION:
+				throw new NotImplementedException("Unrecognized type: struct ");
+			}
+			tb.addFieldEndOffset();
+		}
+	}
+}
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/Driver.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/Driver.java
new file mode 100644
index 0000000..57e2cc0
--- /dev/null
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/Driver.java
@@ -0,0 +1,1441 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.io.DataInput;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.exec.ConditionalTask;
+import org.apache.hadoop.hive.ql.exec.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.StatsTask;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.TaskResult;
+import org.apache.hadoop.hive.ql.exec.TaskRunner;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
+import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hadoop.hive.ql.hooks.HookContext;
+import org.apache.hadoop.hive.ql.hooks.PostExecute;
+import org.apache.hadoop.hive.ql.hooks.PreExecute;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockManagerCtx;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.DummyPartition;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
+import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.plan.ConditionalResolver;
+import org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hivesterix.runtime.exec.HyracksExecutionEngine;
+import edu.uci.ics.hivesterix.runtime.exec.IExecutionEngine;
+
+@SuppressWarnings({ "deprecation", "unused" })
+public class Driver implements CommandProcessor {
+
+	static final private Log LOG = LogFactory.getLog(Driver.class.getName());
+	static final private LogHelper console = new LogHelper(LOG);
+
+	// hive-sterix: engine used to compile queries for Hyracks instead of MapReduce
+	private IExecutionEngine engine;
+	// set to true in compile() once a query is handed to the Hyracks engine
+	private boolean hivesterix = false;
+
+	// maximum number of rows returned by getResults
+	private int maxRows = 100;
+	ByteStream.Output bos = new ByteStream.Output();
+
+	private HiveConf conf;
+	private DataInput resStream;
+	private Context ctx;
+	private QueryPlan plan;
+	private Schema schema;
+	private HiveLockManager hiveLockMgr;
+
+	// last error message / SQLState recorded by compile()
+	private String errorMessage;
+	private String SQLState;
+
+	// A limit on the number of threads that can be launched
+	private int maxthreads;
+	// milliseconds to sleep between task-launch retries
+	private final int sleeptime = 2000;
+
+	protected int tryCount = Integer.MAX_VALUE;
+
+	/**
+	 * Lazily creates the lock manager when concurrency support is enabled and
+	 * none has been set up yet.
+	 * 
+	 * @return 0 on success, 12 if the lock manager could not be initialized
+	 */
+	private int checkLockManager() {
+		boolean supportConcurrency = conf
+				.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+		if (supportConcurrency && (hiveLockMgr == null)) {
+			try {
+				setLockManager();
+			} catch (SemanticException e) {
+				errorMessage = "FAILED: Error in semantic analysis: "
+						+ e.getMessage();
+				SQLState = ErrorMsg.findSQLState(e.getMessage());
+				console.printError(
+						errorMessage,
+						"\n"
+								+ org.apache.hadoop.util.StringUtils
+										.stringifyException(e));
+				return (12);
+			}
+		}
+		return (0);
+	}
+
+	/**
+	 * Instantiates the configured HiveLockManager class via reflection and
+	 * hands it the current configuration.
+	 * 
+	 * @throws SemanticException
+	 *             if no lock manager is configured or it cannot be created
+	 */
+	private void setLockManager() throws SemanticException {
+		boolean supportConcurrency = conf
+				.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+		if (supportConcurrency) {
+			String lockMgr = conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
+			if ((lockMgr == null) || (lockMgr.isEmpty())) {
+				throw new SemanticException(
+						ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
+			}
+
+			try {
+				hiveLockMgr = (HiveLockManager) ReflectionUtils.newInstance(
+						conf.getClassByName(lockMgr), conf);
+				hiveLockMgr.setContext(new HiveLockManagerCtx(conf));
+			} catch (Exception e) {
+				throw new SemanticException(
+						ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg()
+								+ e.getMessage());
+			}
+		}
+	}
+
+	// Resets operator ids before compiling a new statement.
+	public void init() {
+		Operator.resetId();
+	}
+
+	/**
+	 * Return the status information about the Map-Reduce cluster.
+	 * 
+	 * @return the cluster status reported by the JobClient
+	 * @throws Exception
+	 *             if the JobClient cannot be created or queried
+	 */
+	public ClusterStatus getClusterStatus() throws Exception {
+		ClusterStatus cs;
+		try {
+			JobConf job = new JobConf(conf, ExecDriver.class);
+			JobClient jc = new JobClient(job);
+			cs = jc.getClusterStatus();
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw e;
+		}
+		LOG.info("Returning cluster status: " + cs.toString());
+		return cs;
+	}
+
+	// Returns the output schema computed by the last compile() call.
+	public Schema getSchema() {
+		return schema;
+	}
+
+	/**
+	 * Get a Schema with fields represented with native Hive types.
+	 * 
+	 * @param sem
+	 *            the analyzer of the compiled query; may be null
+	 * @param conf
+	 *            the Hive configuration
+	 * @return the result schema, or an empty Schema if none can be derived
+	 */
+	public static Schema getSchema(BaseSemanticAnalyzer sem, HiveConf conf) {
+		Schema schema = null;
+
+		// If we have a plan, prefer its logical result schema if it's
+		// available; otherwise, try digging out a fetch task; failing that,
+		// give up.
+		if (sem == null) {
+			// can't get any info without a plan
+		} else if (sem.getResultSchema() != null) {
+			List<FieldSchema> lst = sem.getResultSchema();
+			schema = new Schema(lst, null);
+		} else if (sem.getFetchTask() != null) {
+			FetchTask ft = sem.getFetchTask();
+			TableDesc td = ft.getTblDesc();
+			// partitioned tables don't have tableDesc set on the FetchTask.
+			// Instead
+			// they have a list of PartitionDesc objects, each with a table
+			// desc.
+			// Let's
+			// try to fetch the desc for the first partition and use it's
+			// deserializer.
+			if (td == null && ft.getWork() != null
+					&& ft.getWork().getPartDesc() != null) {
+				if (ft.getWork().getPartDesc().size() > 0) {
+					td = ft.getWork().getPartDesc().get(0).getTableDesc();
+				}
+			}
+
+			if (td == null) {
+				LOG.info("No returning schema.");
+			} else {
+				String tableName = "result";
+				List<FieldSchema> lst = null;
+				try {
+					lst = MetaStoreUtils.getFieldsFromDeserializer(tableName,
+							td.getDeserializer());
+				} catch (Exception e) {
+					LOG.warn("Error getting schema: "
+							+ org.apache.hadoop.util.StringUtils
+									.stringifyException(e));
+				}
+				if (lst != null) {
+					schema = new Schema(lst, null);
+				}
+			}
+		}
+		if (schema == null) {
+			schema = new Schema();
+		}
+		LOG.info("Returning Hive schema: " + schema);
+		return schema;
+	}
+
+	/**
+	 * Get a Schema with fields represented with Thrift DDL types.
+	 * 
+	 * @return the current schema with each field type converted in place to
+	 *         its Thrift representation
+	 * @throws Exception
+	 *             if the type conversion fails
+	 */
+	public Schema getThriftSchema() throws Exception {
+		Schema schema;
+		try {
+			schema = getSchema();
+			if (schema != null) {
+				List<FieldSchema> lst = schema.getFieldSchemas();
+				// Go over the schema and convert type to thrift type
+				if (lst != null) {
+					for (FieldSchema f : lst) {
+						f.setType(MetaStoreUtils.typeToThriftType(f.getType()));
+					}
+				}
+			}
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw e;
+		}
+		LOG.info("Returning Thrift schema: " + schema);
+		return schema;
+	}
+
+	/**
+	 * Return the maximum number of rows returned by getResults.
+	 * 
+	 * @return the current row limit (default 100)
+	 */
+	public int getMaxRows() {
+		return maxRows;
+	}
+
+	/**
+	 * Set the maximum number of rows returned by getResults.
+	 * 
+	 * @param maxRows
+	 *            the new row limit
+	 */
+	public void setMaxRows(int maxRows) {
+		this.maxRows = maxRows;
+	}
+
+	/**
+	 * Recursively checks whether any task in the given list, or any of its
+	 * child tasks, contains a reduce phase.
+	 * 
+	 * @param tasks
+	 *            the root tasks to inspect; may be null
+	 * @return true if at least one task (transitively) has a reduce phase
+	 */
+	public boolean hasReduceTasks(List<Task<? extends Serializable>> tasks) {
+		if (tasks == null) {
+			return false;
+		}
+
+		boolean hasReduce = false;
+		for (Task<? extends Serializable> task : tasks) {
+			if (task.hasReduce()) {
+				return true;
+			}
+
+			hasReduce = (hasReduce || hasReduceTasks(task.getChildTasks()));
+		}
+		return hasReduce;
+	}
+
+	/**
+	 * For backwards compatibility with current tests.
+	 * 
+	 * @param conf
+	 *            the Hive configuration to use
+	 */
+	public Driver(HiveConf conf) {
+		this.conf = conf;
+
+		// hivesterix: always create a Hyracks engine alongside the driver
+		engine = new HyracksExecutionEngine(conf);
+	}
+
+	// Uses the configuration of the current SessionState, if any.
+	public Driver() {
+		if (SessionState.get() != null) {
+			conf = SessionState.get().getConf();
+		}
+
+		// hivesterix
+		engine = new HyracksExecutionEngine(conf);
+	}
+
+	// hivesterix: variant that also prints the compiled plan to planPrinter
+	public Driver(HiveConf conf, PrintWriter planPrinter) {
+		this.conf = conf;
+		engine = new HyracksExecutionEngine(conf, planPrinter);
+	}
+
+	// Resets the hivesterix flag so the next query starts fresh.
+	public void clear() {
+		this.hivesterix = false;
+	}
+
+	/**
+	 * Compile a new query. Any currently-planned query associated with this
+	 * Driver is discarded.
+	 * 
+	 * @param command
+	 *            The SQL query to compile.
+	 * @return 0 on success; 10 on semantic error, 11 on parse error, 12 on
+	 *         internal error, 403 on authorization failure; for queries
+	 *         routed to Hyracks, the engine's compileJob return value
+	 */
+	public int compile(String command) {
+		if (plan != null) {
+			close();
+			plan = null;
+		}
+
+		TaskFactory.resetId();
+
+		try {
+			// substitute configuration variables, then parse to an AST
+			command = new VariableSubstitution().substitute(conf, command);
+			ctx = new Context(conf);
+
+			ParseDriver pd = new ParseDriver();
+			ASTNode tree = pd.parse(command, ctx);
+			tree = ParseUtils.findRootNonNullToken(tree);
+
+			BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+			List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
+
+			// Do semantic analysis and plan generation
+			if (saHooks != null) {
+				HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
+				hookCtx.setConf(conf);
+				for (AbstractSemanticAnalyzerHook hook : saHooks) {
+					tree = hook.preAnalyze(hookCtx, tree);
+				}
+				sem.analyze(tree, ctx);
+				for (AbstractSemanticAnalyzerHook hook : saHooks) {
+					hook.postAnalyze(hookCtx, sem.getRootTasks());
+				}
+			} else {
+				sem.analyze(tree, ctx);
+			}
+
+			LOG.info("Semantic Analysis Completed");
+
+			// validate the plan
+			sem.validate();
+
+			plan = new QueryPlan(command, sem);
+			// initialize FetchTask right here
+			if (plan.getFetchTask() != null) {
+				plan.getFetchTask().initialize(conf, plan, null);
+			}
+
+			// get the output schema
+			schema = getSchema(sem, conf);
+
+			// test Only - serialize the query plan and deserialize it
+			if (sem instanceof SemanticAnalyzer
+					&& command.toLowerCase().indexOf("create") < 0) {
+
+				Thread.currentThread().setContextClassLoader(
+						this.getClass().getClassLoader());
+
+				String queryPlanFileName = ctx.getLocalScratchDir(true)
+						+ Path.SEPARATOR_CHAR + "queryplan.xml";
+				LOG.info("query plan = " + queryPlanFileName);
+				queryPlanFileName = new Path(queryPlanFileName).toUri()
+						.getPath();
+
+				// serialize the queryPlan
+				FileOutputStream fos = new FileOutputStream(queryPlanFileName);
+				Utilities.serializeQueryPlan(plan, fos);
+				fos.close();
+
+				// deserialize the queryPlan
+				FileInputStream fis = new FileInputStream(queryPlanFileName);
+				QueryPlan newPlan = Utilities.deserializeQueryPlan(fis, conf);
+				fis.close();
+
+				// Use the deserialized plan
+				plan = newPlan;
+			}
+
+			// initialize FetchTask right here
+			if (plan.getFetchTask() != null) {
+				plan.getFetchTask().initialize(conf, plan, null);
+			}
+
+			// do the authorization check
+			if (HiveConf.getBoolVar(conf,
+					HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+				try {
+					// NOTE(review): the doAuthorization call is commented out,
+					// so authorization is effectively disabled even when the
+					// config flag is on - confirm this is intended.
+					// doAuthorization(sem);
+				} catch (AuthorizationException authExp) {
+					console.printError("Authorization failed:"
+							+ authExp.getMessage()
+							+ ". Use show grant to get more details.");
+					return 403;
+				}
+			}
+
+			// hyracks run: route non-DDL queries to the Hyracks engine
+			if (sem instanceof SemanticAnalyzer
+					&& command.toLowerCase().indexOf("create") < 0) {
+				hivesterix = true;
+				return engine.compileJob(sem.getRootTasks());
+			}
+
+			return 0;
+		} catch (SemanticException e) {
+			errorMessage = "FAILED: Error in semantic analysis: "
+					+ e.getMessage();
+			SQLState = ErrorMsg.findSQLState(e.getMessage());
+			console.printError(errorMessage, "\n"
+					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
+			return (10);
+		} catch (ParseException e) {
+			errorMessage = "FAILED: Parse Error: " + e.getMessage();
+			SQLState = ErrorMsg.findSQLState(e.getMessage());
+			console.printError(errorMessage, "\n"
+					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
+			return (11);
+		} catch (Exception e) {
+			errorMessage = "FAILED: Hive Internal Error: "
+					+ Utilities.getNameMessage(e);
+			SQLState = ErrorMsg.findSQLState(e.getMessage());
+			console.printError(errorMessage + "\n"
+					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
+			return (12);
+		}
+	}
+
+	/**
+	 * Checks that the current session may read the inputs and write the
+	 * outputs of the compiled statement, using table- or partition-level
+	 * privileges as configured. Currently not invoked: the call site in
+	 * compile() is commented out.
+	 * 
+	 * @param sem
+	 *            the analyzer holding the statement's inputs and outputs
+	 * @throws HiveException
+	 *             if metadata lookups fail
+	 * @throws AuthorizationException
+	 *             if a required privilege is missing
+	 */
+	private void doAuthorization(BaseSemanticAnalyzer sem)
+			throws HiveException, AuthorizationException {
+		HashSet<ReadEntity> inputs = sem.getInputs();
+		HashSet<WriteEntity> outputs = sem.getOutputs();
+		SessionState ss = SessionState.get();
+		HiveOperation op = ss.getHiveOperation();
+		Hive db = sem.getDb();
+		if (op != null) {
+			if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
+					|| op.equals(HiveOperation.CREATETABLE)) {
+				ss.getAuthorizer().authorize(
+						db.getDatabase(db.getCurrentDatabase()),
+						null,
+						HiveOperation.CREATETABLE_AS_SELECT
+								.getOutputRequiredPrivileges());
+			} else {
+				// if (op.equals(HiveOperation.IMPORT)) {
+				// ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
+				// if (!isa.existsTable()) {
+				ss.getAuthorizer().authorize(
+						db.getDatabase(db.getCurrentDatabase()),
+						null,
+						HiveOperation.CREATETABLE_AS_SELECT
+								.getOutputRequiredPrivileges());
+				// }
+				// }
+			}
+			if (outputs != null && outputs.size() > 0) {
+				for (WriteEntity write : outputs) {
+
+					// partition-level write: authorize the partition if it exists
+					if (write.getType() == WriteEntity.Type.PARTITION) {
+						Partition part = db.getPartition(write.getTable(),
+								write.getPartition().getSpec(), false);
+						if (part != null) {
+							ss.getAuthorizer().authorize(write.getPartition(),
+									null, op.getOutputRequiredPrivileges());
+							continue;
+						}
+					}
+
+					if (write.getTable() != null) {
+						ss.getAuthorizer().authorize(write.getTable(), null,
+								op.getOutputRequiredPrivileges());
+					}
+				}
+
+			}
+		}
+
+		if (inputs != null && inputs.size() > 0) {
+
+			Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
+			Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();
+
+			// per-table flag: does this table use partition-level privileges?
+			Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
+			for (ReadEntity read : inputs) {
+				if (read.getPartition() != null) {
+					Table tbl = read.getTable();
+					String tblName = tbl.getTableName();
+					if (tableUsePartLevelAuth.get(tblName) == null) {
+						boolean usePartLevelPriv = (tbl.getParameters().get(
+								"PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
+								.equalsIgnoreCase(tbl.getParameters().get(
+										"PARTITION_LEVEL_PRIVILEGE"))));
+						if (usePartLevelPriv) {
+							tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
+						} else {
+							tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
+						}
+					}
+				}
+			}
+
+			// for queries, collect the columns each table scan actually reads
+			if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
+					|| op.equals(HiveOperation.QUERY)) {
+				SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
+				ParseContext parseCtx = querySem.getParseContext();
+				Map<TableScanOperator, Table> tsoTopMap = parseCtx
+						.getTopToTable();
+
+				for (Map.Entry<String, Operator<? extends Serializable>> topOpMap : querySem
+						.getParseContext().getTopOps().entrySet()) {
+					Operator<? extends Serializable> topOp = topOpMap
+							.getValue();
+					if (topOp instanceof TableScanOperator
+							&& tsoTopMap.containsKey(topOp)) {
+						TableScanOperator tableScanOp = (TableScanOperator) topOp;
+						Table tbl = tsoTopMap.get(tableScanOp);
+						List<Integer> neededColumnIds = tableScanOp
+								.getNeededColumnIDs();
+						List<FieldSchema> columns = tbl.getCols();
+						List<String> cols = new ArrayList<String>();
+						if (neededColumnIds != null
+								&& neededColumnIds.size() > 0) {
+							for (int i = 0; i < neededColumnIds.size(); i++) {
+								cols.add(columns.get(neededColumnIds.get(i))
+										.getName());
+							}
+						} else {
+							// no projection info: assume all columns are read
+							for (int i = 0; i < columns.size(); i++) {
+								cols.add(columns.get(i).getName());
+							}
+						}
+						// NOTE(review): auto-unboxing; get() may return null
+						// for tables with no partition reads - TODO confirm
+						if (tbl.isPartitioned()
+								&& tableUsePartLevelAuth
+										.get(tbl.getTableName())) {
+							String alias_id = topOpMap.getKey();
+							PrunedPartitionList partsList = PartitionPruner
+									.prune(parseCtx.getTopToTable().get(topOp),
+											parseCtx.getOpToPartPruner().get(
+													topOp), parseCtx.getConf(),
+											alias_id,
+											parseCtx.getPrunedPartitions());
+							Set<Partition> parts = new HashSet<Partition>();
+							parts.addAll(partsList.getConfirmedPartns());
+							parts.addAll(partsList.getUnknownPartns());
+							for (Partition part : parts) {
+								List<String> existingCols = part2Cols.get(part);
+								if (existingCols == null) {
+									existingCols = new ArrayList<String>();
+								}
+								existingCols.addAll(cols);
+								part2Cols.put(part, existingCols);
+							}
+						} else {
+							List<String> existingCols = tab2Cols.get(tbl);
+							if (existingCols == null) {
+								existingCols = new ArrayList<String>();
+							}
+							existingCols.addAll(cols);
+							tab2Cols.put(tbl, existingCols);
+						}
+					}
+				}
+			}
+
+			// cache the results for table authorization
+			Set<String> tableAuthChecked = new HashSet<String>();
+			for (ReadEntity read : inputs) {
+				Table tbl = null;
+				if (read.getPartition() != null) {
+					tbl = read.getPartition().getTable();
+					// use partition level authorization
+					// NOTE(review): auto-unboxing of a possibly-null Boolean
+					// (get() misses for tables seen only via getTable()) -
+					// TODO confirm this cannot NPE
+					if (tableUsePartLevelAuth.get(tbl.getTableName())) {
+						List<String> cols = part2Cols.get(read.getPartition());
+						if (cols != null && cols.size() > 0) {
+							ss.getAuthorizer().authorize(
+									read.getPartition().getTable(),
+									read.getPartition(), cols,
+									op.getInputRequiredPrivileges(), null);
+						} else {
+							ss.getAuthorizer().authorize(read.getPartition(),
+									op.getInputRequiredPrivileges(), null);
+						}
+						continue;
+					}
+				} else if (read.getTable() != null) {
+					tbl = read.getTable();
+				}
+
+				// if we reach here, it means it needs to do a table
+				// authorization
+				// check, and the table authorization may already happened
+				// because of other
+				// partitions
+				if (tbl != null
+						&& !tableAuthChecked.contains(tbl.getTableName())) {
+					List<String> cols = tab2Cols.get(tbl);
+					if (cols != null && cols.size() > 0) {
+						ss.getAuthorizer().authorize(tbl, null, cols,
+								op.getInputRequiredPrivileges(), null);
+					} else {
+						ss.getAuthorizer().authorize(tbl,
+								op.getInputRequiredPrivileges(), null);
+					}
+					tableAuthChecked.add(tbl.getTableName());
+				}
+			}
+
+		}
+	}
+
+	/**
+	 * @return The current query plan associated with this Driver, or null if
+	 *         no query has been compiled yet.
+	 */
+	public QueryPlan getPlan() {
+		return plan;
+	}
+
+	/**
+	 * Builds the list of lock objects for a table or a partition. If a
+	 * partition needs to be locked (in any mode), all of its parents (partial
+	 * partition specs, table, database) are also locked in SHARED mode.
+	 * 
+	 * @param t
+	 *            The table to be locked, or null
+	 * @param p
+	 *            The partition to be locked, or null
+	 * @param mode
+	 *            The mode of the lock (SHARED/EXCLUSIVE) for the object
+	 *            itself; parents are always SHARED
+	 * @return the locks to acquire, most specific first
+	 * @throws SemanticException
+	 *             if a dummy parent partition cannot be constructed
+	 **/
+	private List<HiveLockObj> getLockObjects(Table t, Partition p,
+			HiveLockMode mode) throws SemanticException {
+		List<HiveLockObj> locks = new LinkedList<HiveLockObj>();
+
+		HiveLockObjectData lockData = new HiveLockObjectData(plan.getQueryId(),
+				String.valueOf(System.currentTimeMillis()), "IMPLICIT");
+
+		if (t != null) {
+			locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
+			mode = HiveLockMode.SHARED;
+			locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(),
+					lockData), mode));
+			return locks;
+		}
+
+		if (p != null) {
+			if (!(p instanceof DummyPartition)) {
+				locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
+			}
+
+			// All the parents are locked in shared mode
+			mode = HiveLockMode.SHARED;
+
+			// For dummy partitions, only partition name is needed
+			String name = p.getName();
+
+			if (p instanceof DummyPartition) {
+				name = p.getName().split("@")[2];
+			}
+
+			// NOTE(review): partName is assigned but never used - confirm
+			// whether it can be removed.
+			String partName = name;
+			String partialName = "";
+			String[] partns = name.split("/");
+			int len = p instanceof DummyPartition ? partns.length
+					: partns.length - 1;
+			// lock each partial partition spec (prefix of the partition path)
+			for (int idx = 0; idx < len; idx++) {
+				String partn = partns[idx];
+				partialName += partn;
+				try {
+					locks.add(new HiveLockObj(new HiveLockObject(
+							new DummyPartition(p.getTable(), p.getTable()
+									.getDbName()
+									+ "/"
+									+ p.getTable().getTableName()
+									+ "/"
+									+ partialName), lockData), mode));
+					partialName += "/";
+				} catch (HiveException e) {
+					throw new SemanticException(e.getMessage());
+				}
+			}
+
+			locks.add(new HiveLockObj(
+					new HiveLockObject(p.getTable(), lockData), mode));
+			locks.add(new HiveLockObj(new HiveLockObject(p.getTable()
+					.getDbName(), lockData), mode));
+		}
+		return locks;
+	}
+
+	/**
+	 * Acquire read and write locks needed by the statement. The list of objects
+	 * to be locked are obtained from he inputs and outputs populated by the
+	 * compiler. The lock acuisition scheme is pretty simple. If all the locks
+	 * cannot be obtained, error out. Deadlock is avoided by making sure that
+	 * the locks are lexicographically sorted.
+	 **/
+	public int acquireReadWriteLocks() {
+		try {
+			// retry pacing: configured value is in seconds, converted to ms
+			int sleepTime = conf
+					.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
+			int numRetries = conf
+					.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
+
+			// locking is a no-op unless concurrency support is enabled
+			boolean supportConcurrency = conf
+					.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+			if (!supportConcurrency) {
+				return 0;
+			}
+
+			List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();
+
+			// Sort all the inputs, outputs.
+			// If a lock needs to be acquired on any partition, a read lock
+			// needs to be acquired on all
+			// its parents also
+			for (ReadEntity input : plan.getInputs()) {
+				if (input.getType() == ReadEntity.Type.TABLE) {
+					lockObjects.addAll(getLockObjects(input.getTable(), null,
+							HiveLockMode.SHARED));
+				} else {
+					lockObjects.addAll(getLockObjects(null,
+							input.getPartition(), HiveLockMode.SHARED));
+				}
+			}
+
+			for (WriteEntity output : plan.getOutputs()) {
+				if (output.getTyp() == WriteEntity.Type.TABLE) {
+					// a completed table output takes EXCLUSIVE; otherwise
+					// SHARED is sufficient
+					lockObjects.addAll(getLockObjects(output.getTable(), null,
+							output.isComplete() ? HiveLockMode.EXCLUSIVE
+									: HiveLockMode.SHARED));
+				} else if (output.getTyp() == WriteEntity.Type.PARTITION) {
+					lockObjects.addAll(getLockObjects(null,
+							output.getPartition(), HiveLockMode.EXCLUSIVE));
+				}
+				// In case of dynamic queries, it is possible to have incomplete
+				// dummy partitions
+				else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
+					lockObjects.addAll(getLockObjects(null,
+							output.getPartition(), HiveLockMode.SHARED));
+				}
+			}
+
+			if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
+				return 0;
+			}
+
+			int ret = checkLockManager();
+			if (ret != 0) {
+				return ret;
+			}
+
+			HiveLockObjectData lockData = new HiveLockObjectData(
+					plan.getQueryId(), String.valueOf(System
+							.currentTimeMillis()), "IMPLICIT");
+
+			// Lock the database also
+			try {
+				Hive db = Hive.get(conf);
+				lockObjects.add(new HiveLockObj(new HiveLockObject(db
+						.getCurrentDatabase(), lockData), HiveLockMode.SHARED));
+			} catch (HiveException e) {
+				throw new SemanticException(e.getMessage());
+			}
+
+			ctx.setHiveLockMgr(hiveLockMgr);
+			List<HiveLock> hiveLocks = null;
+
+			// NOTE(review): the lock() call below is commented out (hivesterix
+			// tweak), so hiveLocks can never become non-null here; if this
+			// method is actually invoked, the loop only sleeps between retries
+			// and the LOCK_CANNOT_BE_ACQUIRED error below is always raised.
+			// Confirm this is intended before re-enabling this code path.
+			int tryNum = 1;
+			do {
+
+				// ctx.getHiveLockMgr();
+				// hiveLocks = ctx.getHiveLockMgr().lock(lockObjects, false);
+
+				if (hiveLocks != null) {
+					break;
+				}
+
+				tryNum++;
+				try {
+					Thread.sleep(sleepTime);
+				} catch (InterruptedException e) {
+					// NOTE(review): interrupt status is swallowed here;
+					// consider re-interrupting the thread
+				}
+			} while (tryNum < numRetries);
+
+			if (hiveLocks == null) {
+				throw new SemanticException(
+						ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
+			} else {
+				ctx.setHiveLocks(hiveLocks);
+			}
+
+			return (0);
+		} catch (SemanticException e) {
+			errorMessage = "FAILED: Error in acquiring locks: "
+					+ e.getMessage();
+			SQLState = ErrorMsg.findSQLState(e.getMessage());
+			console.printError(errorMessage, "\n"
+					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
+			return (10);
+		} catch (Exception e) {
+			errorMessage = "FAILED: Error in acquiring locks: "
+					+ e.getMessage();
+			SQLState = ErrorMsg.findSQLState(e.getMessage());
+			console.printError(errorMessage, "\n"
+					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
+			return (10);
+		}
+	}
+
+	/**
+	 * Releases all locks acquired implicitly by the statement by closing the
+	 * context's lock manager. Locks taken with 'keepAlive' set to true are
+	 * not released.
+	 **/
+	private void releaseLocks() {
+		if (ctx == null || ctx.getHiveLockMgr() == null) {
+			return;
+		}
+		try {
+			ctx.getHiveLockMgr().close();
+			ctx.setHiveLocks(null);
+		} catch (LockException ignored) {
+			// best-effort release: failures while closing the lock manager
+			// are deliberately ignored
+		}
+	}
+
+	/**
+	 * Releases the given implicit locks through the context's lock manager.
+	 * If some of the locks have already been released, they are ignored.
+	 * 
+	 * @param hiveLocks
+	 *            list of hive locks to be released; may be null, in which
+	 *            case only the context's lock list is cleared
+	 **/
+	private void releaseLocks(List<HiveLock> hiveLocks) {
+		if (hiveLocks != null) {
+			ctx.getHiveLockMgr().releaseLocks(hiveLocks);
+		}
+		ctx.setHiveLocks(null);
+	}
+
+	/**
+	 * Compiles and executes the given command, returning a response carrying
+	 * the exit code plus any error message / SQLState set along the way.
+	 * 
+	 * NOTE(review): lock acquisition and lock release are commented out below
+	 * (hivesterix tweak), so this driver currently runs without concurrency
+	 * locking — confirm this is intended.
+	 * 
+	 * @param command
+	 *            the query string to run
+	 * @return response with a non-zero code on compile or execution failure
+	 */
+	public CommandProcessorResponse run(String command) {
+		errorMessage = null;
+		SQLState = null;
+
+		int ret = compile(command);
+		if (ret != 0) {
+			// releaseLocks(ctx.getHiveLocks());
+			return new CommandProcessorResponse(ret, errorMessage, SQLState);
+		}
+
+		// ret = acquireReadWriteLocks();
+		if (ret != 0) {
+			// releaseLocks(ctx.getHiveLocks());
+			return new CommandProcessorResponse(ret, errorMessage, SQLState);
+		}
+
+		ret = execute();
+		if (ret != 0) {
+			// releaseLocks(ctx.getHiveLocks());
+			return new CommandProcessorResponse(ret, errorMessage, SQLState);
+		}
+
+		// releaseLocks(ctx.getHiveLocks());
+		return new CommandProcessorResponse(ret);
+	}
+
+	/**
+	 * Instantiates the semantic analyzer hooks configured via the
+	 * hive.semantic.analyzer.hook variable (comma-separated class names).
+	 * 
+	 * @return the hooks in configuration order; empty if none configured
+	 * @throws Exception
+	 *             if a configured hook class cannot be loaded
+	 */
+	private List<AbstractSemanticAnalyzerHook> getSemanticAnalyzerHooks()
+			throws Exception {
+		ArrayList<AbstractSemanticAnalyzerHook> saHooks = new ArrayList<AbstractSemanticAnalyzerHook>();
+		String pestr = conf.getVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK);
+		if (pestr == null) {
+			return saHooks;
+		}
+		pestr = pestr.trim();
+		if (pestr.equals("")) {
+			return saHooks;
+		}
+
+		String[] peClasses = pestr.split(",");
+
+		for (String peClass : peClasses) {
+			try {
+				AbstractSemanticAnalyzerHook hook = HiveUtils
+						.getSemanticAnalyzerHook(conf, peClass);
+				saHooks.add(hook);
+			} catch (HiveException e) {
+				// fixed copy-paste error: this message previously claimed a
+				// "Pre Exec Hook" failed to load
+				console.printError("Semantic Analyzer Hook Class not found:"
+						+ e.getMessage());
+				throw e;
+			}
+		}
+
+		return saHooks;
+	}
+
+	/**
+	 * Instantiates the pre-execution hooks configured via the
+	 * hive.exec.pre.hooks variable (comma-separated class names).
+	 * 
+	 * @return the hooks in configuration order; empty if none configured
+	 * @throws Exception
+	 *             if a configured hook class cannot be loaded
+	 */
+	private List<Hook> getPreExecHooks() throws Exception {
+		ArrayList<Hook> pehooks = new ArrayList<Hook>();
+		String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
+		// guard against an unset variable before calling trim(), consistent
+		// with getSemanticAnalyzerHooks (avoids a NullPointerException)
+		if (pestr == null) {
+			return pehooks;
+		}
+		pestr = pestr.trim();
+		if (pestr.equals("")) {
+			return pehooks;
+		}
+
+		String[] peClasses = pestr.split(",");
+
+		for (String peClass : peClasses) {
+			try {
+				pehooks.add((Hook) Class.forName(peClass.trim(), true,
+						JavaUtils.getClassLoader()).newInstance());
+			} catch (ClassNotFoundException e) {
+				console.printError("Pre Exec Hook Class not found:"
+						+ e.getMessage());
+				throw e;
+			}
+		}
+
+		return pehooks;
+	}
+
+	/**
+	 * Instantiates the post-execution hooks configured via the
+	 * hive.exec.post.hooks variable (comma-separated class names).
+	 * 
+	 * @return the hooks in configuration order; empty if none configured
+	 * @throws Exception
+	 *             if a configured hook class cannot be loaded
+	 */
+	private List<Hook> getPostExecHooks() throws Exception {
+		ArrayList<Hook> pehooks = new ArrayList<Hook>();
+		String pestr = conf.getVar(HiveConf.ConfVars.POSTEXECHOOKS);
+		// guard against an unset variable before calling trim(), consistent
+		// with getSemanticAnalyzerHooks (avoids a NullPointerException)
+		if (pestr == null) {
+			return pehooks;
+		}
+		pestr = pestr.trim();
+		if (pestr.equals("")) {
+			return pehooks;
+		}
+
+		String[] peClasses = pestr.split(",");
+
+		for (String peClass : peClasses) {
+			try {
+				pehooks.add((Hook) Class.forName(peClass.trim(), true,
+						JavaUtils.getClassLoader()).newInstance());
+			} catch (ClassNotFoundException e) {
+				console.printError("Post Exec Hook Class not found:"
+						+ e.getMessage());
+				throw e;
+			}
+		}
+
+		return pehooks;
+	}
+
+	/**
+	 * Executes the compiled plan: runs the hivesterix engine job if one is
+	 * pending, fires pre-execution hooks, launches the plan's tasks (at most
+	 * maxthreads concurrently), handles per-task failure/backup tasks, prunes
+	 * incomplete outputs, and fires post-execution hooks.
+	 * 
+	 * @return 0 on success; 9 on task failure without a backup task; 12 on an
+	 *         internal error
+	 */
+	public int execute() {
+		// execute hivesterix plan
+		if (hivesterix) {
+			hivesterix = false;
+			int ret = engine.executeJob();
+			if (ret != 0)
+				return ret;
+		}
+
+		boolean noName = StringUtils.isEmpty(conf
+				.getVar(HiveConf.ConfVars.HADOOPJOBNAME));
+		int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
+
+		String queryId = plan.getQueryId();
+		String queryStr = plan.getQueryStr();
+
+		conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
+		conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, queryStr);
+		maxthreads = HiveConf.getIntVar(conf,
+				HiveConf.ConfVars.EXECPARALLETHREADNUMBER);
+
+		try {
+			LOG.info("Starting command: " + queryStr);
+
+			plan.setStarted();
+
+			if (SessionState.get() != null) {
+				SessionState
+						.get()
+						.getHiveHistory()
+						.startQuery(queryStr,
+								conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
+				SessionState.get().getHiveHistory().logPlanProgress(plan);
+			}
+			resStream = null;
+
+			HookContext hookContext = new HookContext(plan, conf);
+
+			// fire pre-execution hooks (both hook interface flavors)
+			for (Hook peh : getPreExecHooks()) {
+				if (peh instanceof ExecuteWithHookContext) {
+					((ExecuteWithHookContext) peh).run(hookContext);
+				} else if (peh instanceof PreExecute) {
+					((PreExecute) peh).run(SessionState.get(),
+							plan.getInputs(), plan.getOutputs(), ShimLoader
+									.getHadoopShims().getUGIForConf(conf));
+				}
+			}
+
+			int jobs = Utilities.getMRTasks(plan.getRootTasks()).size();
+			if (jobs > 0) {
+				console.printInfo("Total MapReduce jobs = " + jobs);
+			}
+			if (SessionState.get() != null) {
+				SessionState
+						.get()
+						.getHiveHistory()
+						.setQueryProperty(queryId, Keys.QUERY_NUM_TASKS,
+								String.valueOf(jobs));
+				SessionState.get().getHiveHistory()
+						.setIdToTableMap(plan.getIdToTableNameMap());
+			}
+			String jobname = Utilities.abbreviate(queryStr, maxlen - 6);
+
+			// A runtime that launches runnable tasks as separate Threads
+			// through
+			// TaskRunners
+			// As soon as a task isRunnable, it is put in a queue
+			// At any time, at most maxthreads tasks can be running
+			// The main thread polls the TaskRunners to check if they have
+			// finished.
+
+			Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
+			Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner>();
+
+			DriverContext driverCxt = new DriverContext(runnable, ctx);
+
+			// Add root Tasks to runnable
+
+			for (Task<? extends Serializable> tsk : plan.getRootTasks()) {
+				driverCxt.addToRunnable(tsk);
+			}
+
+			// Loop while you either have tasks running, or tasks queued up
+
+			while (running.size() != 0 || runnable.peek() != null) {
+				// Launch upto maxthreads tasks
+				while (runnable.peek() != null && running.size() < maxthreads) {
+					Task<? extends Serializable> tsk = runnable.remove();
+					console.printInfo("executing task " + tsk.getName());
+					launchTask(tsk, queryId, noName, running, jobname, jobs,
+							driverCxt);
+				}
+
+				// poll the Tasks to see which one completed
+				TaskResult tskRes = pollTasks(running.keySet());
+				TaskRunner tskRun = running.remove(tskRes);
+				Task<? extends Serializable> tsk = tskRun.getTask();
+				hookContext.addCompleteTask(tskRun);
+
+				int exitVal = tskRes.getExitVal();
+				if (exitVal != 0) {
+					// on failure, fall back to the task's backup task when
+					// one exists; otherwise abort the whole query
+					Task<? extends Serializable> backupTask = tsk
+							.getAndInitBackupTask();
+					if (backupTask != null) {
+						errorMessage = "FAILED: Execution Error, return code "
+								+ exitVal + " from " + tsk.getClass().getName();
+						console.printError(errorMessage);
+
+						errorMessage = "ATTEMPT: Execute BackupTask: "
+								+ backupTask.getClass().getName();
+						console.printError(errorMessage);
+
+						// add backup task to runnable
+						if (DriverContext.isLaunchable(backupTask)) {
+							driverCxt.addToRunnable(backupTask);
+						}
+						continue;
+
+					} else {
+						// TODO: This error messaging is not very informative.
+						// Fix that.
+						errorMessage = "FAILED: Execution Error, return code "
+								+ exitVal + " from " + tsk.getClass().getName();
+						SQLState = "08S01";
+						console.printError(errorMessage);
+						if (running.size() != 0) {
+							taskCleanup();
+						}
+						// in case we decided to run everything in local mode,
+						// restore the
+						// the jobtracker setting to its initial value
+						ctx.restoreOriginalTracker();
+						return 9;
+					}
+				}
+
+				if (SessionState.get() != null) {
+					SessionState
+							.get()
+							.getHiveHistory()
+							.setTaskProperty(queryId, tsk.getId(),
+									Keys.TASK_RET_CODE, String.valueOf(exitVal));
+					SessionState.get().getHiveHistory().endTask(queryId, tsk);
+				}
+
+				if (tsk.getChildTasks() != null) {
+					for (Task<? extends Serializable> child : tsk
+							.getChildTasks()) {
+						// hivesterix: don't check launchable condition
+						// if (DriverContext.isLaunchable(child)) {
+						driverCxt.addToRunnable(child);
+						// }
+					}
+				}
+			}
+
+			// in case we decided to run everything in local mode, restore the
+			// the jobtracker setting to its initial value
+			ctx.restoreOriginalTracker();
+
+			// remove incomplete outputs.
+			// Some incomplete outputs may be added at the beginning, for eg:
+			// for dynamic partitions.
+			// remove them
+			HashSet<WriteEntity> remOutputs = new HashSet<WriteEntity>();
+			for (WriteEntity output : plan.getOutputs()) {
+				if (!output.isComplete()) {
+					remOutputs.add(output);
+				}
+			}
+
+			for (WriteEntity output : remOutputs) {
+				plan.getOutputs().remove(output);
+			}
+
+			// Get all the post execution hooks and execute them.
+			for (Hook peh : getPostExecHooks()) {
+				if (peh instanceof ExecuteWithHookContext) {
+					((ExecuteWithHookContext) peh).run(hookContext);
+				} else if (peh instanceof PostExecute) {
+					((PostExecute) peh)
+							.run(SessionState.get(),
+									plan.getInputs(),
+									plan.getOutputs(),
+									(SessionState.get() != null ? SessionState
+											.get().getLineageState()
+											.getLineageInfo() : null),
+									ShimLoader.getHadoopShims().getUGIForConf(
+											conf));
+				}
+			}
+
+			if (SessionState.get() != null) {
+				SessionState
+						.get()
+						.getHiveHistory()
+						.setQueryProperty(queryId, Keys.QUERY_RET_CODE,
+								String.valueOf(0));
+				SessionState.get().getHiveHistory().printRowCount(queryId);
+			}
+		} catch (Exception e) {
+			if (SessionState.get() != null) {
+				SessionState
+						.get()
+						.getHiveHistory()
+						.setQueryProperty(queryId, Keys.QUERY_RET_CODE,
+								String.valueOf(12));
+			}
+			// TODO: do better with handling types of Exception here
+			errorMessage = "FAILED: Hive Internal Error: "
+					+ Utilities.getNameMessage(e);
+			SQLState = "08S01";
+			console.printError(errorMessage + "\n"
+					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
+			return (12);
+		} finally {
+			if (SessionState.get() != null) {
+				SessionState.get().getHiveHistory().endQuery(queryId);
+			}
+			if (noName) {
+				conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, "");
+			}
+		}
+		plan.setDone();
+
+		if (SessionState.get() != null) {
+			try {
+				SessionState.get().getHiveHistory().logPlanProgress(plan);
+			} catch (Exception e) {
+				// history logging is best-effort; failures are ignored
+			}
+		}
+		console.printInfo("OK");
+
+		return (0);
+	}
+
+	/**
+	 * Launches a new task
+	 * 
+	 * @param tsk
+	 *            task being launched
+	 * @param queryId
+	 *            Id of the query containing the task
+	 * @param noName
+	 *            whether the task has a name set
+	 * @param running
+	 *            map from taskresults to taskrunners
+	 * @param jobname
+	 *            name of the task, if it is a map-reduce job
+	 * @param jobs
+	 *            number of map-reduce jobs
+	 * @param cxt
+	 *            the driver context holding the job counter and the queue of
+	 *            runnable tasks
+	 */
+
+	public void launchTask(Task<? extends Serializable> tsk, String queryId,
+			boolean noName, Map<TaskResult, TaskRunner> running,
+			String jobname, int jobs, DriverContext cxt) {
+
+		if (SessionState.get() != null) {
+			SessionState.get().getHiveHistory()
+					.startTask(queryId, tsk, tsk.getClass().getName());
+		}
+		if (tsk.isMapRedTask() && !(tsk instanceof ConditionalTask)) {
+			if (noName) {
+				// tag the hadoop job name with the task id so jobs are
+				// attributable to this query
+				conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "("
+						+ tsk.getId() + ")");
+			}
+			cxt.incCurJobNo(1);
+			console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of "
+					+ jobs);
+		}
+		tsk.initialize(conf, plan, cxt);
+		TaskResult tskRes = new TaskResult();
+		TaskRunner tskRun = new TaskRunner(tsk, tskRes);
+
+		// HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL) &&
+		// Launch Task: hivesterix tweak
+		if (tsk instanceof MapRedTask || tsk instanceof StatsTask) {
+			// hivesterix tweak: MR/stats tasks are not actually run here —
+			// they are marked as already finished with exit code 0
+			tskRes.setRunning(false);
+			tskRes.setExitVal(0);
+		} else if (tsk instanceof ConditionalTask) {
+			ConditionalTask condTask = (ConditionalTask) tsk;
+			ConditionalResolver crs = condTask.getResolver();
+			if (crs instanceof ConditionalResolverMergeFiles) {
+				tskRes.setRunning(false);
+				tskRes.setExitVal(0);
+
+				// enqueue the conditional task's MapRed children directly
+				List<Task<? extends Serializable>> children = condTask
+						.getListTasks();
+				for (Task<? extends Serializable> child : children)
+					if (child instanceof MapRedTask)
+						cxt.addToRunnable(child);
+			}
+		} else {
+			// everything else runs synchronously on this thread
+			tskRun.runSequential();
+		}
+		running.put(tskRes, tskRun);
+		return;
+	}
+
+	/**
+	 * Cleans up remaining tasks in case of failure
+	 */
+
+	public void taskCleanup() {
+		// The currently existing Shutdown hooks will be automatically called,
+		// killing the map-reduce processes.
+		// The non MR processes will be killed as well.
+		// NOTE: this terminates the entire JVM with exit code 9.
+		System.exit(9);
+	}
+
+	/**
+	 * Polls running tasks to see if a task has ended.
+	 * 
+	 * @param results
+	 *            Set of result objects for running tasks
+	 * @return The result object for any completed/failed task
+	 */
+
+	public TaskResult pollTasks(Set<TaskResult> results) {
+		// busy-poll the result set; blocks until some task stops running
+		Iterator<TaskResult> resultIterator = results.iterator();
+		while (true) {
+			while (resultIterator.hasNext()) {
+				TaskResult tskRes = resultIterator.next();
+				if (tskRes.isRunning() == false) {
+					return tskRes;
+				}
+			}
+
+			// In this loop, nothing was found
+			// Sleep and restart; the interval is the 'sleeptime' field
+			// (declared outside this view — the "10 seconds" in the original
+			// comment was not verifiable here)
+			try {
+				Thread.sleep(sleeptime);
+			} catch (InterruptedException ie) {
+				// Do Nothing
+				;
+			}
+			resultIterator = results.iterator();
+		}
+	}
+
+	/**
+	 * Fetches up to maxRows result rows into res. Uses the plan's FetchTask
+	 * when one exists; otherwise reads rows column-by-column from the
+	 * context's result stream(s).
+	 * 
+	 * @param res
+	 *            destination list for fetched rows
+	 * @return true if rows were fetched (or maxRows reached); false when no
+	 *         stream is available, it is exhausted, or an IO error occurred
+	 * @throws IOException
+	 */
+	public boolean getResults(ArrayList<String> res) throws IOException {
+		if (plan != null && plan.getFetchTask() != null) {
+			FetchTask ft = plan.getFetchTask();
+			ft.setMaxRows(maxRows);
+			return ft.fetch(res);
+		}
+
+		if (resStream == null) {
+			resStream = ctx.getStream();
+		}
+		if (resStream == null) {
+			return false;
+		}
+
+		int numRows = 0;
+		String row = null;
+
+		while (numRows < maxRows) {
+			if (resStream == null) {
+				if (numRows > 0) {
+					return true;
+				} else {
+					return false;
+				}
+			}
+
+			bos.reset();
+			Utilities.StreamStatus ss;
+			try {
+				ss = Utilities.readColumn(resStream, bos);
+				if (bos.getCount() > 0) {
+					row = new String(bos.getData(), 0, bos.getCount(), "UTF-8");
+				} else if (ss == Utilities.StreamStatus.TERMINATED) {
+					// a terminated, zero-length column is an empty row
+					row = new String();
+				}
+
+				if (row != null) {
+					numRows++;
+					res.add(row);
+				}
+			} catch (IOException e) {
+				console.printError("FAILED: Unexpected IO exception : "
+						+ e.getMessage());
+				// NOTE(review): reassigning the local parameter has no effect
+				// for the caller
+				res = null;
+				return false;
+			}
+
+			// current stream exhausted: advance to the next one, if any
+			if (ss == Utilities.StreamStatus.EOF) {
+				resStream = ctx.getStream();
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Releases per-query resources: clears the fetch task, the context, and
+	 * closes the result stream. Individual cleanup failures are logged at
+	 * debug level and do not abort the rest of the cleanup.
+	 * 
+	 * @return 0 on success, 13 on an unexpected internal error
+	 */
+	public int close() {
+		try {
+			if (plan != null) {
+				FetchTask fetchTask = plan.getFetchTask();
+				if (null != fetchTask) {
+					try {
+						fetchTask.clearFetch();
+					} catch (Exception e) {
+						LOG.debug(" Exception while clearing the Fetch task ",
+								e);
+					}
+				}
+			}
+			if (ctx != null) {
+				ctx.clear();
+			}
+			if (null != resStream) {
+				try {
+					((FSDataInputStream) resStream).close();
+				} catch (Exception e) {
+					LOG.debug(" Exception while closing the resStream ", e);
+				}
+			}
+		} catch (Exception e) {
+			console.printError("FAILED: Hive Internal Error: "
+					+ Utilities.getNameMessage(e) + "\n"
+					+ org.apache.hadoop.util.StringUtils.stringifyException(e));
+			return 13;
+		}
+
+		return 0;
+	}
+
+	/** Tears down the driver, releasing any implicitly acquired locks. */
+	public void destroy() {
+		releaseLocks();
+	}
+
+	/**
+	 * @return the Thrift representation of the compiled query plan
+	 * @throws IOException
+	 */
+	public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
+			throws IOException {
+		return plan.getQueryPlan();
+	}
+
+	/** @return the configured retry count for this driver */
+	public int getTryCount() {
+		return tryCount;
+	}
+
+	/** @param tryCount the retry count to use for this driver */
+	public void setTryCount(int tryCount) {
+		this.tryCount = tryCount;
+	}
+}
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
new file mode 100644
index 0000000..b174432
--- /dev/null
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.util.StringUtils;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * GenericUDAFAverage.
+ * 
+ */
+@Description(name = "avg", value = "_FUNC_(x) - Returns the mean of a set of numbers")
+public class GenericUDAFAverage extends AbstractGenericUDAFResolver {
+
+	static final Log LOG = LogFactory
+			.getLog(GenericUDAFAverage.class.getName());
+
+	/**
+	 * Validates that exactly one primitive, numeric-or-string argument was
+	 * supplied and returns the average evaluator for it.
+	 */
+	@Override
+	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+			throws SemanticException {
+		if (parameters.length != 1) {
+			throw new UDFArgumentTypeException(parameters.length - 1,
+					"Exactly one argument is expected.");
+		}
+
+		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+			throw new UDFArgumentTypeException(0,
+					"Only primitive type arguments are accepted but "
+							+ parameters[0].getTypeName() + " is passed.");
+		}
+		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+		case BYTE:
+		case SHORT:
+		case INT:
+		case LONG:
+		case FLOAT:
+		case DOUBLE:
+		case STRING:
+			return new GenericUDAFAverageEvaluator();
+		case BOOLEAN:
+		default:
+			throw new UDFArgumentTypeException(0,
+					"Only numeric or string type arguments are accepted but "
+							+ parameters[0].getTypeName() + " is passed.");
+		}
+	}
+
+	/**
+	 * GenericUDAFAverageEvaluator.
+	 * 
+	 * Computes avg(x) as a running (count, sum) pair; partial results are a
+	 * struct {count: long, sum: double}, the final result a double.
+	 */
+	public static class GenericUDAFAverageEvaluator extends
+			GenericUDAFEvaluator {
+
+		// For PARTIAL1 and COMPLETE
+		PrimitiveObjectInspector inputOI;
+
+		// For PARTIAL2 and FINAL
+		StructObjectInspector soi;
+		StructField countField;
+		StructField sumField;
+		LongObjectInspector countFieldOI;
+		DoubleObjectInspector sumFieldOI;
+
+		// For PARTIAL1 and PARTIAL2
+		Object[] partialResult;
+
+		// For FINAL and COMPLETE
+		DoubleWritable result;
+
+		/**
+		 * Configures input/output object inspectors for the given mode:
+		 * raw-value input for PARTIAL1/COMPLETE, (count, sum) struct input
+		 * otherwise; struct output for PARTIAL1/PARTIAL2, double otherwise.
+		 */
+		@Override
+		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			super.init(m, parameters);
+
+			// init input
+			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+				inputOI = (PrimitiveObjectInspector) parameters[0];
+			} else {
+				soi = (StructObjectInspector) parameters[0];
+				countField = soi.getStructFieldRef("count");
+				sumField = soi.getStructFieldRef("sum");
+				countFieldOI = (LongObjectInspector) countField
+						.getFieldObjectInspector();
+				sumFieldOI = (DoubleObjectInspector) sumField
+						.getFieldObjectInspector();
+			}
+
+			// init output
+			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+				// The output of a partial aggregation is a struct containing
+				// a "long" count and a "double" sum.
+
+				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				ArrayList<String> fname = new ArrayList<String>();
+				fname.add("count");
+				fname.add("sum");
+				partialResult = new Object[2];
+				partialResult[0] = new LongWritable(0);
+				partialResult[1] = new DoubleWritable(0);
+				return ObjectInspectorFactory.getStandardStructObjectInspector(
+						fname, foi);
+
+			} else {
+				result = new DoubleWritable(0);
+				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+			}
+		}
+
+		// Aggregation state: serialized as a fixed 16-byte layout, an 8-byte
+		// long count followed by an 8-byte double sum.
+		static class AverageAgg implements SerializableBuffer {
+			long count;
+			double sum;
+
+			@Override
+			public void deSerializeAggBuffer(byte[] data, int start, int len) {
+				count = BufferSerDeUtil.getLong(data, start);
+				start += 8;
+				sum = BufferSerDeUtil.getDouble(data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(byte[] data, int start, int len) {
+				BufferSerDeUtil.writeLong(count, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(sum, data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(DataOutput output)
+					throws IOException {
+				output.writeLong(count);
+				output.writeDouble(sum);
+			}
+		};
+
+		@Override
+		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+			AverageAgg result = new AverageAgg();
+			reset(result);
+			return result;
+		}
+
+		/** Resets the aggregation state to zero count and zero sum. */
+		@Override
+		public void reset(AggregationBuffer agg) throws HiveException {
+			AverageAgg myagg = (AverageAgg) agg;
+			myagg.count = 0;
+			myagg.sum = 0;
+		}
+
+		// whether a malformed-number warning has already been emitted
+		boolean warned = false;
+
+		/**
+		 * Accumulates one row: null values are skipped; unparsable values
+		 * are skipped with a warn-once log message.
+		 */
+		@Override
+		public void iterate(AggregationBuffer agg, Object[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			Object p = parameters[0];
+			if (p != null) {
+				AverageAgg myagg = (AverageAgg) agg;
+				try {
+					double v = PrimitiveObjectInspectorUtils.getDouble(p,
+							inputOI);
+					myagg.count++;
+					myagg.sum += v;
+				} catch (NumberFormatException e) {
+					if (!warned) {
+						warned = true;
+						LOG.warn(getClass().getSimpleName() + " "
+								+ StringUtils.stringifyException(e));
+						LOG.warn(getClass().getSimpleName()
+								+ " ignoring similar exceptions.");
+					}
+				}
+			}
+		}
+
+		/** Emits the (count, sum) pair; the shared partialResult is reused. */
+		@Override
+		public Object terminatePartial(AggregationBuffer agg)
+				throws HiveException {
+			AverageAgg myagg = (AverageAgg) agg;
+			((LongWritable) partialResult[0]).set(myagg.count);
+			((DoubleWritable) partialResult[1]).set(myagg.sum);
+			return partialResult;
+		}
+
+		/** Folds another (count, sum) partial into this aggregation state. */
+		@Override
+		public void merge(AggregationBuffer agg, Object partial)
+				throws HiveException {
+			if (partial != null) {
+				AverageAgg myagg = (AverageAgg) agg;
+				Object partialCount = soi.getStructFieldData(partial,
+						countField);
+				Object partialSum = soi.getStructFieldData(partial, sumField);
+				myagg.count += countFieldOI.get(partialCount);
+				myagg.sum += sumFieldOI.get(partialSum);
+			}
+		}
+
+		/** Returns sum/count, or null when no non-null rows were seen. */
+		@Override
+		public Object terminate(AggregationBuffer agg) throws HiveException {
+			AverageAgg myagg = (AverageAgg) agg;
+			if (myagg.count == 0) {
+				return null;
+			} else {
+				result.set(myagg.sum / myagg.count);
+				return result;
+			}
+		}
+	}
+
+}
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
new file mode 100644
index 0000000..716faac
--- /dev/null
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
@@ -0,0 +1,428 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * Compute the Pearson correlation coefficient corr(x, y), using the following
+ * stable one-pass method, based on: "Formulas for Robust, One-Pass Parallel
+ * Computation of Covariances and Arbitrary-Order Statistical Moments", Philippe
+ * Pebay, Sandia Labs and
+ * "The Art of Computer Programming, volume 2: Seminumerical Algorithms", Donald
+ * Knuth.
+ * 
+ * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg> my_n =
+ * my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n
+ * - my_n) : <covariance * n> vx_n = vx_(n-1) + (x_n - mx_n)(x_n - mx_(n-1)):
+ * <variance * n> vy_n = vy_(n-1) + (y_n - my_n)(y_n - my_(n-1)): <variance * n>
+ * 
+ * Merge: c_(A,B) = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
+ * vx_(A,B) = vx_A + vx_B + (mx_A - mx_B)*(mx_A - mx_B)*n_A*n_B/(n_A+n_B)
+ * vy_(A,B) = vy_A + vy_B + (my_A - my_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
+ * 
+ */
+@Description(name = "corr", value = "_FUNC_(x,y) - Returns the Pearson coefficient of correlation\n"
+		+ "between a set of number pairs", extended = "The function takes as arguments any pair of numeric types and returns a double.\n"
+		+ "Any pair with a NULL is ignored. If the function is applied to an empty set or\n"
+		+ "a singleton set, NULL will be returned. Otherwise, it computes the following:\n"
+		+ "   COVAR_POP(x,y)/(STDDEV_POP(x)*STDDEV_POP(y))\n"
+		+ "where neither x nor y is null,\n"
+		+ "COVAR_POP is the population covariance,\n"
+		+ "and STDDEV_POP is the population standard deviation.")
+public class GenericUDAFCorrelation extends AbstractGenericUDAFResolver {
+
+	static final Log LOG = LogFactory.getLog(GenericUDAFCorrelation.class
+			.getName());
+
+	/**
+	 * Type-checks corr(x, y): exactly two arguments, both primitive and
+	 * both numeric (BYTE/SHORT/INT/LONG/FLOAT/DOUBLE). The offending
+	 * argument index is reported in each rejection.
+	 *
+	 * @return the evaluator implementing the correlation aggregate
+	 * @throws SemanticException if the argument types are unacceptable
+	 */
+	@Override
+	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+			throws SemanticException {
+		if (parameters.length != 2) {
+			throw new UDFArgumentTypeException(parameters.length - 1,
+					"Exactly two arguments are expected.");
+		}
+
+		// Both arguments must be primitive before their categories are read.
+		for (int i = 0; i < 2; i++) {
+			if (parameters[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+				throw new UDFArgumentTypeException(i,
+						"Only primitive type arguments are accepted but "
+								+ parameters[i].getTypeName() + " is passed.");
+			}
+		}
+
+		// Both arguments must additionally be of a numeric category.
+		for (int i = 0; i < 2; i++) {
+			switch (((PrimitiveTypeInfo) parameters[i]).getPrimitiveCategory()) {
+			case BYTE:
+			case SHORT:
+			case INT:
+			case LONG:
+			case FLOAT:
+			case DOUBLE:
+				break;
+			default:
+				throw new UDFArgumentTypeException(i,
+						"Only numeric type arguments are accepted but "
+								+ parameters[i].getTypeName() + " is passed.");
+			}
+		}
+
+		return new GenericUDAFCorrelationEvaluator();
+	}
+
+	/**
+	 * Evaluate the Pearson correlation coefficient using a stable one-pass
+	 * algorithm, based on work by Philippe Pébay and Donald Knuth.
+	 * 
+	 * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+	 * my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n -
+	 * mx_(n-1))*(y_n - my_n) : <covariance * n> vx_n = vx_(n-1) + (x_n -
+	 * mx_n)(x_n - mx_(n-1)): <variance * n> vy_n = vy_(n-1) + (y_n - my_n)(y_n
+	 * - my_(n-1)): <variance * n>
+	 * 
+	 * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X vx_(A,B)
+	 * = vx_A + vx_B + (mx_A - mx_B)*(mx_A - mx_B)*n_A*n_B/(n_A+n_B) vy_(A,B) =
+	 * vy_A + vy_B + (my_A - my_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
+	 * 
+	 */
+	public static class GenericUDAFCorrelationEvaluator extends
+			GenericUDAFEvaluator {
+
+		// For PARTIAL1 and COMPLETE
+		private PrimitiveObjectInspector xInputOI;
+		private PrimitiveObjectInspector yInputOI;
+
+		// For PARTIAL2 and FINAL
+		private StructObjectInspector soi;
+		private StructField countField;
+		private StructField xavgField;
+		private StructField yavgField;
+		private StructField xvarField;
+		private StructField yvarField;
+		private StructField covarField;
+		private LongObjectInspector countFieldOI;
+		private DoubleObjectInspector xavgFieldOI;
+		private DoubleObjectInspector yavgFieldOI;
+		private DoubleObjectInspector xvarFieldOI;
+		private DoubleObjectInspector yvarFieldOI;
+		private DoubleObjectInspector covarFieldOI;
+
+		// For PARTIAL1 and PARTIAL2
+		private Object[] partialResult;
+
+		// For FINAL and COMPLETE
+		private DoubleWritable result;
+
+		/**
+		 * Wires up input and output object inspectors for the given mode.
+		 * PARTIAL1/COMPLETE read the two raw numeric columns; PARTIAL2/FINAL
+		 * read the intermediate {count, xavg, yavg, xvar, yvar, covar}
+		 * struct. PARTIAL1/PARTIAL2 emit that struct; FINAL/COMPLETE emit a
+		 * single double.
+		 */
+		@Override
+		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+				throws HiveException {
+			super.init(m, parameters);
+
+			// init input
+			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+				assert (parameters.length == 2);
+				xInputOI = (PrimitiveObjectInspector) parameters[0];
+				yInputOI = (PrimitiveObjectInspector) parameters[1];
+			} else {
+				// PARTIAL2/FINAL: single struct-typed parameter holding the
+				// partial aggregation produced by terminatePartial().
+				assert (parameters.length == 1);
+				soi = (StructObjectInspector) parameters[0];
+
+				countField = soi.getStructFieldRef("count");
+				xavgField = soi.getStructFieldRef("xavg");
+				yavgField = soi.getStructFieldRef("yavg");
+				xvarField = soi.getStructFieldRef("xvar");
+				yvarField = soi.getStructFieldRef("yvar");
+				covarField = soi.getStructFieldRef("covar");
+
+				countFieldOI = (LongObjectInspector) countField
+						.getFieldObjectInspector();
+				xavgFieldOI = (DoubleObjectInspector) xavgField
+						.getFieldObjectInspector();
+				yavgFieldOI = (DoubleObjectInspector) yavgField
+						.getFieldObjectInspector();
+				xvarFieldOI = (DoubleObjectInspector) xvarField
+						.getFieldObjectInspector();
+				yvarFieldOI = (DoubleObjectInspector) yvarField
+						.getFieldObjectInspector();
+				covarFieldOI = (DoubleObjectInspector) covarField
+						.getFieldObjectInspector();
+			}
+
+			// init output
+			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+				// The output of a partial aggregation is a struct containing
+				// a long count, two double averages, two double variances,
+				// and a double covariance.
+
+				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+				ArrayList<String> fname = new ArrayList<String>();
+				fname.add("count");
+				fname.add("xavg");
+				fname.add("yavg");
+				fname.add("xvar");
+				fname.add("yvar");
+				fname.add("covar");
+
+				// Writables are allocated once and reused by
+				// terminatePartial() for every group.
+				partialResult = new Object[6];
+				partialResult[0] = new LongWritable(0);
+				partialResult[1] = new DoubleWritable(0);
+				partialResult[2] = new DoubleWritable(0);
+				partialResult[3] = new DoubleWritable(0);
+				partialResult[4] = new DoubleWritable(0);
+				partialResult[5] = new DoubleWritable(0);
+
+				return ObjectInspectorFactory.getStandardStructObjectInspector(
+						fname, foi);
+
+			} else {
+				// FINAL/COMPLETE: the result is a single double.
+				setResult(new DoubleWritable(0));
+				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+			}
+		}
+
+		/**
+		 * Aggregation state for corr(x, y): one long followed by five
+		 * doubles, serialized as a fixed 48-byte record. The field order
+		 * (count, xavg, yavg, xvar, yvar, covar) must be identical in both
+		 * serialization paths and in deserialization.
+		 */
+		static class StdAgg implements SerializableBuffer {
+			long count; // number n of elements
+			double xavg; // average of x elements
+			double yavg; // average of y elements
+			double xvar; // n times the variance of x elements
+			double yvar; // n times the variance of y elements
+			double covar; // n times the covariance
+
+			@Override
+			public void deSerializeAggBuffer(byte[] data, int start, int len) {
+				// Fields are read back in the exact order they were written.
+				count = BufferSerDeUtil.getLong(data, start);
+				start += 8;
+				xavg = BufferSerDeUtil.getDouble(data, start);
+				start += 8;
+				yavg = BufferSerDeUtil.getDouble(data, start);
+				start += 8;
+				xvar = BufferSerDeUtil.getDouble(data, start);
+				start += 8;
+				yvar = BufferSerDeUtil.getDouble(data, start);
+				start += 8;
+				covar = BufferSerDeUtil.getDouble(data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(byte[] data, int start, int len) {
+				// Each field occupies 8 bytes; 48 bytes total.
+				BufferSerDeUtil.writeLong(count, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(xavg, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(yavg, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(xvar, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(yvar, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(covar, data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(DataOutput output)
+					throws IOException {
+				// Stream form mirrors the byte-array layout above.
+				output.writeLong(count);
+				output.writeDouble(xavg);
+				output.writeDouble(yavg);
+				output.writeDouble(xvar);
+				output.writeDouble(yvar);
+				output.writeDouble(covar);
+			}
+		};
+
+		/** Allocates a zeroed aggregation buffer for a new group. */
+		@Override
+		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+			StdAgg buffer = new StdAgg();
+			reset(buffer);
+			return buffer;
+		}
+
+		/** Clears all six running statistics so the buffer can be reused. */
+		@Override
+		public void reset(AggregationBuffer agg) throws HiveException {
+			StdAgg state = (StdAgg) agg;
+			state.count = 0;
+			state.xavg = 0;
+			state.yavg = 0;
+			state.xvar = 0;
+			state.yvar = 0;
+			state.covar = 0;
+		}
+
+		/**
+		 * Folds one (x, y) pair into the running statistics using the stable
+		 * one-pass update from the class-level comment. A pair with any NULL
+		 * is ignored. Statement order matters: the pre-update averages are
+		 * captured first, because each moment term mixes the old and the new
+		 * average of its variable.
+		 */
+		@Override
+		public void iterate(AggregationBuffer agg, Object[] parameters)
+				throws HiveException {
+			assert (parameters.length == 2);
+			Object px = parameters[0];
+			Object py = parameters[1];
+			if (px != null && py != null) {
+				StdAgg myagg = (StdAgg) agg;
+				double vx = PrimitiveObjectInspectorUtils.getDouble(px,
+						xInputOI);
+				double vy = PrimitiveObjectInspectorUtils.getDouble(py,
+						yInputOI);
+				// Keep mx_(n-1) and my_(n-1) before advancing the averages.
+				double xavgOld = myagg.xavg;
+				double yavgOld = myagg.yavg;
+				myagg.count++;
+				myagg.xavg += (vx - xavgOld) / myagg.count;
+				myagg.yavg += (vy - yavgOld) / myagg.count;
+				// For the first element all moment terms are zero by
+				// definition, so they are only updated from n = 2 on.
+				if (myagg.count > 1) {
+					myagg.covar += (vx - xavgOld) * (vy - myagg.yavg);
+					myagg.xvar += (vx - xavgOld) * (vx - myagg.xavg);
+					myagg.yvar += (vy - yavgOld) * (vy - myagg.yavg);
+				}
+			}
+		}
+
+		/**
+		 * Publishes the running state as the reusable partial struct
+		 * {count, xavg, yavg, xvar, yvar, covar}.
+		 */
+		@Override
+		public Object terminatePartial(AggregationBuffer agg)
+				throws HiveException {
+			StdAgg state = (StdAgg) agg;
+			((LongWritable) partialResult[0]).set(state.count);
+			((DoubleWritable) partialResult[1]).set(state.xavg);
+			((DoubleWritable) partialResult[2]).set(state.yavg);
+			((DoubleWritable) partialResult[3]).set(state.xvar);
+			((DoubleWritable) partialResult[4]).set(state.yvar);
+			((DoubleWritable) partialResult[5]).set(state.covar);
+			return partialResult;
+		}
+
+		/**
+		 * Combines a partial {count, xavg, yavg, xvar, yvar, covar} struct
+		 * into the running aggregation using the pairwise update documented
+		 * on this class:
+		 *
+		 *   v_(A,B) = v_A + v_B + (m_A - m_B)^2 * n_A*n_B/(n_A+n_B)
+		 *   c_(A,B) = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
+		 *
+		 * A null partial (empty partition) is a no-op; an empty local state
+		 * simply adopts the partial.
+		 */
+		@Override
+		public void merge(AggregationBuffer agg, Object partial)
+				throws HiveException {
+			if (partial != null) {
+				StdAgg myagg = (StdAgg) agg;
+
+				Object partialCount = soi.getStructFieldData(partial,
+						countField);
+				Object partialXAvg = soi.getStructFieldData(partial, xavgField);
+				Object partialYAvg = soi.getStructFieldData(partial, yavgField);
+				Object partialXVar = soi.getStructFieldData(partial, xvarField);
+				Object partialYVar = soi.getStructFieldData(partial, yvarField);
+				Object partialCovar = soi.getStructFieldData(partial,
+						covarField);
+
+				long nA = myagg.count;
+				long nB = countFieldOI.get(partialCount);
+
+				if (nA == 0) {
+					// Nothing aggregated locally yet: adopt the partial as-is.
+					myagg.count = nB;
+					myagg.xavg = xavgFieldOI.get(partialXAvg);
+					myagg.yavg = yavgFieldOI.get(partialYAvg);
+					myagg.xvar = xvarFieldOI.get(partialXVar);
+					myagg.yvar = yvarFieldOI.get(partialYVar);
+					myagg.covar = covarFieldOI.get(partialCovar);
+				}
+
+				if (nA != 0 && nB != 0) {
+					// Merge the two non-empty partials.
+					double xavgA = myagg.xavg;
+					double yavgA = myagg.yavg;
+					double xavgB = xavgFieldOI.get(partialXAvg);
+					double yavgB = yavgFieldOI.get(partialYAvg);
+					double xvarB = xvarFieldOI.get(partialXVar);
+					double yvarB = yvarFieldOI.get(partialYVar);
+					double covarB = covarFieldOI.get(partialCovar);
+
+					myagg.count += nB;
+					myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
+					myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
+					// BUG FIX: the correction term is scaled by
+					// n_A*n_B/(n_A+n_B) — not by the merged count — per the
+					// pairwise formula in the class javadoc and matching the
+					// covariance line below. The previous code multiplied by
+					// myagg.count, grossly inflating xvar/yvar on merge.
+					myagg.xvar += xvarB + (xavgA - xavgB) * (xavgA - xavgB)
+							* ((double) (nA * nB) / myagg.count);
+					myagg.yvar += yvarB + (yavgA - yavgB) * (yavgA - yavgB)
+							* ((double) (nA * nB) / myagg.count);
+					myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB)
+							* ((double) (nA * nB) / myagg.count);
+				}
+			}
+		}
+
+		@Override
+		public Object terminate(AggregationBuffer agg) throws HiveException {
+			StdAgg myagg = (StdAgg) agg;
+
+			if (myagg.count < 2) { // SQL standard - return null for zero or one
+									// pair
+				return null;
+			} else {
+				getResult().set(
+						myagg.covar / java.lang.Math.sqrt(myagg.xvar)
+								/ java.lang.Math.sqrt(myagg.yvar));
+				return getResult();
+			}
+		}
+
+		/** Installs the reusable writable that terminate() fills per group. */
+		public void setResult(DoubleWritable result) {
+			this.result = result;
+		}
+
+		/** Returns the reusable result writable (null before init). */
+		public DoubleWritable getResult() {
+			return result;
+		}
+	}
+
+}
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
new file mode 100644
index 0000000..4160d5b
--- /dev/null
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * This class implements the COUNT aggregation function as in SQL.
+ */
+@Description(name = "count", value = "_FUNC_(*) - Returns the total number of retrieved rows, including "
+		+ "rows containing NULL values.\n"
+
+		+ "_FUNC_(expr) - Returns the number of rows for which the supplied "
+		+ "expression is non-NULL.\n"
+
+		+ "_FUNC_(DISTINCT expr[, expr...]) - Returns the number of rows for "
+		+ "which the supplied expression(s) are unique and non-NULL.")
+public class GenericUDAFCount implements GenericUDAFResolver2 {
+
+	/**
+	 * Legacy entry point: returns a plain COUNT(expr) evaluator with no
+	 * access to the DISTINCT / * flags.
+	 */
+	@Override
+	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+			throws SemanticException {
+		// This method implementation is preserved for backward compatibility.
+		return new GenericUDAFCountEvaluator();
+	}
+
+	/**
+	 * Validates the COUNT invocation form — COUNT(*), COUNT(expr), or
+	 * COUNT(DISTINCT expr, ...) — and configures the evaluator accordingly.
+	 *
+	 * @throws SemanticException if the argument combination is illegal
+	 */
+	@Override
+	public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo paramInfo)
+			throws SemanticException {
+		TypeInfo[] argTypes = paramInfo.getParameters();
+		boolean starForm = paramInfo.isAllColumns();
+
+		if (argTypes.length == 0) {
+			// An empty argument list is only legal as COUNT(*).
+			if (!starForm) {
+				throw new UDFArgumentException("Argument expected");
+			}
+			assert !paramInfo.isDistinct() : "DISTINCT not supported with *";
+		} else {
+			// Multiple expressions are only legal with DISTINCT.
+			if (argTypes.length > 1 && !paramInfo.isDistinct()) {
+				throw new UDFArgumentException(
+						"DISTINCT keyword must be specified");
+			}
+			assert !starForm : "* not supported in expression list";
+		}
+
+		return new GenericUDAFCountEvaluator().setCountAllColumns(starForm);
+	}
+
+	/**
+	 * GenericUDAFCountEvaluator.
+	 * 
+	 */
+	public static class GenericUDAFCountEvaluator extends GenericUDAFEvaluator {
+		private boolean countAllColumns = false;
+		private LongObjectInspector partialCountAggOI;
+		private LongWritable result;
+
+		/**
+		 * COUNT always consumes and produces longs, so the inspectors are
+		 * the same for every mode; the result writable is reused per group.
+		 */
+		@Override
+		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+				throws HiveException {
+			super.init(m, parameters);
+			partialCountAggOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+			result = new LongWritable(0);
+			return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+		}
+
+		/**
+		 * Switches between COUNT(*) (count every row) and COUNT(expr...)
+		 * semantics; returns this evaluator for chaining.
+		 */
+		private GenericUDAFCountEvaluator setCountAllColumns(
+				boolean countAllCols) {
+			countAllColumns = countAllCols;
+			return this;
+		}
+
+		/**
+		 * Class for storing the count value: a single long, serialized as
+		 * an 8-byte record in both the byte-array and stream forms.
+		 */
+		static class CountAgg implements SerializableBuffer {
+			long value;
+
+			@Override
+			public void deSerializeAggBuffer(byte[] data, int start, int len) {
+				value = BufferSerDeUtil.getLong(data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(byte[] data, int start, int len) {
+				BufferSerDeUtil.writeLong(value, data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(DataOutput output)
+					throws IOException {
+				output.writeLong(value);
+			}
+		}
+
+		/** Allocates a zeroed count buffer for a new group. */
+		@Override
+		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+			CountAgg freshAgg = new CountAgg();
+			reset(freshAgg);
+			return freshAgg;
+		}
+
+		@Override
+		public void reset(AggregationBuffer agg) throws HiveException {
+			((CountAgg) agg).value = 0;
+		}
+
+		/**
+		 * Adds one row to the count. Under COUNT(*) every row counts; under
+		 * COUNT(expr...) a row counts only when all expressions are non-NULL.
+		 */
+		@Override
+		public void iterate(AggregationBuffer agg, Object[] parameters)
+				throws HiveException {
+			// parameters == null means the input table/split is empty
+			if (parameters == null) {
+				return;
+			}
+			if (countAllColumns) {
+				assert parameters.length == 0;
+				((CountAgg) agg).value++;
+				return;
+			}
+			assert parameters.length > 0;
+			// Any NULL expression excludes the row from the count.
+			for (Object nextParam : parameters) {
+				if (nextParam == null) {
+					return;
+				}
+			}
+			((CountAgg) agg).value++;
+		}
+
+		/** Adds a partial count into the running total; null is a no-op. */
+		@Override
+		public void merge(AggregationBuffer agg, Object partial)
+				throws HiveException {
+			if (partial == null) {
+				return;
+			}
+			((CountAgg) agg).value += partialCountAggOI.get(partial);
+		}
+
+		@Override
+		public Object terminate(AggregationBuffer agg) throws HiveException {
+			result.set(((CountAgg) agg).value);
+			return result;
+		}
+
+		/**
+		 * The partial form of COUNT is just the count itself, so this
+		 * delegates to terminate().
+		 */
+		@Override
+		public Object terminatePartial(AggregationBuffer agg)
+				throws HiveException {
+			return terminate(agg);
+		}
+	}
+}
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
new file mode 100644
index 0000000..11d9dc3
--- /dev/null
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
@@ -0,0 +1,372 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * Compute the covariance covar_pop(x, y), using the following one-pass method
+ * (ref. "Formulas for Robust, One-Pass Parallel Computation of Covariances and
+ * Arbitrary-Order Statistical Moments", Philippe Pebay, Sandia Labs):
+ * 
+ * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg> my_n =
+ * my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n
+ * - my_n) : <covariance * n>
+ * 
+ * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
+ * 
+ */
+@Description(name = "covariance,covar_pop", value = "_FUNC_(x,y) - Returns the population covariance of a set of number pairs", extended = "The function takes as arguments any pair of numeric types and returns a double.\n"
+		+ "Any pair with a NULL is ignored. If the function is applied to an empty set, NULL\n"
+		+ "will be returned. Otherwise, it computes the following:\n"
+		+ "   (SUM(x*y)-SUM(x)*SUM(y)/COUNT(x,y))/COUNT(x,y)\n"
+		+ "where neither x nor y is null.")
+public class GenericUDAFCovariance extends AbstractGenericUDAFResolver {
+
+	static final Log LOG = LogFactory.getLog(GenericUDAFCovariance.class
+			.getName());
+
+	/**
+	 * Type-checks covar_pop(x, y): exactly two arguments, both primitive
+	 * and both numeric (BYTE/SHORT/INT/LONG/FLOAT/DOUBLE). The offending
+	 * argument index is reported in each rejection.
+	 *
+	 * @return the evaluator implementing the population covariance
+	 * @throws SemanticException if the argument types are unacceptable
+	 */
+	@Override
+	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+			throws SemanticException {
+		if (parameters.length != 2) {
+			throw new UDFArgumentTypeException(parameters.length - 1,
+					"Exactly two arguments are expected.");
+		}
+
+		// Both arguments must be primitive before their categories are read.
+		for (int i = 0; i < 2; i++) {
+			if (parameters[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+				throw new UDFArgumentTypeException(i,
+						"Only primitive type arguments are accepted but "
+								+ parameters[i].getTypeName() + " is passed.");
+			}
+		}
+
+		// Both arguments must additionally be of a numeric category.
+		for (int i = 0; i < 2; i++) {
+			switch (((PrimitiveTypeInfo) parameters[i]).getPrimitiveCategory()) {
+			case BYTE:
+			case SHORT:
+			case INT:
+			case LONG:
+			case FLOAT:
+			case DOUBLE:
+				break;
+			default:
+				throw new UDFArgumentTypeException(i,
+						"Only numeric or string type arguments are accepted but "
+								+ parameters[i].getTypeName() + " is passed.");
+			}
+		}
+
+		return new GenericUDAFCovarianceEvaluator();
+	}
+
+	/**
+	 * Evaluate the variance using the algorithm described in
+	 * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance,
+	 * presumably by Pébay, Philippe (2008), in "Formulas for Robust, One-Pass
+	 * Parallel Computation of Covariances and Arbitrary-Order Statistical
+	 * Moments", Technical Report SAND2008-6212, Sandia National Laboratories,
+	 * http://infoserve.sandia.gov/sand_doc/2008/086212.pdf
+	 * 
+	 * Incremental: n : <count> mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+	 * my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg> c_n = c_(n-1) + (x_n -
+	 * mx_(n-1))*(y_n - my_n) : <covariance * n>
+	 * 
+	 * Merge: c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
+	 * 
+	 * This one-pass algorithm is stable.
+	 * 
+	 */
+	public static class GenericUDAFCovarianceEvaluator extends
+			GenericUDAFEvaluator {
+
+		// For PARTIAL1 and COMPLETE
+		private PrimitiveObjectInspector xInputOI;
+		private PrimitiveObjectInspector yInputOI;
+
+		// For PARTIAL2 and FINAL
+		private StructObjectInspector soi;
+		private StructField countField;
+		private StructField xavgField;
+		private StructField yavgField;
+		private StructField covarField;
+		private LongObjectInspector countFieldOI;
+		private DoubleObjectInspector xavgFieldOI;
+		private DoubleObjectInspector yavgFieldOI;
+		private DoubleObjectInspector covarFieldOI;
+
+		// For PARTIAL1 and PARTIAL2
+		private Object[] partialResult;
+
+		// For FINAL and COMPLETE
+		private DoubleWritable result;
+
+		/**
+		 * Wires up input and output object inspectors for the given mode.
+		 * PARTIAL1/COMPLETE read the two raw numeric columns; PARTIAL2/FINAL
+		 * read the intermediate {count, xavg, yavg, covar} struct.
+		 * PARTIAL1/PARTIAL2 emit that struct; FINAL/COMPLETE emit a double.
+		 */
+		@Override
+		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+				throws HiveException {
+			super.init(m, parameters);
+
+			// init input
+			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+				assert (parameters.length == 2);
+				xInputOI = (PrimitiveObjectInspector) parameters[0];
+				yInputOI = (PrimitiveObjectInspector) parameters[1];
+			} else {
+				// PARTIAL2/FINAL: single struct-typed parameter holding the
+				// partial aggregation produced by terminatePartial().
+				assert (parameters.length == 1);
+				soi = (StructObjectInspector) parameters[0];
+
+				countField = soi.getStructFieldRef("count");
+				xavgField = soi.getStructFieldRef("xavg");
+				yavgField = soi.getStructFieldRef("yavg");
+				covarField = soi.getStructFieldRef("covar");
+
+				countFieldOI = (LongObjectInspector) countField
+						.getFieldObjectInspector();
+				xavgFieldOI = (DoubleObjectInspector) xavgField
+						.getFieldObjectInspector();
+				yavgFieldOI = (DoubleObjectInspector) yavgField
+						.getFieldObjectInspector();
+				covarFieldOI = (DoubleObjectInspector) covarField
+						.getFieldObjectInspector();
+			}
+
+			// init output
+			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+				// The output of a partial aggregation is a struct containing
+				// a long count, two double averages, and a double covariance.
+
+				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+				ArrayList<String> fname = new ArrayList<String>();
+				fname.add("count");
+				fname.add("xavg");
+				fname.add("yavg");
+				fname.add("covar");
+
+				// Writables are allocated once and reused by
+				// terminatePartial() for every group.
+				partialResult = new Object[4];
+				partialResult[0] = new LongWritable(0);
+				partialResult[1] = new DoubleWritable(0);
+				partialResult[2] = new DoubleWritable(0);
+				partialResult[3] = new DoubleWritable(0);
+
+				return ObjectInspectorFactory.getStandardStructObjectInspector(
+						fname, foi);
+
+			} else {
+				// FINAL/COMPLETE: the result is a single double.
+				setResult(new DoubleWritable(0));
+				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+			}
+		}
+
+		/**
+		 * Aggregation state for covar_pop(x, y): one long followed by three
+		 * doubles, serialized as a fixed 32-byte record. The field order
+		 * (count, xavg, yavg, covar) must be identical in both serialization
+		 * paths and in deserialization.
+		 */
+		static class StdAgg implements SerializableBuffer {
+			long count; // number n of elements
+			double xavg; // average of x elements
+			double yavg; // average of y elements
+			double covar; // n times the covariance
+
+			@Override
+			public void deSerializeAggBuffer(byte[] data, int start, int len) {
+				// Fields are read back in the exact order they were written.
+				count = BufferSerDeUtil.getLong(data, start);
+				start += 8;
+				xavg = BufferSerDeUtil.getDouble(data, start);
+				start += 8;
+				yavg = BufferSerDeUtil.getDouble(data, start);
+				start += 8;
+				covar = BufferSerDeUtil.getDouble(data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(byte[] data, int start, int len) {
+				// Each field occupies 8 bytes; 32 bytes total.
+				BufferSerDeUtil.writeLong(count, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(xavg, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(yavg, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(covar, data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(DataOutput output)
+					throws IOException {
+				// Stream form mirrors the byte-array layout above.
+				output.writeLong(count);
+				output.writeDouble(xavg);
+				output.writeDouble(yavg);
+				output.writeDouble(covar);
+			}
+		};
+
+		/** Allocates a fresh aggregation buffer, zeroed via {@link #reset}. */
+		@Override
+		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+			final StdAgg buffer = new StdAgg();
+			reset(buffer);
+			return buffer;
+		}
+
+		/** Clears all running statistics so the buffer can be reused. */
+		@Override
+		public void reset(AggregationBuffer agg) throws HiveException {
+			final StdAgg state = (StdAgg) agg;
+			state.count = 0L;
+			state.xavg = 0.0;
+			state.yavg = 0.0;
+			state.covar = 0.0;
+		}
+
+		/**
+		 * Folds one (x, y) pair into the running means and co-moment.
+		 * Rows where either value is null are skipped entirely (and do not
+		 * increment the count).
+		 */
+		@Override
+		public void iterate(AggregationBuffer agg, Object[] parameters)
+				throws HiveException {
+			assert (parameters.length == 2);
+			Object px = parameters[0];
+			Object py = parameters[1];
+			if (px != null && py != null) {
+				StdAgg myagg = (StdAgg) agg;
+				double vx = PrimitiveObjectInspectorUtils.getDouble(px,
+						xInputOI);
+				double vy = PrimitiveObjectInspectorUtils.getDouble(py,
+						yInputOI);
+				myagg.count++;
+				myagg.yavg = myagg.yavg + (vy - myagg.yavg) / myagg.count;
+				if (myagg.count > 1) {
+					// Online co-moment update: deliberately uses the NEW yavg
+					// together with the OLD xavg; xavg is only updated below.
+					myagg.covar += (vx - myagg.xavg) * (vy - myagg.yavg);
+				}
+				myagg.xavg = myagg.xavg + (vx - myagg.xavg) / myagg.count;
+			}
+		}
+
+		/** Packs the running statistics into the reusable partial struct. */
+		@Override
+		public Object terminatePartial(AggregationBuffer agg)
+				throws HiveException {
+			final StdAgg state = (StdAgg) agg;
+			((LongWritable) partialResult[0]).set(state.count);
+			((DoubleWritable) partialResult[1]).set(state.xavg);
+			((DoubleWritable) partialResult[2]).set(state.yavg);
+			((DoubleWritable) partialResult[3]).set(state.covar);
+			return partialResult;
+		}
+
+		/**
+		 * Merges one partial struct (count, xavg, yavg, covar) into this
+		 * buffer using the pairwise co-moment update:
+		 * covar = covarA + covarB + (xavgA-xavgB)*(yavgA-yavgB)*nA*nB/(nA+nB).
+		 */
+		@Override
+		public void merge(AggregationBuffer agg, Object partial)
+				throws HiveException {
+			if (partial != null) {
+				StdAgg myagg = (StdAgg) agg;
+
+				Object partialCount = soi.getStructFieldData(partial,
+						countField);
+				Object partialXAvg = soi.getStructFieldData(partial, xavgField);
+				Object partialYAvg = soi.getStructFieldData(partial, yavgField);
+				Object partialCovar = soi.getStructFieldData(partial,
+						covarField);
+
+				// nA is captured before any mutation, so at most one of the
+				// two branches below runs per call.
+				long nA = myagg.count;
+				long nB = countFieldOI.get(partialCount);
+
+				if (nA == 0) {
+					// Just copy the information since there is nothing so far
+					myagg.count = countFieldOI.get(partialCount);
+					myagg.xavg = xavgFieldOI.get(partialXAvg);
+					myagg.yavg = yavgFieldOI.get(partialYAvg);
+					myagg.covar = covarFieldOI.get(partialCovar);
+				}
+
+				// Empty partials (nB == 0) contribute nothing.
+				if (nA != 0 && nB != 0) {
+					// Merge the two partials
+					double xavgA = myagg.xavg;
+					double yavgA = myagg.yavg;
+					double xavgB = xavgFieldOI.get(partialXAvg);
+					double yavgB = yavgFieldOI.get(partialYAvg);
+					double covarB = covarFieldOI.get(partialCovar);
+
+					myagg.count += nB;
+					myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
+					myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
+					myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB)
+							* ((double) (nA * nB) / myagg.count);
+				}
+			}
+		}
+
+		/**
+		 * Final result: the population covariance covar / n, or null for an
+		 * empty group (SQL standard).
+		 */
+		@Override
+		public Object terminate(AggregationBuffer agg) throws HiveException {
+			final StdAgg state = (StdAgg) agg;
+			if (state.count == 0) {
+				// SQL standard - return null for zero elements
+				return null;
+			}
+			getResult().set(state.covar / state.count);
+			return getResult();
+		}
+
+		/** Installs the reusable result holder (set during init for FINAL/COMPLETE). */
+		public void setResult(DoubleWritable result) {
+			this.result = result;
+		}
+
+		/** @return the reusable result holder installed via {@link #setResult}. */
+		public DoubleWritable getResult() {
+			return result;
+		}
+	}
+
+}
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
new file mode 100644
index 0000000..0323531
--- /dev/null
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
@@ -0,0 +1,294 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.util.StringUtils;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * GenericUDAFSum.
+ * 
+ */
+@Description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers")
+public class GenericUDAFSum extends AbstractGenericUDAFResolver {
+
+	static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName());
+
+	/**
+	 * Picks the evaluator by argument type: integral types (byte/short/int/
+	 * long) sum as long; float/double/string sum as double; booleans and
+	 * non-primitives are rejected.
+	 */
+	@Override
+	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+			throws SemanticException {
+		if (parameters.length != 1) {
+			throw new UDFArgumentTypeException(parameters.length - 1,
+					"Exactly one argument is expected.");
+		}
+
+		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+			throw new UDFArgumentTypeException(0,
+					"Only primitive type arguments are accepted but "
+							+ parameters[0].getTypeName() + " is passed.");
+		}
+		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+		case BYTE:
+		case SHORT:
+		case INT:
+		case LONG:
+			return new GenericUDAFSumLong();
+		case FLOAT:
+		case DOUBLE:
+		case STRING:
+			return new GenericUDAFSumDouble();
+		case BOOLEAN:
+		default:
+			throw new UDFArgumentTypeException(0,
+					"Only numeric or string type arguments are accepted but "
+							+ parameters[0].getTypeName() + " is passed.");
+		}
+	}
+
+	/**
+	 * GenericUDAFSumDouble.
+	 * 
+	 */
+	public static class GenericUDAFSumDouble extends GenericUDAFEvaluator {
+		private PrimitiveObjectInspector inputOI;
+		private DoubleWritable result; // reusable output holder
+
+		/**
+		 * In every mode both the input (a raw value or a partial sum) and
+		 * the output are a single double, so one inspector covers all phases.
+		 */
+		@Override
+		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			super.init(m, parameters);
+			result = new DoubleWritable(0);
+			inputOI = (PrimitiveObjectInspector) parameters[0];
+			return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+		}
+
+		/** class for storing double sum value. */
+		static class SumDoubleAgg implements SerializableBuffer {
+			boolean empty; // true until the first non-null value is added
+			double sum;
+
+			// Fixed 9-byte layout: 1-byte boolean 'empty', then 8-byte
+			// double 'sum'; 'len' is unused because the layout is fixed.
+			@Override
+			public void deSerializeAggBuffer(byte[] data, int start, int len) {
+				empty = BufferSerDeUtil.getBoolean(data, start);
+				start += 1;
+				sum = BufferSerDeUtil.getDouble(data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(byte[] data, int start, int len) {
+				BufferSerDeUtil.writeBoolean(empty, data, start);
+				start += 1;
+				BufferSerDeUtil.writeDouble(sum, data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(DataOutput output)
+					throws IOException {
+				output.writeBoolean(empty);
+				output.writeDouble(sum);
+			}
+		}
+
+		@Override
+		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+			SumDoubleAgg result = new SumDoubleAgg();
+			reset(result);
+			return result;
+		}
+
+		@Override
+		public void reset(AggregationBuffer agg) throws HiveException {
+			SumDoubleAgg myagg = (SumDoubleAgg) agg;
+			myagg.empty = true;
+			myagg.sum = 0;
+		}
+
+		boolean warned = false; // rate-limits NumberFormatException logging
+
+		/**
+		 * Delegates to merge(): a raw value and a partial sum are both one
+		 * double here. Unparsable string values are logged once and then
+		 * silently skipped.
+		 */
+		@Override
+		public void iterate(AggregationBuffer agg, Object[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			try {
+				merge(agg, parameters[0]);
+			} catch (NumberFormatException e) {
+				if (!warned) {
+					warned = true;
+					LOG.warn(getClass().getSimpleName() + " "
+							+ StringUtils.stringifyException(e));
+					LOG.warn(getClass().getSimpleName()
+							+ " ignoring similar exceptions.");
+				}
+			}
+		}
+
+		/** The partial of a sum is the sum itself (one double). */
+		@Override
+		public Object terminatePartial(AggregationBuffer agg)
+				throws HiveException {
+			return terminate(agg);
+		}
+
+		/** Adds one value/partial; a null contributes nothing. */
+		@Override
+		public void merge(AggregationBuffer agg, Object partial)
+				throws HiveException {
+			if (partial != null) {
+				SumDoubleAgg myagg = (SumDoubleAgg) agg;
+				myagg.empty = false;
+				myagg.sum += PrimitiveObjectInspectorUtils.getDouble(partial,
+						inputOI);
+			}
+		}
+
+		/** Returns null (SQL standard) when no non-null value was seen. */
+		@Override
+		public Object terminate(AggregationBuffer agg) throws HiveException {
+			SumDoubleAgg myagg = (SumDoubleAgg) agg;
+			if (myagg.empty) {
+				return null;
+			}
+			result.set(myagg.sum);
+			return result;
+		}
+
+	}
+
+	/**
+	 * GenericUDAFSumLong.
+	 * 
+	 */
+	public static class GenericUDAFSumLong extends GenericUDAFEvaluator {
+		private PrimitiveObjectInspector inputOI;
+		private LongWritable result; // reusable output holder
+
+		/**
+		 * Mirrors GenericUDAFSumDouble.init: input and output are one long
+		 * in every mode, so a single inspector suffices.
+		 */
+		@Override
+		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			super.init(m, parameters);
+			result = new LongWritable(0);
+			inputOI = (PrimitiveObjectInspector) parameters[0];
+			return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+		}
+
+		/** class for storing double sum value. */
+		static class SumLongAgg implements SerializableBuffer {
+			boolean empty; // true until the first non-null value is added
+			long sum;
+
+			// Fixed 9-byte layout: 1-byte boolean 'empty', then 8-byte long
+			// 'sum'; 'len' is unused because the layout is fixed.
+			@Override
+			public void deSerializeAggBuffer(byte[] data, int start, int len) {
+				empty = BufferSerDeUtil.getBoolean(data, start);
+				start += 1;
+				sum = BufferSerDeUtil.getLong(data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(byte[] data, int start, int len) {
+				BufferSerDeUtil.writeBoolean(empty, data, start);
+				start += 1;
+				BufferSerDeUtil.writeLong(sum, data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(DataOutput output)
+					throws IOException {
+				output.writeBoolean(empty);
+				output.writeLong(sum);
+			}
+		}
+
+		@Override
+		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+			SumLongAgg result = new SumLongAgg();
+			reset(result);
+			return result;
+		}
+
+		@Override
+		public void reset(AggregationBuffer agg) throws HiveException {
+			SumLongAgg myagg = (SumLongAgg) agg;
+			myagg.empty = true;
+			myagg.sum = 0;
+		}
+
+		private boolean warned = false; // rate-limits NumberFormatException logging
+
+		/** Delegates to merge(); unparsable values are logged once and skipped. */
+		@Override
+		public void iterate(AggregationBuffer agg, Object[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			try {
+				merge(agg, parameters[0]);
+			} catch (NumberFormatException e) {
+				if (!warned) {
+					warned = true;
+					LOG.warn(getClass().getSimpleName() + " "
+							+ StringUtils.stringifyException(e));
+				}
+			}
+		}
+
+		/** The partial of a sum is the sum itself (one long). */
+		@Override
+		public Object terminatePartial(AggregationBuffer agg)
+				throws HiveException {
+			return terminate(agg);
+		}
+
+		/** Adds one value/partial; a null contributes nothing. */
+		@Override
+		public void merge(AggregationBuffer agg, Object partial)
+				throws HiveException {
+			if (partial != null) {
+				SumLongAgg myagg = (SumLongAgg) agg;
+				myagg.sum += PrimitiveObjectInspectorUtils.getLong(partial,
+						inputOI);
+				myagg.empty = false;
+			}
+		}
+
+		/** Returns null (SQL standard) when no non-null value was seen. */
+		@Override
+		public Object terminate(AggregationBuffer agg) throws HiveException {
+			SumLongAgg myagg = (SumLongAgg) agg;
+			if (myagg.empty) {
+				return null;
+			}
+			result.set(myagg.sum);
+			return result;
+		}
+
+	}
+
+}
diff --git a/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
new file mode 100644
index 0000000..4c16f5a
--- /dev/null
+++ b/hivesterix/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
@@ -0,0 +1,331 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.util.StringUtils;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * Compute the variance. This class is extended by: GenericUDAFVarianceSample
+ * GenericUDAFStd GenericUDAFStdSample
+ * 
+ */
+@Description(name = "variance,var_pop", value = "_FUNC_(x) - Returns the variance of a set of numbers")
+public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
+
+	static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class
+			.getName());
+
+	/**
+	 * Accepts any numeric or string primitive; all inputs are evaluated in
+	 * double precision by a single evaluator class.
+	 */
+	@Override
+	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+			throws SemanticException {
+		if (parameters.length != 1) {
+			throw new UDFArgumentTypeException(parameters.length - 1,
+					"Exactly one argument is expected.");
+		}
+
+		if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+			throw new UDFArgumentTypeException(0,
+					"Only primitive type arguments are accepted but "
+							+ parameters[0].getTypeName() + " is passed.");
+		}
+		switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+		case BYTE:
+		case SHORT:
+		case INT:
+		case LONG:
+		case FLOAT:
+		case DOUBLE:
+		case STRING:
+			return new GenericUDAFVarianceEvaluator();
+		case BOOLEAN:
+		default:
+			throw new UDFArgumentTypeException(0,
+					"Only numeric or string type arguments are accepted but "
+							+ parameters[0].getTypeName() + " is passed.");
+		}
+	}
+
+	/**
+	 * Evaluate the variance using the algorithm described by Chan, Golub, and
+	 * LeVeque in
+	 * "Algorithms for computing the sample variance: analysis and recommendations"
+	 * The American Statistician, 37 (1983) pp. 242--247.
+	 * 
+	 * variance = variance1 + variance2 + n/(m*(m+n)) * pow(((m/n)*t1 - t2),2)
+	 * 
+	 * where: - variance is sum[x-avg^2] (this is actually n times the variance)
+	 * and is updated at every step. - n is the count of elements in chunk1 - m
+	 * is the count of elements in chunk2 - t1 = sum of elements in chunk1, t2 =
+	 * sum of elements in chunk2.
+	 * 
+	 * This algorithm was proven to be numerically stable by J.L. Barlow in
+	 * "Error analysis of a pairwise summation algorithm to compute sample variance"
+	 * Numer. Math, 58 (1991) pp. 583--590
+	 * 
+	 */
+	public static class GenericUDAFVarianceEvaluator extends
+			GenericUDAFEvaluator {
+
+		// For PARTIAL1 and COMPLETE
+		private PrimitiveObjectInspector inputOI;
+
+		// For PARTIAL2 and FINAL
+		private StructObjectInspector soi;
+		private StructField countField;
+		private StructField sumField;
+		private StructField varianceField;
+		private LongObjectInspector countFieldOI;
+		// Also used to read the variance field in merge(): both struct
+		// fields are doubles, so one DoubleObjectInspector serves for both.
+		private DoubleObjectInspector sumFieldOI;
+
+		// For PARTIAL1 and PARTIAL2
+		private Object[] partialResult;
+
+		// For FINAL and COMPLETE
+		private DoubleWritable result;
+
+		/**
+		 * Wires up the input inspectors (raw value vs. partial struct) and
+		 * declares the output shape for the current mode: a
+		 * (count, sum, variance) struct for partial modes, a single double
+		 * otherwise.
+		 */
+		@Override
+		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			super.init(m, parameters);
+
+			// init input
+			if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+				inputOI = (PrimitiveObjectInspector) parameters[0];
+			} else {
+				soi = (StructObjectInspector) parameters[0];
+
+				countField = soi.getStructFieldRef("count");
+				sumField = soi.getStructFieldRef("sum");
+				varianceField = soi.getStructFieldRef("variance");
+
+				countFieldOI = (LongObjectInspector) countField
+						.getFieldObjectInspector();
+				sumFieldOI = (DoubleObjectInspector) sumField
+						.getFieldObjectInspector();
+			}
+
+			// init output
+			if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+				// The output of a partial aggregation is a struct containing
+				// a long count and doubles sum and variance.
+
+				ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+				foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+				foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+				ArrayList<String> fname = new ArrayList<String>();
+				fname.add("count");
+				fname.add("sum");
+				fname.add("variance");
+
+				// Writables are reused across rows via terminatePartial.
+				partialResult = new Object[3];
+				partialResult[0] = new LongWritable(0);
+				partialResult[1] = new DoubleWritable(0);
+				partialResult[2] = new DoubleWritable(0);
+
+				return ObjectInspectorFactory.getStandardStructObjectInspector(
+						fname, foi);
+
+			} else {
+				setResult(new DoubleWritable(0));
+				return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+			}
+		}
+
+		/**
+		 * Aggregation buffer: count, sum and the running sum of squared
+		 * deviations. Serialized layout is fixed at 24 bytes, in order:
+		 * long count, double sum, double variance (8 bytes each).
+		 */
+		static class StdAgg implements SerializableBuffer {
+			long count; // number of elements
+			double sum; // sum of elements
+			double variance; // sum[x-avg^2] (this is actually n times the
+								// variance)
+
+			// Reads the fields back in exactly the order/width written by
+			// serializeAggBuffer; 'len' is unused because the layout is fixed.
+			@Override
+			public void deSerializeAggBuffer(byte[] data, int start, int len) {
+				count = BufferSerDeUtil.getLong(data, start);
+				start += 8;
+				sum = BufferSerDeUtil.getDouble(data, start);
+				start += 8;
+				variance = BufferSerDeUtil.getDouble(data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(byte[] data, int start, int len) {
+				BufferSerDeUtil.writeLong(count, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(sum, data, start);
+				start += 8;
+				BufferSerDeUtil.writeDouble(variance, data, start);
+			}
+
+			@Override
+			public void serializeAggBuffer(DataOutput output)
+					throws IOException {
+				output.writeLong(count);
+				output.writeDouble(sum);
+				output.writeDouble(variance);
+			}
+		};
+
+		@Override
+		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+			StdAgg result = new StdAgg();
+			reset(result);
+			return result;
+		}
+
+		@Override
+		public void reset(AggregationBuffer agg) throws HiveException {
+			StdAgg myagg = (StdAgg) agg;
+			myagg.count = 0;
+			myagg.sum = 0;
+			myagg.variance = 0;
+		}
+
+		private boolean warned = false; // rate-limits NumberFormatException logging
+
+		/**
+		 * Folds one value into the buffer. Nulls are skipped; unparsable
+		 * strings are logged once and then silently skipped.
+		 */
+		@Override
+		public void iterate(AggregationBuffer agg, Object[] parameters)
+				throws HiveException {
+			assert (parameters.length == 1);
+			Object p = parameters[0];
+			if (p != null) {
+				StdAgg myagg = (StdAgg) agg;
+				try {
+					double v = PrimitiveObjectInspectorUtils.getDouble(p,
+							inputOI);
+					myagg.count++;
+					myagg.sum += v;
+					if (myagg.count > 1) {
+						// Incremental update of n*variance:
+						// (n*v - sum)^2 / (n*(n-1)), with sum already
+						// including v.
+						double t = myagg.count * v - myagg.sum;
+						myagg.variance += (t * t)
+								/ ((double) myagg.count * (myagg.count - 1));
+					}
+				} catch (NumberFormatException e) {
+					if (!warned) {
+						warned = true;
+						LOG.warn(getClass().getSimpleName() + " "
+								+ StringUtils.stringifyException(e));
+						LOG.warn(getClass().getSimpleName()
+								+ " ignoring similar exceptions.");
+					}
+				}
+			}
+		}
+
+		/** Packs the running statistics into the reusable partial struct. */
+		@Override
+		public Object terminatePartial(AggregationBuffer agg)
+				throws HiveException {
+			StdAgg myagg = (StdAgg) agg;
+			((LongWritable) partialResult[0]).set(myagg.count);
+			((DoubleWritable) partialResult[1]).set(myagg.sum);
+			((DoubleWritable) partialResult[2]).set(myagg.variance);
+			return partialResult;
+		}
+
+		/**
+		 * Merges a partial (count, sum, variance) struct into this buffer
+		 * using the pairwise formula from the class javadoc.
+		 */
+		@Override
+		public void merge(AggregationBuffer agg, Object partial)
+				throws HiveException {
+			if (partial != null) {
+				StdAgg myagg = (StdAgg) agg;
+
+				Object partialCount = soi.getStructFieldData(partial,
+						countField);
+				Object partialSum = soi.getStructFieldData(partial, sumField);
+				Object partialVariance = soi.getStructFieldData(partial,
+						varianceField);
+
+				// n is captured before any mutation, so at most one of the
+				// two branches below runs per call.
+				long n = myagg.count;
+				long m = countFieldOI.get(partialCount);
+
+				if (n == 0) {
+					// Just copy the information since there is nothing so far
+					// (sumFieldOI doubles as the variance-field inspector).
+					myagg.variance = sumFieldOI.get(partialVariance);
+					myagg.count = countFieldOI.get(partialCount);
+					myagg.sum = sumFieldOI.get(partialSum);
+				}
+
+				if (m != 0 && n != 0) {
+					// Merge the two partials
+
+					double a = myagg.sum;
+					double b = sumFieldOI.get(partialSum);
+
+					myagg.count += m;
+					myagg.sum += b;
+					double t = (m / (double) n) * a - b;
+					myagg.variance += sumFieldOI.get(partialVariance)
+							+ ((n / (double) m) / ((double) n + m)) * t * t;
+				}
+			}
+		}
+
+		/**
+		 * Final result: population variance (variance / n). Returns null
+		 * for an empty group (SQL standard) and 0 for a single element.
+		 */
+		@Override
+		public Object terminate(AggregationBuffer agg) throws HiveException {
+			StdAgg myagg = (StdAgg) agg;
+
+			if (myagg.count == 0) { // SQL standard - return null for zero
+									// elements
+				return null;
+			} else {
+				if (myagg.count > 1) {
+					getResult().set(myagg.variance / (myagg.count));
+				} else { // for one element the variance is always 0
+					getResult().set(0);
+				}
+				return getResult();
+			}
+		}
+
+		/** Installs the reusable result holder (set during init for FINAL/COMPLETE). */
+		public void setResult(DoubleWritable result) {
+			this.result = result;
+		}
+
+		/** @return the reusable result holder installed via {@link #setResult}. */
+		public DoubleWritable getResult() {
+			return result;
+		}
+	}
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.cmd b/hivesterix/src/main/scripts/run.cmd
old mode 100644
new mode 100755
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.cmd
copy to hivesterix/src/main/scripts/run.cmd
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.sh b/hivesterix/src/main/scripts/run.sh
old mode 100644
new mode 100755
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/src/main/scripts/run.sh
copy to hivesterix/src/main/scripts/run.sh
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java
new file mode 100644
index 0000000..a69a3f2
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java
@@ -0,0 +1,144 @@
+package edu.uci.ics.hivesterix.perf;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestCase;
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+
+/**
+ * Runs one perf query file through the Hive driver, fetches the produced
+ * table from DFS (or the local FS as fallback), and compares it against the
+ * expected result file, tolerating textual differences in float fields.
+ */
+public class PerfTestCase extends AbstractPerfTestCase {
+	private File resultFile;
+	private FileSystem dfs;
+
+	PerfTestCase(File queryFile, File resultFile) {
+		super("testRuntimeFunction", queryFile);
+		this.queryFile = queryFile;
+		this.resultFile = resultFile;
+	}
+
+	@Test
+	public void testRuntimeFunction() throws Exception {
+		StringBuilder queryString = new StringBuilder();
+		readFileToString(queryFile, queryString);
+		String[] queries = queryString.toString().split(";");
+		StringWriter sw = new StringWriter();
+
+		HiveConf hconf = ConfUtil.getHiveConf();
+		Driver driver = new Driver(hconf, new PrintWriter(sw));
+		driver.init();
+
+		dfs = FileSystem.get(ConfUtil.getJobConf());
+
+		// Run all but the last split fragment -- presumably the trailing
+		// text after the final ';'. TODO confirm query files end with ';'.
+		int i = 0;
+		for (String query : queries) {
+			if (i == queries.length - 1)
+				break;
+			driver.run(query);
+			driver.clear();
+			i++;
+		}
+
+		// The result table name is the result file name minus its extension.
+		String warehouse = hconf.get("hive.metastore.warehouse.dir");
+		String tableName = removeExt(resultFile.getName());
+		String directory = warehouse + "/" + tableName + "/";
+		String localDirectory = "tmp";
+
+		// Prefer DFS; fall back to the local file system when the table
+		// directory is absent there.
+		FileStatus[] files = dfs.listStatus(new Path(directory));
+		FileSystem lfs = null;
+		if (files == null) {
+			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+			files = lfs.listStatus(new Path(directory));
+		}
+
+		File resultDirectory = new File(localDirectory + "/" + tableName);
+		deleteDir(resultDirectory);
+		resultDirectory.mkdir();
+
+		// Stage every data file locally, skipping checksum (.crc) files.
+		for (FileStatus fs : files) {
+			Path src = fs.getPath();
+			if (src.getName().indexOf("crc") >= 0)
+				continue;
+
+			String destStr = localDirectory + "/" + tableName + "/"
+					+ src.getName();
+			Path dest = new Path(destStr);
+			if (lfs != null) {
+				lfs.copyToLocalFile(src, dest);
+				dfs.copyFromLocalFile(dest, new Path(directory));
+			} else
+				dfs.copyToLocalFile(src, dest);
+		}
+
+		// Concatenate the staged files into one string for comparison.
+		File[] rFiles = resultDirectory.listFiles();
+		StringBuilder sb = new StringBuilder();
+		for (File r : rFiles) {
+			if (r.getName().indexOf("crc") >= 0)
+				continue;
+			readFileToString(r, sb);
+		}
+		deleteDir(resultDirectory);
+
+		StringBuilder buf = new StringBuilder();
+		readFileToString(resultFile, buf);
+		if (!equal(buf, sb)) {
+			throw new Exception("Result for " + queryFile + " changed:\n"
+					+ sw.toString());
+		}
+	}
+
+	/** Removes a flat (one-level) directory and its files, if present. */
+	private void deleteDir(File resultDirectory) {
+		if (resultDirectory.exists()) {
+			File[] rFiles = resultDirectory.listFiles();
+			for (File r : rFiles)
+				r.delete();
+			resultDirectory.delete();
+		}
+	}
+
+	/**
+	 * Row-by-row, field-by-field comparison of two result dumps; differing
+	 * fields containing a '.' are re-compared as parsed floats.
+	 */
+	private boolean equal(StringBuilder sb1, StringBuilder sb2) {
+		String s1 = sb1.toString();
+		String s2 = sb2.toString();
+		String[] rowsOne = s1.split("\n");
+		String[] rowsTwo = s2.split("\n");
+
+		if (rowsOne.length != rowsTwo.length)
+			return false;
+
+		for (int i = 0; i < rowsOne.length; i++) {
+			String row1 = rowsOne[i];
+			String row2 = rowsTwo[i];
+
+			if (row1.equals(row2))
+				continue;
+
+			// NOTE(review): split("") tokenizes per character; a real field
+			// delimiter such as "\t" or "\u0001" was probably intended --
+			// TODO confirm against the result-file format.
+			String[] fields1 = row1.split("");
+			String[] fields2 = row2.split("");
+
+			// Guard: the rows may tokenize to different lengths; indexing
+			// fields2 with fields1's bound previously risked an
+			// ArrayIndexOutOfBoundsException.
+			if (fields1.length != fields2.length)
+				return false;
+
+			for (int j = 0; j < fields1.length; j++) {
+				if (fields1[j].equals(fields2[j])) {
+					continue;
+				} else if (fields1[j].indexOf('.') < 0) {
+					return false;
+				} else {
+					Float float1 = Float.parseFloat(fields1[j]);
+					Float float2 = Float.parseFloat(fields2[j]);
+
+					// NOTE(review): exact equality after parsing; a small
+					// epsilon tolerance may have been intended -- confirm.
+					if (Math.abs(float1 - float2) == 0)
+						continue;
+					else
+						return false;
+				}
+			}
+		}
+
+		return true;
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java
new file mode 100644
index 0000000..8d8178f
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.perf;

+

+import java.io.File;

+import java.util.List;

+

+import junit.framework.Test;

+import junit.framework.TestResult;

+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestSuiteClass;

+

+public class PerfTestSuite extends AbstractPerfTestSuiteClass {
+
+	private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";
+	private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";
+	private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";
+
+	private static final String FILE_EXTENSION_OF_RESULTS = "result";
+
+	/**
+	 * Builds the suite: boots the HDFS/Hyracks test cluster, loads the test
+	 * data, then adds one PerfTestCase per non-ignored query file, pairing
+	 * it with the expected-result file of the same base name.
+	 */
+	public static Test suite() throws Exception {
+		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+		File testData = new File(PATH_TO_QUERIES);
+		File[] queries = testData.listFiles();
+		PerfTestSuite testSuite = new PerfTestSuite();
+
+		// set hdfs and hyracks cluster, and load test data to hdfs
+		try {
+			testSuite.setup();
+			testSuite.loadData();
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw new IllegalStateException(e.getMessage());
+		}
+
+		for (File qFile : queries) {
+			if (isIgnored(qFile.getName(), ignores))
+				continue;
+
+			if (qFile.isFile()) {
+				String resultFileName = hiveExtToResExt(qFile.getName());
+				File rFile = new File(PATH_TO_RESULTS + resultFileName);
+				testSuite.addTest(new PerfTestCase(qFile, rFile));
+			}
+		}
+		return testSuite;
+	}
+
+	/** Swaps a query file's extension for the ".result" extension. */
+	private static String hiveExtToResExt(String fname) {
+		int dot = fname.lastIndexOf('.');
+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+	}
+
+	/**
+	 * Runs the tests and collects their result in a TestResult.
+	 * Afterwards tears the test cluster down even if a test failed.
+	 */
+	@Override
+	public void run(TestResult result) {
+
+		int testCount = countTestCases();
+		for (int i = 0; i < testCount; i++) {
+			Test each = this.testAt(i);
+			if (result.shouldStop())
+				break;
+			runTest(each, result);
+		}
+
+		// cleanup hdfs and hyracks cluster
+		try {
+			cleanup();
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw new IllegalStateException(e.getMessage());
+		}
+	}
+
+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java
new file mode 100644
index 0000000..258db22
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java
@@ -0,0 +1,101 @@
+package edu.uci.ics.hivesterix.perf;
+
+import java.io.File;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestCase;
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+
+public class PerfTestSuiteCaseGenerator extends AbstractPerfTestCase {
+	private File resultFile;
+	private FileSystem dfs;
+
+	PerfTestSuiteCaseGenerator(File queryFile, File resultFile) {
+		super("testRuntimeFunction", queryFile);
+		this.queryFile = queryFile;
+		this.resultFile = resultFile;
+	}
+
+	@Test
+	public void testRuntimeFunction() throws Exception {
+		StringBuilder queryString = new StringBuilder();
+		readFileToString(queryFile, queryString);
+		String[] queries = queryString.toString().split(";");
+
+		HiveConf hconf = ConfUtil.getHiveConf();
+		Driver driver = new Driver(hconf);
+		driver.init();
+
+		dfs = FileSystem.get(ConfUtil.getJobConf());
+
+		long startTime = System.currentTimeMillis();
+		int i = 0;
+		for (String query : queries) {
+			if (i == queries.length - 1)
+				break;
+			driver.run(query);
+			// driver.clear();
+			i++;
+		}
+		long endTime = System.currentTimeMillis();
+		System.out.println(resultFile.getName() + " execution time "
+				+ (endTime - startTime));
+
+		String warehouse = hconf.get("hive.metastore.warehouse.dir");
+		String tableName = removeExt(resultFile.getName());
+		String directory = warehouse + "/" + tableName + "/";
+		String localDirectory = "tmp";
+
+		FileStatus[] files = dfs.listStatus(new Path(directory));
+		FileSystem lfs = null;
+		if (files == null) {
+			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+			files = lfs.listStatus(new Path(directory));
+		}
+
+		File resultDirectory = new File(localDirectory + "/" + tableName);
+		deleteDir(resultDirectory);
+		resultDirectory.mkdir();
+
+		for (FileStatus fs : files) {
+			Path src = fs.getPath();
+			if (src.getName().indexOf("crc") >= 0)
+				continue;
+
+			String destStr = localDirectory + "/" + tableName + "/"
+					+ src.getName();
+			Path dest = new Path(destStr);
+			if (lfs != null) {
+				lfs.copyToLocalFile(src, dest);
+				dfs.copyFromLocalFile(dest, new Path(directory));
+			} else
+				dfs.copyToLocalFile(src, dest);
+		}
+
+		File[] rFiles = resultDirectory.listFiles();
+		StringBuilder sb = new StringBuilder();
+		for (File r : rFiles) {
+			if (r.getName().indexOf("crc") >= 0)
+				continue;
+			readFileToString(r, sb);
+		}
+		deleteDir(resultDirectory);
+
+		writeStringToFile(resultFile, sb);
+	}
+
+	private void deleteDir(File resultDirectory) {
+		if (resultDirectory.exists()) {
+			File[] rFiles = resultDirectory.listFiles();
+			for (File r : rFiles)
+				r.delete();
+			resultDirectory.delete();
+		}
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java
new file mode 100644
index 0000000..0a27ca2
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.perf;

+

+import java.io.File;

+import java.util.List;

+

+import junit.framework.Test;

+import junit.framework.TestResult;

+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestSuiteClass;

+

+public class PerfTestSuiteGenerator extends AbstractPerfTestSuiteClass {

+

+	private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";

+	private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";

+	private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";

+

+	private static final String FILE_EXTENSION_OF_RESULTS = "result";

+

+	public static Test suite() throws Exception {

+		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);

+		File testData = new File(PATH_TO_QUERIES);

+		File[] queries = testData.listFiles();

+		PerfTestSuiteGenerator testSuite = new PerfTestSuiteGenerator();

+

+		// set hdfs and hyracks cluster, and load test data to hdfs

+		try {

+			testSuite.setup();

+			testSuite.loadData();

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new IllegalStateException(e.getMessage());

+		}

+

+		for (File qFile : queries) {

+			if (isIgnored(qFile.getName(), ignores))

+				continue;

+

+			if (qFile.isFile() && qFile.getName().startsWith("q18_")) {

+				String resultFileName = hiveExtToResExt(qFile.getName());

+				File rFile = new File(PATH_TO_RESULTS + resultFileName);

+				testSuite.addTest(new PerfTestSuiteCaseGenerator(qFile, rFile));

+			}

+		}

+		return testSuite;

+	}

+

+	private static String hiveExtToResExt(String fname) {

+		int dot = fname.lastIndexOf('.');

+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;

+	}

+

+	/**

+	 * Runs the tests and collects their result in a TestResult.

+	 */

+	@Override

+	public void run(TestResult result) {

+

+		int testCount = countTestCases();

+		for (int i = 0; i < testCount; i++) {

+			Test each = this.testAt(i);

+			if (result.shouldStop())

+				break;

+			runTest(each, result);

+		}

+

+		// cleanup hdfs and hyracks cluster

+		try {

+			cleanup();

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new IllegalStateException(e.getMessage());

+		}

+	}

+

+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java
new file mode 100644
index 0000000..f55d6a1
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java
@@ -0,0 +1,52 @@
+package edu.uci.ics.hivesterix.perf.base;

+

+import java.io.BufferedReader;

+import java.io.File;

+import java.io.FileReader;

+import java.io.FileWriter;

+import java.io.PrintWriter;

+import java.io.StringWriter;

+

+import junit.framework.TestCase;

+

+public class AbstractPerfTestCase extends TestCase {

+	protected File queryFile;

+

+	public AbstractPerfTestCase(String testName, File queryFile) {

+		super(testName);

+	}

+

+	protected static void readFileToString(File file, StringBuilder buf)

+			throws Exception {

+		BufferedReader result = new BufferedReader(new FileReader(file));

+		while (true) {

+			String s = result.readLine();

+			if (s == null) {

+				break;

+			} else {

+				buf.append(s);

+				buf.append('\n');

+			}

+		}

+		result.close();

+	}

+

+	protected static void writeStringToFile(File file, StringWriter buf)

+			throws Exception {

+		PrintWriter result = new PrintWriter(new FileWriter(file));

+		result.print(buf);

+		result.close();

+	}

+

+	protected static void writeStringToFile(File file, StringBuilder buf)

+			throws Exception {

+		PrintWriter result = new PrintWriter(new FileWriter(file));

+		result.print(buf);

+		result.close();

+	}

+

+	protected static String removeExt(String fname) {

+		int dot = fname.lastIndexOf('.');

+		return fname.substring(0, dot);

+	}

+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java
new file mode 100644
index 0000000..05474ca
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java
@@ -0,0 +1,210 @@
+package edu.uci.ics.hivesterix.perf.base;

+

+import java.io.BufferedReader;

+import java.io.FileNotFoundException;

+import java.io.FileReader;

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+

+import junit.framework.TestSuite;

+

+import org.apache.hadoop.conf.Configuration;

+import org.apache.hadoop.fs.FileSystem;

+import org.apache.hadoop.fs.Path;

+import org.apache.hadoop.hdfs.MiniDFSCluster;

+import org.apache.hadoop.hive.conf.HiveConf;

+import org.apache.hadoop.hive.ql.session.SessionState;

+import org.apache.hadoop.mapred.JobConf;

+import org.apache.hadoop.mapred.MiniMRCluster;

+

+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;

+import edu.uci.ics.hyracks.api.client.HyracksConnection;

+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;

+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;

+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;

+import edu.uci.ics.hyracks.control.nc.NodeControllerService;

+

+@SuppressWarnings("deprecation")

+public abstract class AbstractPerfTestSuiteClass extends TestSuite {

+

+	private static final String PATH_TO_HADOOP_CONF = "src/test/resources/perf/hadoop/conf";

+	private static final String PATH_TO_HIVE_CONF = "src/test/resources/perf/hive/conf/hive-default.xml";

+	private static final String PATH_TO_DATA = "src/test/resources/perf/data/";

+

+	private MiniDFSCluster dfsCluster;

+	private MiniMRCluster mrCluster;

+

+	private JobConf conf = new JobConf();

+	protected FileSystem dfs;

+

+	private int numberOfNC = 2;

+	private ClusterControllerService cc;

+	private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();

+

+	/**

+	 * setup cluster

+	 * 

+	 * @throws IOException

+	 */

+	protected void setup() throws Exception {

+		setupHdfs();

+		setupHyracks();

+	}

+

+	private void setupHdfs() throws IOException {

+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));

+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));

+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

+		HiveConf hconf = new HiveConf(SessionState.class);

+		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+

+		FileSystem lfs = FileSystem.getLocal(new Configuration());

+		lfs.delete(new Path("build"), true);

+		lfs.delete(new Path("metastore_db"), true);

+

+		System.setProperty("hadoop.log.dir", "logs");

+		dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);

+		dfs = dfsCluster.getFileSystem();

+

+		mrCluster = new MiniMRCluster(2, dfs.getUri().toString(), 1);

+		hconf.setVar(HiveConf.ConfVars.HADOOPJT,

+				"localhost:" + mrCluster.getJobTrackerPort());

+		hconf.setInt("mapred.min.split.size", 1342177280);

+

+		conf = new JobConf(hconf);

+		ConfUtil.setJobConf(conf);

+

+		String fsName = conf.get("fs.default.name");

+		hconf.set("hive.metastore.warehouse.dir",

+				fsName.concat("/tmp/hivesterix"));

+		String warehouse = hconf.get("hive.metastore.warehouse.dir");

+		dfs.mkdirs(new Path(warehouse));

+		ConfUtil.setHiveConf(hconf);

+	}

+

+	private void setupHyracks() throws Exception {

+		// read hive conf

+		HiveConf hconf = new HiveConf(SessionState.class);

+		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+		SessionState.start(hconf);

+		String ipAddress = hconf.get("hive.hyracks.host");

+		int clientPort = Integer.parseInt(hconf.get("hive.hyracks.port"));

+		int clusterPort = clientPort;

+		String applicationName = hconf.get("hive.hyracks.app");

+

+		// start hyracks cc

+		CCConfig ccConfig = new CCConfig();

+		ccConfig.clientNetIpAddress = ipAddress;

+		ccConfig.clientNetPort = clientPort;

+		ccConfig.clusterNetPort = clusterPort;

+		ccConfig.profileDumpPeriod = 1000;

+		ccConfig.heartbeatPeriod = 200000000;

+		ccConfig.maxHeartbeatLapsePeriods = 200000000;

+		cc = new ClusterControllerService(ccConfig);

+		cc.start();

+

+		// start hyracks nc

+		for (int i = 0; i < numberOfNC; i++) {

+			NCConfig ncConfig = new NCConfig();

+			ncConfig.ccHost = ipAddress;

+			ncConfig.clusterNetIPAddress = ipAddress;

+			ncConfig.ccPort = clientPort;

+			ncConfig.dataIPAddress = "127.0.0.1";

+			ncConfig.nodeId = "nc" + i;

+			NodeControllerService nc = new NodeControllerService(ncConfig);

+			nc.start();

+			ncs.put(ncConfig.nodeId, nc);

+		}

+

+		IHyracksClientConnection hcc = new HyracksConnection(

+				ccConfig.clientNetIpAddress, clientPort);

+		hcc.createApplication(applicationName, null);

+	}

+

+	protected void makeDir(String path) throws IOException {

+		dfs.mkdirs(new Path(path));

+	}

+

+	protected void loadFiles(String src, String dest) throws IOException {

+		dfs.copyFromLocalFile(new Path(src), new Path(dest));

+	}

+

+	protected void cleanup() throws Exception {

+		cleanupHdfs();

+		cleanupHyracks();

+	}

+

+	/**

+	 * cleanup hdfs cluster

+	 */

+	private void cleanupHdfs() throws IOException {

+		dfs.delete(new Path("/"), true);

+		FileSystem.closeAll();

+		dfsCluster.shutdown();

+	}

+

+	/**

+	 * cleanup hyracks cluster

+	 */

+	private void cleanupHyracks() throws Exception {

+		Iterator<NodeControllerService> iterator = ncs.values().iterator();

+		while (iterator.hasNext()) {

+			NodeControllerService nc = iterator.next();

+			nc.stop();

+		}

+		cc.stop();

+	}

+

+	protected static List<String> getIgnoreList(String ignorePath)

+			throws FileNotFoundException, IOException {

+		BufferedReader reader = new BufferedReader(new FileReader(ignorePath));

+		String s = null;

+		List<String> ignores = new ArrayList<String>();

+		while ((s = reader.readLine()) != null) {

+			ignores.add(s);

+		}

+		reader.close();

+		return ignores;

+	}

+

+	protected static boolean isIgnored(String q, List<String> ignoreList) {

+		for (String ignore : ignoreList) {

+			if (ignore.equals(q)) {

+				return true;

+			}

+		}

+		return false;

+	}

+

+	protected void loadData() throws IOException {

+

+		makeDir("/tpch");

+		makeDir("/tpch/customer");

+		makeDir("/tpch/lineitem");

+		makeDir("/tpch/orders");

+		makeDir("/tpch/part");

+		makeDir("/tpch/partsupp");

+		makeDir("/tpch/supplier");

+		makeDir("/tpch/nation");

+		makeDir("/tpch/region");

+

+		makeDir("/jarod");

+

+		loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");

+		loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");

+		loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");

+		loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");

+		loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");

+		loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");

+		loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");

+		loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");

+

+		loadFiles(PATH_TO_DATA + "ext-gby.tbl", "/jarod/");

+	}

+

+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java
new file mode 100644
index 0000000..560cef7
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java
@@ -0,0 +1,52 @@
+package edu.uci.ics.hivesterix.test.base;

+

+import java.io.BufferedReader;

+import java.io.File;

+import java.io.FileReader;

+import java.io.FileWriter;

+import java.io.PrintWriter;

+import java.io.StringWriter;

+

+import junit.framework.TestCase;

+

+public class AbstractHivesterixTestCase extends TestCase {

+	protected File queryFile;

+

+	public AbstractHivesterixTestCase(String testName, File queryFile) {

+		super(testName);

+	}

+

+	protected static void readFileToString(File file, StringBuilder buf)

+			throws Exception {

+		BufferedReader result = new BufferedReader(new FileReader(file));

+		while (true) {

+			String s = result.readLine();

+			if (s == null) {

+				break;

+			} else {

+				buf.append(s);

+				buf.append('\n');

+			}

+		}

+		result.close();

+	}

+

+	protected static void writeStringToFile(File file, StringWriter buf)

+			throws Exception {

+		PrintWriter result = new PrintWriter(new FileWriter(file));

+		result.print(buf);

+		result.close();

+	}

+

+	protected static void writeStringToFile(File file, StringBuilder buf)

+			throws Exception {

+		PrintWriter result = new PrintWriter(new FileWriter(file));

+		result.print(buf);

+		result.close();

+	}

+

+	protected static String removeExt(String fname) {

+		int dot = fname.lastIndexOf('.');

+		return fname.substring(0, dot);

+	}

+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java
new file mode 100644
index 0000000..e9a5736
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java
@@ -0,0 +1,217 @@
+package edu.uci.ics.hivesterix.test.base;

+

+import java.io.BufferedReader;

+import java.io.File;

+import java.io.FileNotFoundException;

+import java.io.FileReader;

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+

+import junit.framework.TestSuite;

+

+import org.apache.hadoop.conf.Configuration;

+import org.apache.hadoop.fs.FileSystem;

+import org.apache.hadoop.fs.Path;

+import org.apache.hadoop.hdfs.MiniDFSCluster;

+import org.apache.hadoop.hive.conf.HiveConf;

+import org.apache.hadoop.hive.ql.session.SessionState;

+import org.apache.hadoop.mapred.JobConf;

+import org.apache.hadoop.mapred.MiniMRCluster;

+

+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;

+import edu.uci.ics.hyracks.api.client.HyracksConnection;

+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;

+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;

+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;

+import edu.uci.ics.hyracks.control.nc.NodeControllerService;

+

+@SuppressWarnings("deprecation")

+public abstract class AbstractTestSuiteClass extends TestSuite {

+

+	private static final String PATH_TO_HADOOP_CONF = "src/test/resources/runtimefunctionts/hadoop/conf";

+	private static final String PATH_TO_HIVE_CONF = "src/test/resources/runtimefunctionts/hive/conf/hive-default.xml";

+

+	private static final String PATH_TO_CLUSTER_CONF = "src/test/resources/runtimefunctionts/hive/conf/topology.xml";

+	private static final String PATH_TO_DATA = "src/test/resources/runtimefunctionts/data/";

+

+	private MiniDFSCluster dfsCluster;

+	private MiniMRCluster mrCluster;

+

+	private JobConf conf = new JobConf();

+	protected FileSystem dfs;

+

+	private int numberOfNC = 2;

+	private ClusterControllerService cc;

+	private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();

+

+	/**

+	 * setup cluster

+	 * 

+	 * @throws IOException

+	 */

+	protected void setup() throws Exception {

+		setupHdfs();

+		setupHyracks();

+	}

+

+	private void setupHdfs() throws IOException {

+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));

+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));

+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

+		HiveConf hconf = new HiveConf(SessionState.class);

+		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+

+		FileSystem lfs = FileSystem.getLocal(new Configuration());

+		lfs.delete(new Path("build"), true);

+		lfs.delete(new Path("metastore_db"), true);

+

+		System.setProperty("hadoop.log.dir", "logs");

+		dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);

+		dfs = dfsCluster.getFileSystem();

+

+		mrCluster = new MiniMRCluster(2, dfs.getUri().toString(), 1);

+		hconf.setVar(HiveConf.ConfVars.HADOOPJT,

+				"localhost:" + mrCluster.getJobTrackerPort());

+

+		conf = new JobConf(hconf);

+		ConfUtil.setJobConf(conf);

+

+		String fsName = conf.get("fs.default.name");

+		hconf.set("hive.metastore.warehouse.dir",

+				fsName.concat("/tmp/hivesterix"));

+		String warehouse = hconf.get("hive.metastore.warehouse.dir");

+		dfs.mkdirs(new Path(warehouse));

+		ConfUtil.setHiveConf(hconf);

+	}

+

+	private void setupHyracks() throws Exception {

+		// read hive conf

+		HiveConf hconf = new HiveConf(SessionState.class);

+		hconf.addResource(new Path(PATH_TO_HIVE_CONF));

+		SessionState.start(hconf);

+		String ipAddress = hconf.get("hive.hyracks.host");

+		int clientPort = Integer.parseInt(hconf.get("hive.hyracks.port"));

+		int netPort = clientPort + 1;

+		String applicationName = hconf.get("hive.hyracks.app");

+

+		// start hyracks cc

+		CCConfig ccConfig = new CCConfig();

+		ccConfig.clientNetIpAddress = ipAddress;

+		ccConfig.clientNetPort = clientPort;

+		ccConfig.clusterNetPort = netPort;

+		ccConfig.profileDumpPeriod = 1000;

+		ccConfig.heartbeatPeriod = 200000000;

+		ccConfig.maxHeartbeatLapsePeriods = 200000000;

+		ccConfig.clusterTopologyDefinition = new File(PATH_TO_CLUSTER_CONF);

+		cc = new ClusterControllerService(ccConfig);

+		cc.start();

+

+		// start hyracks nc

+		for (int i = 0; i < numberOfNC; i++) {

+			NCConfig ncConfig = new NCConfig();

+			ncConfig.ccHost = ipAddress;

+			ncConfig.clusterNetIPAddress = ipAddress;

+			ncConfig.ccPort = netPort;

+			ncConfig.dataIPAddress = "127.0.0.1";

+			ncConfig.nodeId = "nc" + i;

+			NodeControllerService nc = new NodeControllerService(ncConfig);

+			nc.start();

+			ncs.put(ncConfig.nodeId, nc);

+		}

+

+		IHyracksClientConnection hcc = new HyracksConnection(

+				ccConfig.clientNetIpAddress, clientPort);

+		hcc.createApplication(applicationName, null);

+	}

+

+	protected void makeDir(String path) throws IOException {

+		dfs.mkdirs(new Path(path));

+	}

+

+	protected void loadFiles(String src, String dest) throws IOException {

+		dfs.copyFromLocalFile(new Path(src), new Path(dest));

+	}

+

+	protected void cleanup() throws Exception {

+		cleanupHdfs();

+		cleanupHyracks();

+	}

+

+	/**

+	 * cleanup hdfs cluster

+	 */

+	private void cleanupHdfs() throws IOException {

+		dfs.delete(new Path("/"), true);

+		FileSystem.closeAll();

+		dfsCluster.shutdown();

+	}

+

+	/**

+	 * cleanup hyracks cluster

+	 */

+	private void cleanupHyracks() throws Exception {

+		Iterator<NodeControllerService> iterator = ncs.values().iterator();

+		while (iterator.hasNext()) {

+			NodeControllerService nc = iterator.next();

+			nc.stop();

+		}

+		cc.stop();

+	}

+

+	protected static List<String> getIgnoreList(String ignorePath)

+			throws FileNotFoundException, IOException {

+		BufferedReader reader = new BufferedReader(new FileReader(ignorePath));

+		String s = null;

+		List<String> ignores = new ArrayList<String>();

+		while ((s = reader.readLine()) != null) {

+			ignores.add(s);

+		}

+		reader.close();

+		return ignores;

+	}

+

+	protected static boolean isIgnored(String q, List<String> ignoreList) {

+		for (String ignore : ignoreList) {

+			if (q.indexOf(ignore) >= 0) {

+				return true;

+			}

+		}

+		return false;

+	}

+

+	protected void loadData() throws IOException {

+

+		makeDir("/tpch");

+		makeDir("/tpch/customer");

+		makeDir("/tpch/lineitem");

+		makeDir("/tpch/orders");

+		makeDir("/tpch/part");

+		makeDir("/tpch/partsupp");

+		makeDir("/tpch/supplier");

+		makeDir("/tpch/nation");

+		makeDir("/tpch/region");

+

+		makeDir("/test");

+		makeDir("/test/joinsrc1");

+		makeDir("/test/joinsrc2");

+

+		loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");

+		loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");

+		loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");

+		loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");

+		loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");

+		loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");

+		loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");

+		loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");

+

+		loadFiles(PATH_TO_DATA + "large_card_join_src.tbl", "/test/joinsrc1/");

+		loadFiles(PATH_TO_DATA + "large_card_join_src_small.tbl",

+				"/test/joinsrc2/");

+	}

+

+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java
new file mode 100644
index 0000000..800d6be
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java
@@ -0,0 +1,82 @@
+package edu.uci.ics.hivesterix.test.datagen;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+@SuppressWarnings("deprecation")
+public class RecordBalance {
+
+	private static String confPath = System.getenv("HADDOP_HOME");
+	private static Path[] inputPaths = { new Path("/tpch/100x/customer"),
+			new Path("/tpch/100x/nation"), new Path("/tpch/100x/region"),
+			new Path("/tpch/100x/lineitem"), new Path("/tpch/100x/orders"),
+			new Path("/tpch/100x/part"), new Path("/tpch/100x/partsupp"),
+			new Path("/tpch/100x/supplier") };
+
+	private static Path[] outputPaths = { new Path("/tpch/100/customer"),
+			new Path("/tpch/100/nation"), new Path("/tpch/100/region"),
+			new Path("/tpch/100/lineitem"), new Path("/tpch/100/orders"),
+			new Path("/tpch/100/part"), new Path("/tpch/100/partsupp"),
+			new Path("/tpch/100/supplier") };
+
+	public static class MapRecordOnly extends MapReduceBase implements
+			Mapper<LongWritable, Text, LongWritable, Text> {
+
+		public void map(LongWritable id, Text inputValue,
+				OutputCollector<LongWritable, Text> output, Reporter reporter)
+				throws IOException {
+			output.collect(id, inputValue);
+		}
+	}
+
+	public static class ReduceRecordOnly extends MapReduceBase implements
+			Reducer<LongWritable, Text, NullWritable, Text> {
+
+		NullWritable key = NullWritable.get();
+
+		public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
+				OutputCollector<NullWritable, Text> output, Reporter reporter)
+				throws IOException {
+			while (inputValue.hasNext())
+				output.collect(key, inputValue.next());
+		}
+	}
+
+	public static void main(String[] args) throws IOException {
+
+		for (int i = 0; i < inputPaths.length; i++) {
+			JobConf job = new JobConf(RecordBalance.class);
+			job.addResource(new Path(confPath + "/core-site.xml"));
+			job.addResource(new Path(confPath + "/mapred-site.xml"));
+			job.addResource(new Path(confPath + "/hdfs-site.xml"));
+
+			job.setJobName(RecordBalance.class.getSimpleName());
+			job.setMapperClass(MapRecordOnly.class);
+			job.setReducerClass(ReduceRecordOnly.class);
+			job.setMapOutputKeyClass(LongWritable.class);
+			job.setMapOutputValueClass(Text.class);
+
+			job.setInputFormat(TextInputFormat.class);
+			FileInputFormat.setInputPaths(job, inputPaths[i]);
+			FileOutputFormat.setOutputPath(job, outputPaths[i]);
+			job.setNumReduceTasks(Integer.parseInt(args[0]));
+
+			JobClient.runJob(job);
+		}
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java
new file mode 100644
index 0000000..9591c32
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java
@@ -0,0 +1,144 @@
+package edu.uci.ics.hivesterix.test.legacy;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class LegacyTestCase extends AbstractHivesterixTestCase {
+	private File resultFile;
+	private FileSystem dfs;
+
+	public LegacyTestCase(File queryFile, File resultFile) {
+		super("legacy", queryFile);
+		this.queryFile = queryFile;
+		this.resultFile = resultFile;
+	}
+
+	@Test
+	public void testRuntimeFunction() throws Exception {
+		StringBuilder queryString = new StringBuilder();
+		readFileToString(queryFile, queryString);
+		String[] queries = queryString.toString().split(";");
+		StringWriter sw = new StringWriter();
+
+		HiveConf hconf = ConfUtil.getHiveConf();
+		Driver driver = new Driver(hconf, new PrintWriter(sw));
+		driver.init();
+
+		dfs = FileSystem.get(ConfUtil.getJobConf());
+
+		int i = 0;
+		for (String query : queries) {
+			if (i == queries.length - 1)
+				break;
+			driver.run(query);
+			driver.clear();
+			i++;
+		}
+
+		String warehouse = hconf.get("hive.metastore.warehouse.dir");
+		String tableName = removeExt(resultFile.getName());
+		String directory = warehouse + "/" + tableName + "/";
+		String localDirectory = "tmp";
+
+		FileStatus[] files = dfs.listStatus(new Path(directory));
+		FileSystem lfs = null;
+		if (files == null) {
+			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+			files = lfs.listStatus(new Path(directory));
+		}
+
+		File resultDirectory = new File(localDirectory + "/" + tableName);
+		deleteDir(resultDirectory);
+		resultDirectory.mkdir();
+
+		for (FileStatus fs : files) {
+			Path src = fs.getPath();
+			if (src.getName().indexOf("crc") >= 0)
+				continue;
+
+			String destStr = localDirectory + "/" + tableName + "/"
+					+ src.getName();
+			Path dest = new Path(destStr);
+			if (lfs != null) {
+				lfs.copyToLocalFile(src, dest);
+				dfs.copyFromLocalFile(dest, new Path(directory));
+			} else
+				dfs.copyToLocalFile(src, dest);
+		}
+
+		File[] rFiles = resultDirectory.listFiles();
+		StringBuilder sb = new StringBuilder();
+		for (File r : rFiles) {
+			if (r.getName().indexOf("crc") >= 0)
+				continue;
+			readFileToString(r, sb);
+		}
+		deleteDir(resultDirectory);
+
+		StringBuilder buf = new StringBuilder();
+		readFileToString(resultFile, buf);
+		if (!equal(buf, sb)) {
+			throw new Exception("Result for " + queryFile + " changed:\n"
+					+ sw.toString());
+		}
+	}
+
+	private void deleteDir(File resultDirectory) {
+		if (resultDirectory.exists()) {
+			File[] rFiles = resultDirectory.listFiles();
+			for (File r : rFiles)
+				r.delete();
+			resultDirectory.delete();
+		}
+	}
+
+	private boolean equal(StringBuilder sb1, StringBuilder sb2) {
+		String s1 = sb1.toString();
+		String s2 = sb2.toString();
+		String[] rowsOne = s1.split("\n");
+		String[] rowsTwo = s2.split("\n");
+
+		if (rowsOne.length != rowsTwo.length)
+			return false;
+
+		for (int i = 0; i < rowsOne.length; i++) {
+			String row1 = rowsOne[i];
+			String row2 = rowsTwo[i];
+
+			if (row1.equals(row2))
+				continue;
+
+			String[] fields1 = row1.split("");
+			String[] fields2 = row2.split("");
+
+			for (int j = 0; j < fields1.length; j++) {
+				if (fields1[j].equals(fields2[j])) {
+					continue;
+				} else if (fields1[j].indexOf('.') < 0) {
+					return false;
+				} else {
+					Float float1 = Float.parseFloat(fields1[j]);
+					Float float2 = Float.parseFloat(fields2[j]);
+
+					if (Math.abs(float1 - float2) == 0)
+						continue;
+					else
+						return false;
+				}
+			}
+		}
+
+		return true;
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java
new file mode 100644
index 0000000..db13676
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java
@@ -0,0 +1,57 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class OptimizerTestCase extends AbstractHivesterixTestCase {
+	private File resultFile;
+
+	OptimizerTestCase(File queryFile, File resultFile) {
+		super("testOptimizer", queryFile);
+		this.queryFile = queryFile;
+		this.resultFile = resultFile;
+	}
+
+	@Test
+	public void testOptimizer() throws Exception {
+		StringBuilder queryString = new StringBuilder();
+		readFileToString(queryFile, queryString);
+		String[] queries = queryString.toString().split(";");
+		StringWriter sw = new StringWriter();
+
+		HiveConf hconf = ConfUtil.getHiveConf();
+		Driver driver = new Driver(hconf, new PrintWriter(sw));
+		driver.init();
+
+		int i = 0;
+		for (String query : queries) {
+			if (i == queries.length - 1)
+				break;
+			if (query.toLowerCase().indexOf("create") >= 0
+					|| query.toLowerCase().indexOf("drop") >= 0
+					|| query.toLowerCase().indexOf("set") >= 0
+					|| query.toLowerCase().startsWith("\n\ncreate")
+					|| query.toLowerCase().startsWith("\n\ndrop")
+					|| query.toLowerCase().startsWith("\n\nset"))
+				driver.run(query);
+			else
+				driver.compile(query);
+			driver.clear();
+			i++;
+		}
+		StringBuilder buf = new StringBuilder();
+		readFileToString(resultFile, buf);
+		if (!buf.toString().equals(sw.toString())) {
+			throw new Exception("Result for " + queryFile + " changed:\n"
+					+ sw.toString());
+		}
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java
new file mode 100644
index 0000000..217f67d
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java
@@ -0,0 +1,77 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.List;
+
+import junit.framework.Test;
+import junit.framework.TestResult;
+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;
+
+public class OptimizerTestSuitGenerator extends AbstractTestSuiteClass {
+	private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
+	private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
+	private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
+
+	private static final String FILE_EXTENSION_OF_RESULTS = "plan";
+
+	public static Test suite() throws UnsupportedEncodingException,
+			FileNotFoundException, IOException {
+		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+		File testData = new File(PATH_TO_QUERIES);
+		File[] queries = testData.listFiles();
+		OptimizerTestSuitGenerator testSuite = new OptimizerTestSuitGenerator();
+		// set hdfs and hyracks cluster, and load test data to hdfs
+		try {
+			testSuite.setup();
+			testSuite.loadData();
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw new IllegalStateException(e.getMessage());
+		}
+
+		for (File qFile : queries) {
+			if (isIgnored(qFile.getName(), ignores))
+				continue;
+
+			if (qFile.isFile()) {
+				String resultFileName = aqlExtToResExt(qFile.getName());
+				File rFile = new File(PATH_TO_RESULTS + resultFileName);
+				testSuite.addTest(new OptimizerTestSuiteCaseGenerator(qFile,
+						rFile));
+			}
+		}
+		return testSuite;
+	}
+
+	private static String aqlExtToResExt(String fname) {
+		int dot = fname.lastIndexOf('.');
+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+	}
+
+	/**
+	 * Runs the tests and collects their result in a TestResult.
+	 */
+	@Override
+	public void run(TestResult result) {
+
+		int testCount = countTestCases();
+		for (int i = 0; i < testCount; i++) {
+			Test each = this.testAt(i);
+			if (result.shouldStop())
+				break;
+			runTest(each, result);
+		}
+
+		// cleanup hdfs and hyracks cluster
+		try {
+			cleanup();
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw new IllegalStateException(e.getMessage());
+		}
+	}
+
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
new file mode 100644
index 0000000..e3a4a4e
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
@@ -0,0 +1,54 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.List;
+
+import junit.framework.Test;
+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;
+
+public class OptimizerTestSuite extends AbstractTestSuiteClass {
+
+	private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
+	private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
+	private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
+
+	private static final String FILE_EXTENSION_OF_RESULTS = "plan";
+
+	public static Test suite() throws UnsupportedEncodingException,
+			FileNotFoundException, IOException {
+		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+		File testData = new File(PATH_TO_QUERIES);
+		File[] queries = testData.listFiles();
+		OptimizerTestSuite testSuite = new OptimizerTestSuite();
+
+		// set hdfs and hyracks cluster, and load test data to hdfs
+		try {
+			testSuite.setup();
+			testSuite.loadData();
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw new IllegalStateException(e.getMessage());
+		}
+
+		for (File qFile : queries) {
+			if (isIgnored(qFile.getName(), ignores))
+				continue;
+
+			if (qFile.isFile() && qFile.getName().startsWith("h11_")) {
+				String resultFileName = hiveExtToResExt(qFile.getName());
+				File rFile = new File(PATH_TO_RESULTS + resultFileName);
+				testSuite.addTest(new OptimizerTestCase(qFile, rFile));
+			}
+		}
+		return testSuite;
+	}
+
+	private static String hiveExtToResExt(String fname) {
+		int dot = fname.lastIndexOf('.');
+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+	}
+
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java
new file mode 100644
index 0000000..a86dc29
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java
@@ -0,0 +1,53 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class OptimizerTestSuiteCaseGenerator extends AbstractHivesterixTestCase {
+	private File resultFile;
+
+	OptimizerTestSuiteCaseGenerator(File queryFile, File resultFile) {
+		super("testOptimizer", queryFile);
+		this.queryFile = queryFile;
+		this.resultFile = resultFile;
+	}
+
+	@Test
+	public void testOptimizer() throws Exception {
+		StringBuilder queryString = new StringBuilder();
+		readFileToString(queryFile, queryString);
+		String[] queries = queryString.toString().split(";");
+		StringWriter sw = new StringWriter();
+
+		HiveConf hconf = ConfUtil.getHiveConf();
+		Driver driver = new Driver(hconf, new PrintWriter(sw));
+		driver.init();
+
+		int i = 0;
+		for (String query : queries) {
+			if (i == queries.length - 1)
+				break;
+			if (query.toLowerCase().indexOf("create") >= 0
+					|| query.toLowerCase().indexOf("drop") >= 0
+					|| query.toLowerCase().indexOf("set") >= 0
+					|| query.toLowerCase().startsWith("\n\ncreate")
+					|| query.toLowerCase().startsWith("\n\ndrop")
+					|| query.toLowerCase().startsWith("\n\nset"))
+				driver.run(query);
+			else
+				driver.compile(query);
+			driver.clear();
+			i++;
+		}
+		sw.close();
+		writeStringToFile(resultFile, sw);
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java
new file mode 100644
index 0000000..078de9a
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java
@@ -0,0 +1,152 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class RuntimeFunctionTestCase extends AbstractHivesterixTestCase {
+	private File resultFile;
+	private FileSystem dfs;
+
+	RuntimeFunctionTestCase(File queryFile, File resultFile) {
+		super("testRuntimeFunction", queryFile);
+		this.queryFile = queryFile;
+		this.resultFile = resultFile;
+	}
+
+	@Test
+	public void testRuntimeFunction() throws Exception {
+		StringBuilder queryString = new StringBuilder();
+		readFileToString(queryFile, queryString);
+		String[] queries = queryString.toString().split(";");
+		StringWriter sw = new StringWriter();
+
+		HiveConf hconf = ConfUtil.getHiveConf();
+		Driver driver = new Driver(hconf, new PrintWriter(sw));
+		driver.init();
+		// Driver driver = new Driver(hconf);
+
+		dfs = FileSystem.get(ConfUtil.getJobConf());
+
+		int i = 0;
+		for (String query : queries) {
+			if (i == queries.length - 1)
+				break;
+			driver.run(query);
+			driver.clear();
+			i++;
+		}
+
+		String warehouse = hconf.get("hive.metastore.warehouse.dir");
+		String tableName = removeExt(resultFile.getName());
+		String directory = warehouse + "/" + tableName + "/";
+		String localDirectory = "tmp";
+
+		FileStatus[] files = dfs.listStatus(new Path(directory));
+		FileSystem lfs = null;
+		if (files == null) {
+			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+			files = lfs.listStatus(new Path(directory));
+		}
+
+		File resultDirectory = new File(localDirectory + "/" + tableName);
+		deleteDir(resultDirectory);
+		resultDirectory.mkdir();
+
+		for (FileStatus fs : files) {
+			Path src = fs.getPath();
+			if (src.getName().indexOf("crc") >= 0)
+				continue;
+
+			String destStr = localDirectory + "/" + tableName + "/"
+					+ src.getName();
+			Path dest = new Path(destStr);
+			if (lfs != null) {
+				lfs.copyToLocalFile(src, dest);
+				dfs.copyFromLocalFile(dest, new Path(directory));
+			} else
+				dfs.copyToLocalFile(src, dest);
+		}
+
+		File[] rFiles = resultDirectory.listFiles();
+		StringBuilder sb = new StringBuilder();
+		for (File r : rFiles) {
+			if (r.getName().indexOf("crc") >= 0)
+				continue;
+			readFileToString(r, sb);
+		}
+
+		StringBuilder buf = new StringBuilder();
+		readFileToString(resultFile, buf);
+		StringBuffer errorMsg = new StringBuffer();
+		if (!equal(buf, sb, errorMsg)) {
+			throw new Exception("Result for " + queryFile + " changed:\n"
+					+ errorMsg.toString());
+		}
+		deleteDir(resultDirectory);
+	}
+
+	private void deleteDir(File resultDirectory) {
+		if (resultDirectory.exists()) {
+			File[] rFiles = resultDirectory.listFiles();
+			for (File r : rFiles)
+				r.delete();
+			resultDirectory.delete();
+		}
+	}
+
+	private boolean equal(StringBuilder sb1, StringBuilder sb2,
+			StringBuffer errorMsg) {
+		String s1 = sb1.toString();
+		String s2 = sb2.toString();
+		String[] rowsOne = s1.split("\n");
+		String[] rowsTwo = s2.split("\n");
+
+		if (rowsOne.length != rowsTwo.length)
+			return false;
+
+		for (int i = 0; i < rowsOne.length; i++) {
+			String row1 = rowsOne[i];
+			String row2 = rowsTwo[i];
+
+			if (row1.equals(row2))
+				continue;
+
+			String[] fields1 = row1.split("");
+			String[] fields2 = row2.split("");
+
+			for (int j = 0; j < fields1.length; j++) {
+				if (fields1[j].equals(fields2[j])) {
+					continue;
+				} else if (fields1[j].indexOf('.') < 0) {
+					errorMsg.append("line " + i + " column " + j + ": "
+							+ fields2[j] + " expected " + fields1[j]);
+					return false;
+				} else {
+					Float float1 = Float.parseFloat(fields1[j]);
+					Float float2 = Float.parseFloat(fields2[j]);
+
+					if (Math.abs(float1 - float2) == 0)
+						continue;
+					else {
+						errorMsg.append("line " + i + " column " + j + ": "
+								+ fields2[j] + " expected " + fields1[j]);
+						return false;
+					}
+				}
+			}
+		}
+
+		return true;
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java
new file mode 100644
index 0000000..2093b1d
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;

+

+import java.io.File;

+import java.util.List;

+

+import junit.framework.Test;

+import junit.framework.TestResult;

+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;

+

+public class RuntimeFunctionTestSuite extends AbstractTestSuiteClass {

+

+	private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";

+	private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";

+	private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";

+

+	private static final String FILE_EXTENSION_OF_RESULTS = "result";

+

+	public static Test suite() throws Exception {

+		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);

+		File testData = new File(PATH_TO_QUERIES);

+		File[] queries = testData.listFiles();

+		RuntimeFunctionTestSuite testSuite = new RuntimeFunctionTestSuite();

+

+		// set hdfs and hyracks cluster, and load test data to hdfs

+		try {

+			testSuite.setup();

+			testSuite.loadData();

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new IllegalStateException(e.getMessage());

+		}

+

+		for (File qFile : queries) {

+			if (isIgnored(qFile.getName(), ignores))

+				continue;

+

+			if (qFile.isFile()) {

+				String resultFileName = hiveExtToResExt(qFile.getName());

+				File rFile = new File(PATH_TO_RESULTS + resultFileName);

+				testSuite.addTest(new RuntimeFunctionTestCase(qFile, rFile));

+			}

+		}

+		return testSuite;

+	}

+

+	private static String hiveExtToResExt(String fname) {

+		int dot = fname.lastIndexOf('.');

+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;

+	}

+

+	/**

+	 * Runs the tests and collects their result in a TestResult.

+	 */

+	@Override

+	public void run(TestResult result) {

+

+		int testCount = countTestCases();

+		for (int i = 0; i < testCount; i++) {

+			Test each = this.testAt(i);

+			if (result.shouldStop())

+				break;

+			runTest(each, result);

+		}

+

+		// cleanup hdfs and hyracks cluster

+		try {

+			cleanup();

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new IllegalStateException(e.getMessage());

+		}

+	}

+

+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java
new file mode 100644
index 0000000..1b45b41
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java
@@ -0,0 +1,101 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.runtime.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class RuntimeFunctionTestSuiteCaseGenerator extends
+		AbstractHivesterixTestCase {
+	private File resultFile;
+	private FileSystem dfs;
+
+	RuntimeFunctionTestSuiteCaseGenerator(File queryFile, File resultFile) {
+		super("testRuntimeFunction", queryFile);
+		this.queryFile = queryFile;
+		this.resultFile = resultFile;
+	}
+
+	@Test
+	public void testRuntimeFunction() throws Exception {
+		StringBuilder queryString = new StringBuilder();
+		readFileToString(queryFile, queryString);
+		String[] queries = queryString.toString().split(";");
+		StringWriter sw = new StringWriter();
+
+		HiveConf hconf = ConfUtil.getHiveConf();
+		Driver driver = new Driver(hconf, new PrintWriter(sw));
+		driver.init();
+
+		dfs = FileSystem.get(ConfUtil.getJobConf());
+
+		int i = 0;
+		for (String query : queries) {
+			if (i == queries.length - 1)
+				break;
+			driver.run(query);
+			driver.clear();
+			i++;
+		}
+
+		String warehouse = hconf.get("hive.metastore.warehouse.dir");
+		String tableName = removeExt(resultFile.getName());
+		String directory = warehouse + "/" + tableName + "/";
+		String localDirectory = "tmp";
+
+		FileStatus[] files = dfs.listStatus(new Path(directory));
+		FileSystem lfs = null;
+		if (files == null) {
+			lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+			files = lfs.listStatus(new Path(directory));
+		}
+
+		File resultDirectory = new File(localDirectory + "/" + tableName);
+		deleteDir(resultDirectory);
+		resultDirectory.mkdir();
+
+		for (FileStatus fs : files) {
+			Path src = fs.getPath();
+			if (src.getName().indexOf("crc") >= 0)
+				continue;
+
+			String destStr = localDirectory + "/" + tableName + "/"
+					+ src.getName();
+			Path dest = new Path(destStr);
+			if (lfs != null) {
+				lfs.copyToLocalFile(src, dest);
+				dfs.copyFromLocalFile(dest, new Path(directory));
+			} else
+				dfs.copyToLocalFile(src, dest);
+		}
+
+		File[] rFiles = resultDirectory.listFiles();
+		StringBuilder sb = new StringBuilder();
+		for (File r : rFiles) {
+			if (r.getName().indexOf("crc") >= 0)
+				continue;
+			readFileToString(r, sb);
+		}
+		deleteDir(resultDirectory);
+
+		writeStringToFile(resultFile, sb);
+	}
+
+	private void deleteDir(File resultDirectory) {
+		if (resultDirectory.exists()) {
+			File[] rFiles = resultDirectory.listFiles();
+			for (File r : rFiles)
+				r.delete();
+			resultDirectory.delete();
+		}
+	}
+}
diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java
new file mode 100644
index 0000000..a67f475
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java
@@ -0,0 +1,75 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;

+

+import java.io.File;

+import java.util.List;

+

+import junit.framework.Test;

+import junit.framework.TestResult;

+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;

+

+public class RuntimeFunctionTestSuiteGenerator extends AbstractTestSuiteClass {

+

+	private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";

+	private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";

+	private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";

+

+	private static final String FILE_EXTENSION_OF_RESULTS = "result";

+

+	public static Test suite() throws Exception {

+		List<String> ignores = getIgnoreList(PATH_TO_IGNORES);

+		File testData = new File(PATH_TO_QUERIES);

+		File[] queries = testData.listFiles();

+		RuntimeFunctionTestSuiteGenerator testSuite = new RuntimeFunctionTestSuiteGenerator();

+

+		// set hdfs and hyracks cluster, and load test data to hdfs

+		try {

+			testSuite.setup();

+			testSuite.loadData();

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new IllegalStateException(e.getMessage());

+		}

+

+		for (File qFile : queries) {

+			if (isIgnored(qFile.getName(), ignores))

+				continue;

+

+			if (qFile.isFile() && qFile.getName().startsWith("q16_")) {

+				String resultFileName = hiveExtToResExt(qFile.getName());

+				File rFile = new File(PATH_TO_RESULTS + resultFileName);

+				testSuite.addTest(new RuntimeFunctionTestSuiteCaseGenerator(

+						qFile, rFile));

+			}

+		}

+		return testSuite;

+	}

+

+	private static String hiveExtToResExt(String fname) {

+		int dot = fname.lastIndexOf('.');

+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;

+	}

+

+	/**

+	 * Runs the tests and collects their result in a TestResult.

+	 */

+	@Override

+	public void run(TestResult result) {

+

+		int testCount = countTestCases();

+		for (int i = 0; i < testCount; i++) {

+			Test each = this.testAt(i);

+			if (result.shouldStop())

+				break;

+			runTest(each, result);

+		}

+

+		// cleanup hdfs and hyracks cluster

+		try {

+			cleanup();

+		} catch (Exception e) {

+			e.printStackTrace();

+			throw new IllegalStateException(e.getMessage());

+		}

+	}

+

+}

diff --git a/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java
new file mode 100644
index 0000000..b5db432
--- /dev/null
+++ b/hivesterix/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.test.serde;
+
+import java.util.List;
+import java.util.Properties;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+
+/**
+ * TestLazySimpleSerDe.
+ * 
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
+public class SerDeTest extends TestCase {
+
+	/**
+	 * Test the LazySimpleSerDe class.
+	 */
+	public void testLazySimpleSerDe() throws Throwable {
+		try {
+			// Create the SerDe
+			LazySimpleSerDe serDe = new LazySimpleSerDe();
+			Configuration conf = new Configuration();
+			Properties tbl = createProperties();
+			serDe.initialize(conf, tbl);
+
+			LazySerDe outputSerde = new LazySerDe();
+			outputSerde.initialize(conf, tbl);
+
+			// Data
+			String s = "123\t456\t789\t1000\t5.3\thive and hadoop\t1\tqf";
+
+			byte[] bytes = s.getBytes();
+			Writable bytesWritable = new BytesWritable(bytes);
+
+			// Test
+			// deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+			Object row = serDe.deserialize(bytesWritable); // test my serde
+			StructObjectInspector simpleInspector = (StructObjectInspector) serDe
+					.getObjectInspector();
+			List<Object> fields = simpleInspector
+					.getStructFieldsDataAsList(row);
+			List<? extends StructField> fieldRefs = simpleInspector
+					.getAllStructFieldRefs();
+
+			int i = 0;
+			for (Object field : fields) {
+				BytesWritable fieldWritable = (BytesWritable) outputSerde
+						.serialize(field, fieldRefs.get(i)
+								.getFieldObjectInspector());
+				System.out.print(fieldWritable.getSize() + "|");
+				i++;
+			}
+
+			// Writable output = outputSerde.serialize(row, serDe
+			// .getObjectInspector());
+			// System.out.println(output);
+			//
+			// Object row2 = outputSerde.deserialize(output);
+			// Writable output2 = serDe.serialize(row2, outputSerde
+			// .getObjectInspector());
+			// System.out.println(output2);
+
+			// System.out.println(output);
+			// deserializeAndSerialize(outputSerde, t, s, expectedFieldsData);
+
+		} catch (Throwable e) {
+			e.printStackTrace();
+			throw e;
+		}
+	}
+
+	private void deserializeAndSerialize(SerDe serDe, Text t, String s,
+			Object[] expectedFieldsData) throws SerDeException {
+		// Get the row structure
+		StructObjectInspector oi = (StructObjectInspector) serDe
+				.getObjectInspector();
+		List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
+		assertEquals(8, fieldRefs.size());
+
+		// Deserialize
+		Object row = serDe.deserialize(t);
+		for (int i = 0; i < fieldRefs.size(); i++) {
+			Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
+			if (fieldData != null) {
+				fieldData = ((LazyPrimitive) fieldData).getWritableObject();
+			}
+			assertEquals("Field " + i, expectedFieldsData[i], fieldData);
+		}
+		// Serialize
+		assertEquals(Text.class, serDe.getSerializedClass());
+		Text serializedText = (Text) serDe.serialize(row, oi);
+		assertEquals("Serialized data", s, serializedText.toString());
+	}
+
+	private Properties createProperties() {
+		Properties tbl = new Properties();
+
+		// Set the configuration parameters
+		tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9");
+		tbl.setProperty("columns",
+				"abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
+		tbl.setProperty("columns.types",
+				"tinyint:smallint:int:bigint:double:string:int:string");
+		tbl.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
+		return tbl;
+	}
+
+	/**
+	 * Test the LazySimpleSerDe class with LastColumnTakesRest option.
+	 */
+	public void testLazySimpleSerDeLastColumnTakesRest() throws Throwable {
+		try {
+			// Create the SerDe
+			LazySimpleSerDe serDe = new LazySimpleSerDe();
+			Configuration conf = new Configuration();
+			Properties tbl = createProperties();
+			tbl.setProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+					"true");
+			serDe.initialize(conf, tbl);
+
+			// Data
+			Text t = new Text(
+					"123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
+			String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta\tb\t";
+			Object[] expectedFieldsData = { new ByteWritable((byte) 123),
+					new ShortWritable((short) 456), new IntWritable(789),
+					new LongWritable(1000), new DoubleWritable(5.3),
+					new Text("hive and hadoop"), null, new Text("a\tb\t") };
+
+			// Test
+			deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+
+		} catch (Throwable e) {
+			e.printStackTrace();
+			throw e;
+		}
+	}
+
+	/**
+	 * Test the LazySimpleSerDe class with extra columns.
+	 */
+	public void testLazySimpleSerDeExtraColumns() throws Throwable {
+		try {
+			// Create the SerDe
+			LazySimpleSerDe serDe = new LazySimpleSerDe();
+			Configuration conf = new Configuration();
+			Properties tbl = createProperties();
+			serDe.initialize(conf, tbl);
+
+			// Data
+			Text t = new Text(
+					"123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
+			String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta";
+			Object[] expectedFieldsData = { new ByteWritable((byte) 123),
+					new ShortWritable((short) 456), new IntWritable(789),
+					new LongWritable(1000), new DoubleWritable(5.3),
+					new Text("hive and hadoop"), null, new Text("a") };
+
+			// Test
+			deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+
+		} catch (Throwable e) {
+			e.printStackTrace();
+			throw e;
+		}
+	}
+
+	/**
+	 * Test the LazySimpleSerDe class with missing columns.
+	 */
+	public void testLazySimpleSerDeMissingColumns() throws Throwable {
+		try {
+			// Create the SerDe
+			LazySimpleSerDe serDe = new LazySimpleSerDe();
+			Configuration conf = new Configuration();
+			Properties tbl = createProperties();
+			serDe.initialize(conf, tbl);
+
+			// Data
+			Text t = new Text("123\t456\t789\t1000\t5.3\t");
+			String s = "123\t456\t789\t1000\t5.3\t\tNULL\tNULL";
+			Object[] expectedFieldsData = { new ByteWritable((byte) 123),
+					new ShortWritable((short) 456), new IntWritable(789),
+					new LongWritable(1000), new DoubleWritable(5.3),
+					new Text(""), null, null };
+
+			// Test
+			deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+
+		} catch (Throwable e) {
+			e.printStackTrace();
+			throw e;
+		}
+	}
+
+}
diff --git a/hivesterix/src/test/resources/log4j.properties b/hivesterix/src/test/resources/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/hivesterix/src/test/resources/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+# NOTE(review): log4j's actual property is spelled "log4j.threshold"; the
+# misspelled key below is silently ignored by log4j -- confirm whether a
+# threshold was intended and fix the key if so.
+log4j.threshhold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
diff --git a/hivesterix/src/test/resources/logging.properties b/hivesterix/src/test/resources/logging.properties
new file mode 100644
index 0000000..1cc34e1
--- /dev/null
+++ b/hivesterix/src/test/resources/logging.properties
@@ -0,0 +1,65 @@
+############################################################
+#  	Default Logging Configuration File
+#
+# You can use a different file by specifying a filename
+# with the java.util.logging.config.file system property.  
+# For example java -Djava.util.logging.config.file=myfile
+############################################################
+
+############################################################
+#  	Global properties
+############################################################
+
+# "handlers" specifies a comma separated list of log Handler 
+# classes.  These handlers will be installed during VM startup.
+# Note that these classes must be on the system classpath.
+# By default we only configure a ConsoleHandler, which will only
+# show messages at the INFO and above levels.
+
+handlers= java.util.logging.ConsoleHandler
+
+# To also add the FileHandler, use the following line instead.
+
+# handlers= java.util.logging.FileHandler, java.util.logging.ConsoleHandler
+
+# Default global logging level.
+# This specifies which kinds of events are logged across
+# all loggers.  For any given facility this global level
+# can be overridden by a facility-specific level
+# Note that the ConsoleHandler also has a separate level
+# setting to limit messages printed to the console.
+
+.level= WARNING
+# .level= INFO
+# .level= FINE
+# .level = FINEST
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# default file output is in user's home directory.
+
+# java.util.logging.FileHandler.pattern = %h/java%u.log
+# java.util.logging.FileHandler.limit = 50000
+# java.util.logging.FileHandler.count = 1
+# java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter
+
+# Limit the messages that are printed on the console to FINE and above.
+
+java.util.logging.ConsoleHandler.level = FINE
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+# For example, set the com.xyz.foo logger to only log SEVERE
+# messages:
+
+edu.uci.ics.asterix.level = WARNING
+edu.uci.ics.algebricks.level = WARNING
+edu.uci.ics.hyracks.level = WARNING
diff --git a/hivesterix/src/test/resources/optimizerts/hive/conf/hive-default.xml b/hivesterix/src/test/resources/optimizerts/hive/conf/hive-default.xml
new file mode 100644
index 0000000..d5d0149
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/hive/conf/hive-default.xml
@@ -0,0 +1,793 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+	<!-- Hive Configuration can either be stored in this file or in the hadoop 
+		configuration files -->
+	<!-- that are implied by Hadoop setup variables. -->
+	<!-- Aside from Hadoop setup variables - this file is provided as a convenience 
+		so that Hive -->
+	<!-- users do not have to edit hadoop configuration files (that may be managed 
+		as a centralized -->
+	<!-- resource). -->
+
+	<!-- Hive Execution Parameters -->
+	<property>
+		<name>mapred.reduce.tasks</name>
+		<value>-1</value>
+		<description>The default number of reduce tasks per job. Typically set
+			to a prime close to the number of available hosts. Ignored when
+			mapred.job.tracker is "local". Hadoop set this to 1 by default,
+			whereas hive uses -1 as its default value.
+			By setting this property to
+			-1, Hive will automatically figure out what
+			should be the number of
+			reducers.
+  </description>
+	</property>
+
+	<property>
+		<name>hive.hyracks.host</name>
+		<value>127.0.0.1</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.port</name>
+		<value>13099</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.app</name>
+		<value>hivesterix</value>
+	</property>
+
+
+	<property>
+		<name>hive.hyracks.parrallelism</name>
+		<value>2</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external</name>
+		<value>true</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.sort.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.framesize</name>
+		<value>768</value>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.bytes.per.reducer</name>
+		<value>1000000000</value>
+		<description>size per reducer. The default is 1G, i.e. if the input size
+			is 10G, it will use 10 reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.max</name>
+		<value>999</value>
+		<description>max number of reducers will be used. If the one
+			specified
+			in the configuration parameter mapred.reduce.tasks is
+			negative, hive
+			will use this one as the max number of reducers when
+			automatically
+			determine number of reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.scratchdir</name>
+		<value>/tmp/hive-${user.name}</value>
+		<description>Scratch space for Hive jobs</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode</name>
+		<value>false</value>
+		<description>whether hive is running in test mode. If yes, it turns on
+			sampling and prefixes the output tablename</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.prefix</name>
+		<value>test_</value>
+		<description>if hive is running in test mode, prefixes the output
+			table by this string</description>
+	</property>
+
+	<!-- If the input table is not bucketed, the denominator of the tablesample 
+		is determined by the parameter below -->
+	<!-- For example, the following query: -->
+	<!-- INSERT OVERWRITE TABLE dest -->
+	<!-- SELECT col1 from src -->
+	<!-- would be converted to -->
+	<!-- INSERT OVERWRITE TABLE test_dest -->
+	<!-- SELECT col1 from src TABLESAMPLE (BUCKET 1 out of 32 on rand(1)) -->
+	<property>
+		<name>hive.test.mode.samplefreq</name>
+		<value>32</value>
+		<description>if hive is running in test mode and table is not
+			bucketed, sampling frequency</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.nosamplelist</name>
+		<value></value>
+		<description>if hive is running in test mode, don't sample the above
+			comma-separated list of tables</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.local</name>
+		<value>true</value>
+		<description>controls whether to connect to a remote metastore server or
+			open a new metastore server in Hive Client JVM</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionURL</name>
+		<value>jdbc:derby:;databaseName=metastore_db;create=true</value>
+		<description>JDBC connect string for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionDriverName</name>
+		<value>org.apache.derby.jdbc.EmbeddedDriver</value>
+		<description>Driver class name for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.PersistenceManagerFactoryClass</name>
+		<value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+		<description>class implementing the jdo persistence</description>
+	</property>
+
+	<property>
+		<name>datanucleus.connectionPoolingType</name>
+		<value>DBCP</value>
+		<description>Uses a DBCP connection pool for JDBC metastore
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.DetachAllOnCommit</name>
+		<value>true</value>
+		<description>detaches all objects from session so that they can be
+			used after transaction is committed</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.NonTransactionalRead</name>
+		<value>true</value>
+		<description>reads outside of transactions</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionUserName</name>
+		<value>APP</value>
+		<description>username to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionPassword</name>
+		<value>mine</value>
+		<description>password to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateTables</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateColumns</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateConstraints</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.storeManagerType</name>
+		<value>rdbms</value>
+		<description>metadata store type</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoCreateSchema</name>
+		<value>true</value>
+		<description>creates necessary schema on a startup if one doesn't
+			exist. set this to false, after creating it once</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoStartMechanismMode</name>
+		<value>checked</value>
+		<description>throw exception if metadata tables are incorrect
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.transactionIsolation</name>
+		<value>read-committed</value>
+		<description>Default transaction isolation level for identity
+			generation. </description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2</name>
+		<value>false</value>
+		<description>Use a level 2 cache. Turn this off if metadata is changed
+			independently of hive metastore server</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2.type</name>
+		<value>SOFT</value>
+		<description>SOFT=soft reference based cache, WEAK=weak reference
+			based cache.</description>
+	</property>
+
+	<property>
+		<name>datanucleus.identifierFactory</name>
+		<value>datanucleus</value>
+		<description>Name of the identifier factory to use when generating
+			table/column names etc. 'datanucleus' is used for backward
+			compatibility</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.warehouse.dir</name>
+		<value>/tmp/hivesterix</value>
+		<description>location of default database for the warehouse
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.connect.retries</name>
+		<value>5</value>
+		<description>Number of retries while opening a connection to metastore
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.rawstore.impl</name>
+		<value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+		<description>Name of the class that implements
+			org.apache.hadoop.hive.metastore.rawstore interface. This class is
+			used to store and retrieval of raw metadata objects such as table,
+			database</description>
+	</property>
+
+	<property>
+		<name>hive.default.fileformat</name>
+		<value>TextFile</value>
+		<description>Default file format for CREATE TABLE statement. Options
+			are TextFile and SequenceFile. Users can explicitly say CREATE TABLE
+			... STORED AS &lt;TEXTFILE|SEQUENCEFILE&gt; to override</description>
+	</property>
+
+	<property>
+		<name>hive.fileformat.check</name>
+		<value>true</value>
+		<description>Whether to check file format or not when loading data
+			files</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr</name>
+		<value>true</value>
+		<description>Whether to use map-side aggregation in Hive Group By
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.skewindata</name>
+		<value>false</value>
+		<description>Whether there is skew in data to optimize group by
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.mapaggr.checkinterval</name>
+		<value>100000</value>
+		<description>Number of rows after which size of the grouping
+			keys/aggregation classes is performed</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.local.mem</name>
+		<value>0</value>
+		<description>For local mode, memory of the mappers/reducers
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.percentmemory</name>
+		<value>0.5</value>
+		<description>Portion of total memory to be used by map-side group
+			aggregation hash table</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.min.reduction</name>
+		<value>0.5</value>
+		<description>Hash aggregation will be turned off if the ratio between
+			hash
+			table size and input rows is bigger than this number. Set to 1 to
+			make
+			sure
+			hash aggregation is never turned off.</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.cp</name>
+		<value>true</value>
+		<description>Whether to enable column pruner</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.ppd</name>
+		<value>true</value>
+		<description>Whether to enable predicate pushdown</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.pruner</name>
+		<value>true</value>
+		<description>Whether to enable the new partition pruner which depends
+			on predicate pushdown. If this is disabled,
+			the old partition pruner
+			which is based on AST will be enabled.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.groupby</name>
+		<value>true</value>
+		<description>Whether to enable the bucketed group by from bucketed
+			partitions/tables.</description>
+	</property>
+
+	<property>
+		<name>hive.join.emit.interval</name>
+		<value>1000</value>
+		<description>How many rows in the right-most join operand Hive should
+			buffer before emitting the join result. </description>
+	</property>
+
+	<property>
+		<name>hive.join.cache.size</name>
+		<value>25000</value>
+		<description>How many rows in the joining tables (except the streaming
+			table) should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.bucket.cache.size</name>
+		<value>100</value>
+		<description>How many values in each keys in the map-joined table
+			should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.maxsize</name>
+		<value>100000</value>
+		<description>Maximum # of rows of the small table that can be handled
+			by map-side join. If the size is reached and hive.task.progress is
+			set, a fatal error counter is set and the job will be killed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.cache.numrows</name>
+		<value>25000</value>
+		<description>How many rows should be cached by jdbm for map join.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.skewjoin</name>
+		<value>false</value>
+		<description>Whether to enable skew join optimization. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.key</name>
+		<value>100000</value>
+		<description>Determine if we get a skew key in join. If we see more
+			than the specified number of rows with the same key in join operator,
+			we think the key as a skew join key. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.map.tasks</name>
+		<value>10000</value>
+		<description> Determine the number of map tasks used in the follow up
+			map join job
+			for a skew join. It should be used together with
+			hive.skewjoin.mapjoin.min.split
+			to perform a fine grained control.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.min.split</name>
+		<value>33554432</value>
+		<description> Determine the number of map tasks at most used in the
+			follow up map join job
+			for a skew join by specifying the minimum split
+			size. It should be used
+			together with
+			hive.skewjoin.mapjoin.map.tasks
+			to perform a fine grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.mode</name>
+		<value>nonstrict</value>
+		<description>The mode in which the hive operations are being
+			performed. In strict mode, some risky queries are not allowed to run
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.maxerrsize</name>
+		<value>100000</value>
+		<description>Maximum number of bytes a script is allowed to emit to
+			standard error (per map-reduce task). This prevents runaway scripts
+			from filling logs partitions to capacity </description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.allow.partial.consumption</name>
+		<value>false</value>
+		<description> When enabled, this option allows a user script to exit
+			successfully without consuming all the data from the standard input.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.operator.id.env.var</name>
+		<value>HIVE_SCRIPT_OPERATOR_ID</value>
+		<description> Name of the environment variable that holds the unique
+			script operator ID in the user's transform function (the custom
+			mapper/reducer that the user has specified in the query)
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.output</name>
+		<value>false</value>
+		<description> This controls whether the final outputs of a query (to a
+			local/hdfs file or a hive table) is compressed. The compression codec
+			and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.intermediate</name>
+		<value>false</value>
+		<description> This controls whether intermediate files produced by
+			hive between multiple map-reduce jobs are compressed. The compression
+			codec and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel</name>
+		<value>false</value>
+		<description>Whether to execute jobs in parallel</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel.thread.number</name>
+		<value>8</value>
+		<description>How many jobs at most can be executed in parallel
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.war.file</name>
+		<value>lib\hive-hwi-0.7.0.war</value>
+		<description>This sets the path to the HWI war file, relative to
+			${HIVE_HOME}. </description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.host</name>
+		<value>0.0.0.0</value>
+		<description>This is the host address the Hive Web Interface will
+			listen on</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.port</name>
+		<value>9999</value>
+		<description>This is the port the Hive Web Interface will listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.pre.hooks</name>
+		<value></value>
+		<description>Pre Execute Hook for Tests</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapfiles</name>
+		<value>true</value>
+		<description>Merge small files at the end of a map-only job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapredfiles</name>
+		<value>false</value>
+		<description>Merge small files at the end of a map-reduce job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.heartbeat.interval</name>
+		<value>1000</value>
+		<description>Send a heartbeat after this interval - used by mapjoin
+			and filter operators</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.per.task</name>
+		<value>256000000</value>
+		<description>Size of merged files at the end of the job</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.smallfiles.avgsize</name>
+		<value>16000000</value>
+		<description>When the average output file size of a job is less than
+			this number, Hive will start an additional map-reduce job to merge
+			the output files into bigger files. This is only done for map-only
+			jobs if hive.merge.mapfiles is true, and for map-reduce jobs if
+			hive.merge.mapredfiles is true.</description>
+	</property>
+
+	<property>
+		<name>hive.script.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive Transform/Map/Reduce Clause should
+			automatically send progress information to TaskTracker to avoid the
+			task getting killed because of inactivity. Hive sends progress
+			information when the script is outputting to stderr. This option
+			removes the need of periodically producing stderr messages, but users
+			should be cautious because this may prevent infinite loops in the
+			scripts to be killed by TaskTracker.  </description>
+	</property>
+
+	<property>
+		<name>hive.script.serde</name>
+		<value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+		<description>The default serde for transmitting input data to and
+			reading output data from the user scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordreader</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+		<description>The default record reader for reading data from the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordwriter</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
+		<description>The default record writer for writing data to the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.input.format</name>
+		<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
+		<description>The default input format, if it is not specified, the
+			system assigns it. It is set to HiveInputFormat for hadoop versions
+			17, 18 and 19, whereas it is set to CombinedHiveInputFormat for
+			hadoop 20. The user can always overwrite it - if there is a bug in
+			CombinedHiveInputFormat, it can always be manually set to
+			HiveInputFormat. </description>
+	</property>
+
+	<property>
+		<name>hive.udtf.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive should automatically send progress
+			information to TaskTracker when using UDTF's to prevent the task
+			getting killed because of inactivity. Users should be cautious
+			because this may prevent TaskTracker from killing tasks with infinite
+			loops.  </description>
+	</property>
+
+	<property>
+		<name>hive.mapred.reduce.tasks.speculative.execution</name>
+		<value>true</value>
+		<description>Whether speculative execution for reducers should be
+			turned on. </description>
+	</property>
+
+	<property>
+		<name>hive.exec.counters.pull.interval</name>
+		<value>1000</value>
+		<description>The interval with which to poll the JobTracker for the
+			counters of the running job. The smaller it is the more load there
+			will be on the jobtracker; the higher it is the less granular the
+			captured counter data will be.</description>
+
+	<property>
+		<name>hive.enforce.bucketing</name>
+		<value>false</value>
+		<description>Whether bucketing is enforced. If true, while inserting
+			into the table, bucketing is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.enforce.sorting</name>
+		<value>false</value>
+		<description>Whether sorting is enforced. If true, while inserting
+			into the table, sorting is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.connection.url.hook</name>
+		<value></value>
+		<description>Name of the hook to use for retrieving the JDO connection
+			URL. If empty, the value in javax.jdo.option.ConnectionURL is used
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.attempts</name>
+		<value>1</value>
+		<description>The number of times to retry a metastore call if there
+			were a connection error</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.interval</name>
+		<value>1000</value>
+		<description>The number of milliseconds between metastore retry
+			attempts</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.min.threads</name>
+		<value>200</value>
+		<description>Minimum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.max.threads</name>
+		<value>100000</value>
+		<description>Maximum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.tcp.keepalive</name>
+		<value>true</value>
+		<description>Whether to enable TCP keepalive for the metastore server.
+			Keepalive will prevent accumulation of half-open connections.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.reducededuplication</name>
+		<value>true</value>
+		<description>Remove extra map-reduce jobs if the data is already
+			clustered by the same key which needs to be used again. This should
+			always be set to true. Since it is a new feature, it has been made
+			configurable.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition</name>
+		<value>false</value>
+		<description>Whether or not to allow dynamic partitions in DML/DDL.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition.mode</name>
+		<value>strict</value>
+		<description>In strict mode, the user must specify at least one static
+			partition in case the user accidentally overwrites all partitions.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions</name>
+		<value>1000</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in total.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions.pernode</name>
+		<value>100</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in each mapper/reducer node.</description>
+	</property>
+
+	<property>
+		<name>hive.default.partition.name</name>
+		<value>__HIVE_DEFAULT_PARTITION__</value>
+		<description>The default partition name in case the dynamic partition
+			column value is null/empty string or any other values that cannot be
+			escaped. This value must not contain any special character used in
+			HDFS URI (e.g., ':', '%', '/' etc). The user has to be aware that the
+			dynamic partition value should not contain this value to avoid
+			confusions.</description>
+	</property>
+
+	<property>
+		<name>fs.har.impl</name>
+		<value>org.apache.hadoop.hive.shims.HiveHarFileSystem</value>
+		<description>The implementation for accessing Hadoop Archives. Note
+			that this won't be applicable to Hadoop vers less than 0.20
+		</description>
+	</property>
+
+	<property>
+		<name>hive.archive.enabled</name>
+		<value>false</value>
+		<description>Whether archiving operations are permitted</description>
+	</property>
+
+	<property>
+		<name>hive.archive.har.parentdir.settable</name>
+		<value>false</value>
+		<description>In new Hadoop versions, the parent directory must be set
+			while
+			creating a HAR. Because this functionality is hard to detect
+			with just
+			version
+			numbers, this conf var needs to be set manually.
+		</description>
+	</property>
+
+	<!-- HBase Storage Handler Parameters -->
+
+	<property>
+		<name>hive.hbase.wal.enabled</name>
+		<value>true</value>
+		<description>Whether writes to HBase should be forced to the
+			write-ahead log. Disabling this improves HBase write performance at
+			the risk of lost writes in case of a crash.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.drop.ignorenonexistent</name>
+		<value>true</value>
+		<description>drop table always works.</description>
+	</property>
+
+</configuration>
diff --git a/hivesterix/src/test/resources/optimizerts/ignore.txt b/hivesterix/src/test/resources/optimizerts/ignore.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/ignore.txt
diff --git a/hivesterix/src/test/resources/optimizerts/queries/h11_share_scan.hive b/hivesterix/src/test/resources/optimizerts/queries/h11_share_scan.hive
new file mode 100644
index 0000000..a5c46c6
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/h11_share_scan.hive
@@ -0,0 +1,10 @@
+-- union case: both subqueries are map jobs on same input, followed by filesink
+DROP TABLE IF EXISTS src;
+
+CREATE TABLE src(key int, value int);
+CREATE TABLE src1(key int, value int);
+CREATE TABLE src2(key int);
+
+FROM src
+INSERT overwrite table src1 select * where key < 5
+INSERT overwrite table src2 select key where key > 10;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/h12_select_struct.hive b/hivesterix/src/test/resources/optimizerts/queries/h12_select_struct.hive
new file mode 100644
index 0000000..24ca265
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/h12_select_struct.hive
@@ -0,0 +1,6 @@
+-- struct-select case: projects a struct-typed column from a single table scan
+DROP TABLE IF EXISTS src;
+
+CREATE TABLE src(key int, value struct<v1:int, v2:int>);
+
+select value from src;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q10_returned_item.hive b/hivesterix/src/test/resources/optimizerts/queries/q10_returned_item.hive
new file mode 100644
index 0000000..3f1214a
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q10_returned_item.hive
@@ -0,0 +1,37 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q10_returned_item;
+
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q11_important_stock.hive b/hivesterix/src/test/resources/optimizerts/queries/q11_important_stock.hive
new file mode 100644
index 0000000..8550b72
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q11_important_stock.hive
@@ -0,0 +1,47 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q11_important_stock;
+DROP TABLE IF EXISTS q11_part_tmp;
+DROP TABLE IF EXISTS q11_sum_tmp;
+
+-- create external tables over the pre-loaded TPC-H data files (no data is copied)
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+
+-- create the result table and intermediate tables
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q12_shipping.hive b/hivesterix/src/test/resources/optimizerts/queries/q12_shipping.hive
new file mode 100644
index 0000000..062f7b9
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q12_shipping.hive
@@ -0,0 +1,42 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority ='1-URGENT'
+         or o_orderpriority ='2-HIGH'
+    then 1
+    else 0
+end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q13_customer_distribution.hive b/hivesterix/src/test/resources/optimizerts/queries/q13_customer_distribution.hive
new file mode 100644
index 0000000..a799008
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q13_customer_distribution.hive
@@ -0,0 +1,27 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q14_promotion_effect.hive b/hivesterix/src/test/resources/optimizerts/queries/q14_promotion_effect.hive
new file mode 100644
index 0000000..988f400
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q14_promotion_effect.hive
@@ -0,0 +1,28 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q15_top_supplier.hive b/hivesterix/src/test/resources/optimizerts/queries/q15_top_supplier.hive
new file mode 100644
index 0000000..04064ed
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q15_top_supplier.hive
@@ -0,0 +1,45 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS revenue;
+DROP TABLE IF EXISTS max_revenue;
+DROP TABLE IF EXISTS q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q16_parts_supplier_relationship.hive b/hivesterix/src/test/resources/optimizerts/queries/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..971ef99
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q16_parts_supplier_relationship.hive
@@ -0,0 +1,53 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS q16_parts_supplier_relationship;
+DROP TABLE IF EXISTS q16_tmp;
+DROP TABLE IF EXISTS supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create the result table and intermediate tables
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q17_small_quantity_order_revenue.hive b/hivesterix/src/test/resources/optimizerts/queries/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..65291cd
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,38 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q17_small_quantity_order_revenue;
+DROP TABLE IF EXISTS lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table and intermediate table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q18_large_volume_customer.hive b/hivesterix/src/test/resources/optimizerts/queries/q18_large_volume_customer.hive
new file mode 100644
index 0000000..76d0475
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q18_large_volume_customer.hive
@@ -0,0 +1,43 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q18_tmp;
+DROP TABLE IF EXISTS q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q19_discounted_revenue.hive b/hivesterix/src/test/resources/optimizerts/queries/q19_discounted_revenue.hive
new file mode 100644
index 0000000..fd330cd
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q19_discounted_revenue.hive
@@ -0,0 +1,49 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  	part p join lineitem l
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+	and p_container REGEXP 'SM CASE||SM BOX||SM PACK||SM PKG'
+	and l_quantity >= 1 and l_quantity <= 11
+	and p_size >= 1 and p_size <= 5
+	and l_shipmode REGEXP 'AIR||AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  ) 
+  or 
+  (
+    p_brand = 'Brand#23'
+	and p_container REGEXP 'MED BAG||MED BOX||MED PKG||MED PACK'
+	and l_quantity >= 10 and l_quantity <= 20
+	and p_size >= 1 and p_size <= 10
+	and l_shipmode REGEXP 'AIR||AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+	p_brand = 'Brand#34'
+	and p_container REGEXP 'LG CASE||LG BOX||LG PACK||LG PKG'
+	and l_quantity >= 20 and l_quantity <= 30
+	and p_size >= 1 and p_size <= 15
+	and l_shipmode REGEXP 'AIR||AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q1_pricing_summary_report.hive b/hivesterix/src/test/resources/optimizerts/queries/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..a002068
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q1_pricing_summary_report;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q20_potential_part_promotion.hive b/hivesterix/src/test/resources/optimizerts/queries/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..63297e6
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q20_potential_part_promotion.hive
@@ -0,0 +1,78 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q20_tmp1;
+DROP TABLE IF EXISTS q20_tmp2;
+DROP TABLE IF EXISTS q20_tmp3;
+DROP TABLE IF EXISTS q20_tmp4;
+DROP TABLE IF EXISTS q20_potential_part_promotion;
+
+-- create tables and load data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+
+-- create the result table and intermediate tables
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  supplier s join nation n
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/src/test/resources/optimizerts/queries/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..a467f60
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,74 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q21_tmp1;
+DROP TABLE IF EXISTS q21_tmp2;
+DROP TABLE IF EXISTS q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+(select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+ from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  )c
+group by s_name
+order by numwait desc, s_name
+limit 100;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q22_global_sales_opportunity.hive b/hivesterix/src/test/resources/optimizerts/queries/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..a7d6c72
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q22_global_sales_opportunity.hive
@@ -0,0 +1,70 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q22_customer_tmp;
+DROP TABLE IF EXISTS q22_customer_tmp1;
+DROP TABLE IF EXISTS q22_orders_tmp;
+DROP TABLE IF EXISTS q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q2_minimum_cost_supplier.hive b/hivesterix/src/test/resources/optimizerts/queries/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..061c5e7
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q2_minimum_cost_supplier.hive
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp1;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on  
+s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS' ;
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q3_shipping_priority.hive b/hivesterix/src/test/resources/optimizerts/queries/q3_shipping_priority.hive
new file mode 100644
index 0000000..0049eb3
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q3_shipping_priority.hive
@@ -0,0 +1,29 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q3_shipping_priority;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+Insert overwrite table q3_shipping_priority 
+select 
+  l_orderkey, (l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority 
+from 
+  customer c join orders o 
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey 
+  join lineitem l 
+    on l.l_orderkey = o.o_orderkey and l.l_linenumber<3
+-- group by l_orderkey, o_orderdate, o_shippriority 
+order by revenue desc
+limit 10;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q4_order_priority.hive b/hivesterix/src/test/resources/optimizerts/queries/q4_order_priority.hive
new file mode 100644
index 0000000..aa828e9
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q4_order_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q4_order_priority_tmp;
+DROP TABLE IF EXISTS q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q5_local_supplier_volume.hive b/hivesterix/src/test/resources/optimizerts/queries/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..a975ce1
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q5_local_supplier_volume.hive
@@ -0,0 +1,42 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q6_forecast_revenue_change.hive b/hivesterix/src/test/resources/optimizerts/queries/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..d8cb9b9
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q6_forecast_revenue_change.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q7_volume_shipping.hive b/hivesterix/src/test/resources/optimizerts/queries/q7_volume_shipping.hive
new file mode 100644
index 0000000..3dfb22a
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q7_volume_shipping.hive
@@ -0,0 +1,71 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q7_volume_shipping;
+DROP TABLE IF EXISTS q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    UNION ALL
+select 
+  n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+  n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q8_national_market_share.hive b/hivesterix/src/test/resources/optimizerts/queries/q8_national_market_share.hive
new file mode 100644
index 0000000..5e7baaa
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q8_national_market_share.hive
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+select 
+  year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+  n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from 
+                (select o_orderdate, o_orderkey 
+                 from orders o join
+                   (select c.c_custkey 
+                    from customer c join
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 on c.c_nationkey = n11.n_nationkey
+                    ) c1 on c1.c_custkey = o.o_custkey
+                 ) o1 join lineitem l on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate < '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/q9_product_type_profit.hive b/hivesterix/src/test/resources/optimizerts/queries/q9_product_type_profit.hive
new file mode 100644
index 0000000..586779c
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/q9_product_type_profit.hive
@@ -0,0 +1,51 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+select 
+  n_name as nation, year(o_orderdate) as o_year, 
+  l_extendedprice * (1 - l_discount) -  ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  )profit
+group by nation, o_year
+order by nation, o_year desc;
+
diff --git a/hivesterix/src/test/resources/optimizerts/queries/u10_nestedloop_join.hive b/hivesterix/src/test/resources/optimizerts/queries/u10_nestedloop_join.hive
new file mode 100644
index 0000000..ce94ac6
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u10_nestedloop_join.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS u10_nestedloop_join;
+
+-- create tables and load data
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target table
+create table u10_nestedloop_join(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+-- the query
+insert overwrite table u10_nestedloop_join
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 where n1.n_nationkey > n2.n_nationkey
+) a;
\ No newline at end of file
diff --git a/hivesterix/src/test/resources/optimizerts/queries/u1_group_by.hive b/hivesterix/src/test/resources/optimizerts/queries/u1_group_by.hive
new file mode 100644
index 0000000..1d5c312
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u1_group_by.hive
@@ -0,0 +1,12 @@
+drop table IF EXISTS lineitem;
+
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, 
+L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, 
+L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, 
+L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS 
+TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+select * from (select sum(abs(L_QUANTITY)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX))
+FROM lineitem WHERE L_SHIPDATE<='1998-09-02'  GROUP BY L_RETURNFLAG) T;
+
+drop table lineitem;
diff --git a/hivesterix/src/test/resources/optimizerts/queries/u2_select-project.hive b/hivesterix/src/test/resources/optimizerts/queries/u2_select-project.hive
new file mode 100644
index 0000000..1cf0c36
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u2_select-project.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;

+drop table IF EXISTS result;

+

+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';

+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);

+

+select (2*s_suppkey), s_address, s_nationkey,  s_name   FROM supplier where S_SUPPKEY*2 < 20;

diff --git a/hivesterix/src/test/resources/optimizerts/queries/u3_union.hive b/hivesterix/src/test/resources/optimizerts/queries/u3_union.hive
new file mode 100644
index 0000000..1c84ba8
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u3_union.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;

+drop table IF EXISTS result;

+

+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';

+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);

+

+select * from (select (2*s_suppkey), s_address, s_nationkey,  s_name  FROM supplier where S_SUPPKEY*2 < 20 union all select (2*s_suppkey), s_address, s_nationkey,  s_name   FROM supplier where S_SUPPKEY*2 > 50) t;

diff --git a/hivesterix/src/test/resources/optimizerts/queries/u4_join.hive b/hivesterix/src/test/resources/optimizerts/queries/u4_join.hive
new file mode 100644
index 0000000..c013fa6
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u4_join.hive
@@ -0,0 +1,14 @@
+drop table IF EXISTS supplier;

+drop table IF EXISTS result;

+

+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, 

+S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED

+BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';

+

+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);

+

+insert overwrite table result select s_suppkey, s_name, s_address, s_nationkey

+from supplier where S_SUPPKEY*2 < 20;

+

+select result.s_suppkey, supplier.s_phone, supplier.s_acctbal,

+supplier.s_comment from result join supplier on result.s_suppkey=supplier.s_suppkey;

diff --git a/hivesterix/src/test/resources/optimizerts/queries/u5_lateral_view.hive b/hivesterix/src/test/resources/optimizerts/queries/u5_lateral_view.hive
new file mode 100644
index 0000000..2740bca
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u5_lateral_view.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;

+drop table IF EXISTS result;

+

+create table supplier (S_SUPPKEY array<int>, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING);

+create table result (S_SUPPKEY int);

+

+select s_name, s_address, col1 from supplier LATERAL VIEW explode(s_suppkey) suppadd as col1;

diff --git a/hivesterix/src/test/resources/optimizerts/queries/u6_limit.hive b/hivesterix/src/test/resources/optimizerts/queries/u6_limit.hive
new file mode 100644
index 0000000..b268aff
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u6_limit.hive
@@ -0,0 +1,8 @@
+drop table IF EXISTS orders;

+drop table IF EXISTS result;

+drop table IF EXISTS q_limit2;

+

+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';

+create table q_limit2(col1 int, col2 double, col3 string, col4 string);

+

+insert overwrite table q_limit2 select O_ORDERKEY, O_TOTALPRICE, O_ORDERDATE, O_CLERK from orders where O_TOTALPRICE<10000 order by o_totalprice limit 4;
\ No newline at end of file
diff --git a/hivesterix/src/test/resources/optimizerts/queries/u7_multi_join.hive b/hivesterix/src/test/resources/optimizerts/queries/u7_multi_join.hive
new file mode 100644
index 0000000..2891c56
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u7_multi_join.hive
@@ -0,0 +1,9 @@
+drop table IF EXISTS lineitem;

+drop table IF EXISTS orders;

+drop table IF EXISTS customer;

+

+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';

+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';

+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';

+

+select l_linenumber, o_orderkey, o_totalprice, o_orderdate, o_shippriority from  customer c join orders o  on c.c_custkey = o.o_custkey join lineitem l    on o.o_orderkey = l.l_orderkey  where c.c_custkey<5 and o.o_totalprice<30000;

diff --git a/hivesterix/src/test/resources/optimizerts/queries/u8_non_mapred.hive b/hivesterix/src/test/resources/optimizerts/queries/u8_non_mapred.hive
new file mode 100644
index 0000000..247f2c1
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u8_non_mapred.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;

+drop table IF EXISTS result;

+

+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';

+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);

+

+select * FROM supplier;

diff --git a/hivesterix/src/test/resources/optimizerts/queries/u9_order_by.hive b/hivesterix/src/test/resources/optimizerts/queries/u9_order_by.hive
new file mode 100644
index 0000000..8d5d1cf
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/queries/u9_order_by.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;

+drop table IF EXISTS result;

+

+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';

+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);

+

+insert overwrite table result select s_suppkey, s_name, s_address, s_nationkey  FROM supplier where S_SUPPKEY*2 < 20 order by s_name;

diff --git a/hivesterix/src/test/resources/optimizerts/results/h11_share_scan.plan b/hivesterix/src/test/resources/optimizerts/results/h11_share_scan.plan
new file mode 100644
index 0000000..867bfaf
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/h11_share_scan.plan
@@ -0,0 +1,34 @@
+write [%0->$$1, %0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  select (function-call: algebricks:lt, Args:[%0->$$1, 5])
+  -- STREAM_SELECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      replicate 
+      -- SPLIT  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan []<-[$$1, $$2] <- default.src
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$1])
+  -- STREAM_PROJECT  |PARTITIONED|
+    select (function-call: algebricks:gt, Args:[%0->$$1, 10])
+    -- STREAM_SELECT  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        replicate 
+        -- SPLIT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan []<-[$$1, $$2] <- default.src
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/h12_select_struct.plan b/hivesterix/src/test/resources/optimizerts/results/h12_select_struct.plan
new file mode 100644
index 0000000..8bbfb61
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/h12_select_struct.plan
@@ -0,0 +1,10 @@
+write [%0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    data-scan [$$2]<-[$$1, $$2] <- default.src
+    -- DATASOURCE_SCAN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        empty-tuple-source
+        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q10_returned_item.plan b/hivesterix/src/test/resources/optimizerts/results/q10_returned_item.plan
new file mode 100644
index 0000000..05b3718
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q10_returned_item.plan
@@ -0,0 +1,102 @@
+write [%0->$$38, %0->$$39, %0->$$45, %0->$$40, %0->$$42, %0->$$43, %0->$$41, %0->$$44]
+-- SINK_WRITE  |UNPARTITIONED|
+  project ([$$38, $$39, $$45, $$40, $$42, $$43, $$41, $$44])
+  -- STREAM_PROJECT  |PARTITIONED|
+    limit 20
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      limit 20
+      -- STREAM_LIMIT  |UNPARTITIONED|
+        exchange 
+        -- SORT_MERGE_EXCHANGE [$$45(DESC) ]  |PARTITIONED|
+          limit 20
+          -- STREAM_LIMIT  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+              order (DESC, %0->$$45) 
+              -- STABLE_SORT [$$45(DESC)]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  group by ([$$38 := %0->$$48; $$39 := %0->$$49; $$40 := %0->$$50; $$41 := %0->$$51; $$42 := %0->$$52; $$43 := %0->$$53; $$44 := %0->$$54]) decor ([]) {
+                            aggregate [$$45] <- [function-call: hive:sum(FINAL), Args:[%0->$$47]]
+                            -- AGGREGATE  |LOCAL|
+                              nested tuple source
+                              -- NESTED_TUPLE_SOURCE  |LOCAL|
+                         }
+                  -- EXTERNAL_GROUP_BY[$$48, $$49, $$50, $$51, $$52, $$53, $$54]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$48, $$49, $$50, $$51, $$52, $$53, $$54]  |PARTITIONED|
+                      group by ([$$48 := %0->$$21; $$49 := %0->$$22; $$50 := %0->$$26; $$51 := %0->$$25; $$52 := %0->$$18; $$53 := %0->$$23; $$54 := %0->$$28]) decor ([]) {
+                                aggregate [$$47] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]]]
+                                -- AGGREGATE  |LOCAL|
+                                  nested tuple source
+                                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+                             }
+                      -- EXTERNAL_GROUP_BY[$$21, $$22, $$26, $$25, $$18, $$23, $$28]  |LOCAL|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$21, $$22, $$23, $$25, $$26, $$28, $$18, $$6, $$7])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$29])
+                              -- HYBRID_HASH_JOIN [$$1][$$29]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                  project ([$$1, $$6, $$7])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:eq, Args:[%0->$$9, R])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$1, $$6, $$7, $$9]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$29]  |PARTITIONED|
+                                  project ([$$29, $$21, $$22, $$23, $$25, $$26, $$28, $$18])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$17, %0->$$24])
+                                      -- HYBRID_HASH_JOIN [$$17][$$24]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$17]  |PARTITIONED|
+                                          data-scan [$$17, $$18]<-[$$17, $$18, $$19, $$20] <- default.nation
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$24]  |PARTITIONED|
+                                          project ([$$24, $$21, $$22, $$23, $$25, $$26, $$28, $$29])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              join (function-call: algebricks:eq, Args:[%0->$$30, %0->$$21])
+                                              -- HYBRID_HASH_JOIN [$$30][$$21]  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$30]  |PARTITIONED|
+                                                  project ([$$30, $$29])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$33, 1994-01-01], function-call: algebricks:ge, Args:[%0->$$33, 1993-10-01], function-call: algebricks:lt, Args:[%0->$$33, 1994-01-01]])
+                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                      exchange 
+                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                        data-scan [$$33, $$29, $$30]<-[$$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36, $$37] <- default.orders
+                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                          exchange 
+                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                            empty-tuple-source
+                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+                                                  data-scan [$$21, $$22, $$23, $$24, $$25, $$26, $$28]<-[$$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28] <- default.customer
+                                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      empty-tuple-source
+                                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q11_important_stock.plan b/hivesterix/src/test/resources/optimizerts/results/q11_important_stock.plan
new file mode 100644
index 0000000..70ad7ee
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q11_important_stock.plan
@@ -0,0 +1,126 @@
+write [%0->$$17, %0->$$18]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$21]) decor ([]) {
+                aggregate [$$18] <- [function-call: hive:sum(FINAL), Args:[%0->$$20]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$21]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+          group by ([$$21 := %0->$$1]) decor ([]) {
+                    aggregate [$$20] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$4, %0->$$3]]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$1]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              project ([$$1, $$3, $$4])
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  join (function-call: algebricks:eq, Args:[%0->$$2, %0->$$6])
+                  -- HYBRID_HASH_JOIN [$$2][$$6]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                      data-scan [$$2, $$1, $$3, $$4]<-[$$1, $$2, $$3, $$4, $$5] <- default.partsupp
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$6]  |PARTITIONED|
+                      project ([$$6])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          join (function-call: algebricks:eq, Args:[%0->$$9, %0->$$13])
+                          -- HYBRID_HASH_JOIN [$$9][$$13]  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                              data-scan [$$9, $$6]<-[$$6, $$7, $$8, $$9, $$10, $$11, $$12] <- default.supplier
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$13]  |PARTITIONED|
+                              project ([$$13])
+                              -- STREAM_PROJECT  |PARTITIONED|
+                                select (function-call: algebricks:eq, Args:[%0->$$14, GERMANY])
+                                -- STREAM_SELECT  |PARTITIONED|
+                                  exchange 
+                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                    data-scan [$$13, $$14]<-[$$13, $$14, $$15, $$16] <- default.nation
+                                    -- DATASOURCE_SCAN  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        empty-tuple-source
+                                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$3] <- [function-call: hive:sum(FINAL), Args:[%0->$$5]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$5] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$2]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan [$$2]<-[$$1, $$2] <- default.q11_part_tmp
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$2, %0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$3(DESC) ]  |PARTITIONED|
+    order (DESC, %0->$$3) 
+    -- STABLE_SORT [$$3(DESC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$2, $$3])
+        -- STREAM_PROJECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$3, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 1.0E-4]], true])
+            -- NESTED_LOOP  |PARTITIONED|
+              exchange 
+              -- BROADCAST_EXCHANGE  |PARTITIONED|
+                data-scan []<-[$$1] <- default.q11_sum_tmp
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$2, $$3]<-[$$2, $$3] <- default.q11_part_tmp
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q12_shipping.plan b/hivesterix/src/test/resources/optimizerts/results/q12_shipping.plan
new file mode 100644
index 0000000..5c240e2
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q12_shipping.plan
@@ -0,0 +1,58 @@
+write [%0->$$26, %0->$$29, %0->$$30]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$26, $$29, $$30])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$29, $$30] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToDouble, Args:[%0->$$27], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToDouble, Args:[%0->$$28]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$26(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$26) 
+        -- STABLE_SORT [$$26(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$26 := %0->$$34]) decor ([]) {
+                      aggregate [$$27, $$28] <- [function-call: hive:sum(FINAL), Args:[%0->$$32], function-call: hive:sum(FINAL), Args:[%0->$$33]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$34]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$34]  |PARTITIONED|
+                group by ([$$34 := %0->$$24]) decor ([]) {
+                          aggregate [$$32, $$33] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$6, 1-URGENT], function-call: algebricks:eq, Args:[%0->$$6, 2-HIGH]], 1, 0]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: algebricks:and, Args:[function-call: algebricks:neq, Args:[%0->$$6, 1-URGENT], function-call: algebricks:neq, Args:[%0->$$6, 2-HIGH]], 1, 0]]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$24]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$6, $$24])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:eq, Args:[%0->$$10, %0->$$1])
+                        -- HYBRID_HASH_JOIN [$$10][$$1]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                            project ([$$10, $$24])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$22, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$22, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$20, %0->$$21], function-call: algebricks:lt, Args:[%0->$$21, %0->$$22], function-call: algebricks:lt, Args:[%0->$$20, %0->$$21], function-call: algebricks:ge, Args:[%0->$$22, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$22, 1995-01-01], function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$24, MAIL], function-call: algebricks:eq, Args:[%0->$$24, SHIP]], function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$24, MAIL], function-call: algebricks:eq, Args:[%0->$$24, SHIP]]])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  data-scan [$$21, $$20, $$22, $$24, $$10]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25] <- default.lineitem
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                            data-scan [$$1, $$6]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                empty-tuple-source
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q13_customer_distribution.plan b/hivesterix/src/test/resources/optimizerts/results/q13_customer_distribution.plan
new file mode 100644
index 0000000..19bcd24
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q13_customer_distribution.plan
@@ -0,0 +1,80 @@
+write [%0->$$22, %0->$$23]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$22, $$23])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$22, $$23] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$20], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$21]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$21(DESC), $$20(DESC) ]  |PARTITIONED|
+        order (DESC, %0->$$21) (DESC, %0->$$20) 
+        -- STABLE_SORT [$$21(DESC), $$20(DESC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$20 := %0->$$28]) decor ([]) {
+                      aggregate [$$21] <- [function-call: hive:count(FINAL), Args:[%0->$$27]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$28]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$28]  |PARTITIONED|
+                group by ([$$28 := %0->$$19]) decor ([]) {
+                          aggregate [$$27] <- [function-call: hive:count(PARTIAL1), Args:[1]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$19]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$19])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        group by ([$$18 := %0->$$26]) decor ([]) {
+                                  aggregate [$$19] <- [function-call: hive:count(FINAL), Args:[%0->$$25]]
+                                  -- AGGREGATE  |LOCAL|
+                                    nested tuple source
+                                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+                               }
+                        -- EXTERNAL_GROUP_BY[$$26]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                            group by ([$$26 := %0->$$10]) decor ([]) {
+                                      aggregate [$$25] <- [function-call: hive:count(PARTIAL1), Args:[%0->$$1]]
+                                      -- AGGREGATE  |LOCAL|
+                                        nested tuple source
+                                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                                   }
+                            -- EXTERNAL_GROUP_BY[$$10]  |LOCAL|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                project ([$$10, $$1])
+                                -- STREAM_PROJECT  |PARTITIONED|
+                                  exchange 
+                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                    left outer join (function-call: algebricks:eq, Args:[%0->$$2, %0->$$10])
+                                    -- HYBRID_HASH_JOIN [$$10][$$2]  |PARTITIONED|
+                                      exchange 
+                                      -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                                        data-scan [$$10]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17] <- default.customer
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                      exchange 
+                                      -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                                        project ([$$2, $$1])
+                                        -- STREAM_PROJECT  |PARTITIONED|
+                                          select (function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$9, %special%requests%]])
+                                          -- STREAM_SELECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              data-scan [$$1, $$2, $$9]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  empty-tuple-source
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q14_promotion_effect.plan b/hivesterix/src/test/resources/optimizerts/results/q14_promotion_effect.plan
new file mode 100644
index 0000000..21b90bd
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q14_promotion_effect.plan
@@ -0,0 +1,54 @@
+write [%0->$$28]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$28])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$28] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPDivide, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[100.0, %0->$$26], %0->$$27]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$26, $$27] <- [function-call: hive:sum(FINAL), Args:[%0->$$30], function-call: hive:sum(FINAL), Args:[%0->$$31]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+            group by ([]) decor ([]) {
+                      aggregate [$$30, $$31] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$5, PROMO%], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$15, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$16]], 0.0]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$15, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$16]]]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$5, $$15, $$16])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:eq, Args:[%0->$$11, %0->$$1])
+                    -- HYBRID_HASH_JOIN [$$11][$$1]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                        project ([$$11, $$15, $$16])
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$20, 1995-10-01], function-call: algebricks:ge, Args:[%0->$$20, 1995-09-01], function-call: algebricks:lt, Args:[%0->$$20, 1995-10-01]])
+                          -- STREAM_SELECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              data-scan [$$16, $$20, $$11, $$15]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25] <- default.lineitem
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                        data-scan [$$1, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q15_top_supplier.plan b/hivesterix/src/test/resources/optimizerts/results/q15_top_supplier.plan
new file mode 100644
index 0000000..a5bd27a
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q15_top_supplier.plan
@@ -0,0 +1,110 @@
+write [%0->$$17, %0->$$18]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$21]) decor ([]) {
+                aggregate [$$18] <- [function-call: hive:sum(FINAL), Args:[%0->$$20]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$21]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+          group by ([$$21 := %0->$$3]) decor ([]) {
+                    aggregate [$$20] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$3]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              project ([$$3, $$6, $$7])
+              -- STREAM_PROJECT  |PARTITIONED|
+                select (function-call: algebricks:and, Args:[function-call: algebricks:ge, Args:[%0->$$11, 1996-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1996-04-01], function-call: algebricks:ge, Args:[%0->$$11, 1996-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1996-04-01]])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$3, $$6, $$7, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$3] <- [function-call: hive:max(FINAL), Args:[%0->$$5]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- PRE_CLUSTERED_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$5] <- [function-call: hive:max(PARTIAL1), Args:[%0->$$2]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- PRE_CLUSTERED_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan [$$2]<-[$$1, $$2] <- default.revenue
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$2, %0->$$3, %0->$$4, %0->$$6, %0->$$10]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$2(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$2) 
+    -- STABLE_SORT [$$2(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$2, $$3, $$4, $$6, $$10])
+        -- STREAM_PROJECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$10])
+            -- HYBRID_HASH_JOIN [$$1][$$10]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                data-scan []<-[$$1] <- default.max_revenue
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                project ([$$10, $$2, $$3, $$4, $$6])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:eq, Args:[%0->$$9, %0->$$2])
+                    -- HYBRID_HASH_JOIN [$$9][$$2]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                        data-scan []<-[$$9, $$10] <- default.revenue
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                        data-scan [$$2, $$3, $$4, $$6]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.supplier
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q16_parts_supplier_relationship.plan b/hivesterix/src/test/resources/optimizerts/results/q16_parts_supplier_relationship.plan
new file mode 100644
index 0000000..9835346
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q16_parts_supplier_relationship.plan
@@ -0,0 +1,98 @@
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$1])
+  -- STREAM_PROJECT  |PARTITIONED|
+    select (function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$7, %Customer%Complaints%]])
+    -- STREAM_SELECT  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        data-scan [$$1, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+        -- DATASOURCE_SCAN  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            empty-tuple-source
+            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$5, %0->$$6, %0->$$7, %0->$$12]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$5, $$6, $$7, $$12])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$12])
+      -- HYBRID_HASH_JOIN [$$1][$$12]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          data-scan []<-[$$1] <- default.supplier_tmp
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$12]  |PARTITIONED|
+          project ([$$12, $$5, $$6, $$7])
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              join (function-call: algebricks:eq, Args:[%0->$$2, %0->$$11])
+              -- HYBRID_HASH_JOIN [$$2][$$11]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                  select (function-call: algebricks:and, Args:[function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$6, MEDIUM POLISHED%]], function-call: algebricks:neq, Args:[%0->$$5, Brand#45], function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$6, MEDIUM POLISHED%]]])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      data-scan [$$2, $$5, $$6, $$7]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10] <- default.part
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                  data-scan [$$11, $$12]<-[$$11, $$12, $$13, $$14, $$15] <- default.partsupp
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$10, %0->$$11, %0->$$12, %0->$$14]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$10, $$11, $$12, $$14])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$14] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$13]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$13(DESC), $$10(ASC), $$11(ASC), $$12(ASC) ]  |PARTITIONED|
+        order (DESC, %0->$$13) (ASC, %0->$$10) (ASC, %0->$$11) (ASC, %0->$$12) 
+        -- STABLE_SORT [$$13(DESC), $$10(ASC), $$11(ASC), $$12(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$10 := %0->$$5; $$11 := %0->$$6; $$12 := %0->$$7]) decor ([]) {
+                      aggregate [$$13] <- [function-call: hive:count(COMPLETE), Args:[%0->$$8]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$5, $$6, $$7]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$5, $$6, $$7]  |PARTITIONED|
+                group by ([$$5 := %0->$$1; $$6 := %0->$$2; $$7 := %0->$$3; $$8 := %0->$$4]) decor ([]) {
+                          aggregate [] <- []
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$1, $$2, $$3, $$4]  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    select (function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$3, 49], function-call: algebricks:eq, Args:[%0->$$3, 14]], function-call: algebricks:eq, Args:[%0->$$3, 23]], function-call: algebricks:eq, Args:[%0->$$3, 45]], function-call: algebricks:eq, Args:[%0->$$3, 19]], function-call: algebricks:eq, Args:[%0->$$3, 3]], function-call: algebricks:eq, Args:[%0->$$3, 36]], function-call: algebricks:eq, Args:[%0->$$3, 9]])
+                    -- STREAM_SELECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        data-scan []<-[$$1, $$2, $$3, $$4] <- default.q16_tmp
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q17_small_quantity_order_revenue.plan b/hivesterix/src/test/resources/optimizerts/results/q17_small_quantity_order_revenue.plan
new file mode 100644
index 0000000..a827007
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q17_small_quantity_order_revenue.plan
@@ -0,0 +1,104 @@
+write [%0->$$17, %0->$$19]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$19])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$19] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[0.2, %0->$$18]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$17 := %0->$$22]) decor ([]) {
+                  aggregate [$$18] <- [function-call: hive:avg(FINAL), Args:[%0->$$21]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[$$22]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+            group by ([$$22 := %0->$$2]) decor ([]) {
+                      aggregate [$$21] <- [function-call: hive:avg(PARTIAL1), Args:[%0->$$5]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$2]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$2, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$29]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$29])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$29] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPDivide, Args:[%0->$$28, 7.0]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$28] <- [function-call: hive:sum(FINAL), Args:[%0->$$31]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+            group by ([]) decor ([]) {
+                      aggregate [$$31] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$17]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$17])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:lt, Args:[%0->$$16, %0->$$2])
+                  -- STREAM_SELECT  |UNPARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      join (function-call: algebricks:eq, Args:[%0->$$13, %0->$$1])
+                      -- HYBRID_HASH_JOIN [$$13][$$1]  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$13, $$16, $$17])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$13, %0->$$3])
+                              -- HYBRID_HASH_JOIN [$$13][$$3]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$13]  |PARTITIONED|
+                                  data-scan [$$13, $$16, $$17]<-[$$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27] <- default.lineitem
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$3]  |PARTITIONED|
+                                  project ([$$3])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$9, MED BOX], function-call: algebricks:eq, Args:[%0->$$6, Brand#23], function-call: algebricks:eq, Args:[%0->$$9, MED BOX]])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$3, $$6, $$9]<-[$$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.part
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                          data-scan [$$1, $$2]<-[$$1, $$2] <- default.lineitem_tmp
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q18_large_volume_customer.plan b/hivesterix/src/test/resources/optimizerts/results/q18_large_volume_customer.plan
new file mode 100644
index 0000000..ea47ea0
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q18_large_volume_customer.plan
@@ -0,0 +1,126 @@
+write [%0->$$17, %0->$$18]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$21]) decor ([]) {
+                aggregate [$$18] <- [function-call: hive:sum(FINAL), Args:[%0->$$20]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$21]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+          group by ([$$21 := %0->$$1]) decor ([]) {
+                    aggregate [$$20] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$5]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$1]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan [$$1, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$36, %0->$$37, %0->$$38, %0->$$39, %0->$$40, %0->$$41]
+-- SINK_WRITE  |UNPARTITIONED|
+  project ([$$36, $$37, $$38, $$39, $$40, $$41])
+  -- STREAM_PROJECT  |PARTITIONED|
+    limit 100
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      limit 100
+      -- STREAM_LIMIT  |UNPARTITIONED|
+        exchange 
+        -- SORT_MERGE_EXCHANGE [$$40(DESC), $$39(ASC) ]  |PARTITIONED|
+          limit 100
+          -- STREAM_LIMIT  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+              order (DESC, %0->$$40) (ASC, %0->$$39) 
+              -- STABLE_SORT [$$40(DESC), $$39(ASC)]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  group by ([$$36 := %0->$$44; $$37 := %0->$$45; $$38 := %0->$$46; $$39 := %0->$$47; $$40 := %0->$$48]) decor ([]) {
+                            aggregate [$$41] <- [function-call: hive:sum(FINAL), Args:[%0->$$43]]
+                            -- AGGREGATE  |LOCAL|
+                              nested tuple source
+                              -- NESTED_TUPLE_SOURCE  |LOCAL|
+                         }
+                  -- EXTERNAL_GROUP_BY[$$44, $$45, $$46, $$47, $$48]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$44, $$45, $$46, $$47, $$48]  |PARTITIONED|
+                      group by ([$$44 := %0->$$20; $$45 := %0->$$19; $$46 := %0->$$27; $$47 := %0->$$31; $$48 := %0->$$30]) decor ([]) {
+                                aggregate [$$43] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$7]]
+                                -- AGGREGATE  |LOCAL|
+                                  nested tuple source
+                                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+                             }
+                      -- EXTERNAL_GROUP_BY[$$20, $$19, $$27, $$31, $$30]  |LOCAL|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$19, $$20, $$27, $$30, $$31, $$7])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$27, %0->$$1])
+                              -- HYBRID_HASH_JOIN [$$1][$$27]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                  project ([$$1, $$7])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$3, %0->$$1])
+                                      -- HYBRID_HASH_JOIN [$$3][$$1]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$3]  |PARTITIONED|
+                                          data-scan [$$3, $$7]<-[$$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18] <- default.lineitem
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                          project ([$$1])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            select (function-call: algebricks:gt, Args:[%0->$$2, 300])
+                                            -- STREAM_SELECT  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                data-scan []<-[$$1, $$2] <- default.q18_tmp
+                                                -- DATASOURCE_SCAN  |PARTITIONED|
+                                                  exchange 
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    empty-tuple-source
+                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$27]  |PARTITIONED|
+                                  project ([$$27, $$19, $$20, $$30, $$31])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$28, %0->$$19])
+                                      -- HYBRID_HASH_JOIN [$$28][$$19]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$28]  |PARTITIONED|
+                                          data-scan [$$28, $$27, $$30, $$31]<-[$$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35] <- default.orders
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$19]  |PARTITIONED|
+                                          data-scan [$$19, $$20]<-[$$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26] <- default.customer
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q19_discounted_revenue.plan b/hivesterix/src/test/resources/optimizerts/results/q19_discounted_revenue.plan
new file mode 100644
index 0000000..1827729
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q19_discounted_revenue.plan
@@ -0,0 +1,46 @@
+write [%0->$$26]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$26] <- [function-call: hive:sum(FINAL), Args:[%0->$$28]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$28] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$15, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$16]]]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            project ([$$15, $$16])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$4, Brand#12], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$7, SM CASE||SM BOX||SM PACK||SM PKG]], function-call: algebricks:ge, Args:[%0->$$14, 1]], function-call: algebricks:le, Args:[%0->$$14, 11]], function-call: algebricks:ge, Args:[%0->$$6, 1]], function-call: algebricks:le, Args:[%0->$$6, 5]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$24, AIR||AIR REG]], function-call: algebricks:eq, Args:[%0->$$23, DELIVER IN PERSON]], function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$4, Brand#23], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$7, MED BAG||MED BOX||MED PKG||MED PACK]], function-call: algebricks:ge, Args:[%0->$$14, 10]], function-call: algebricks:le, Args:[%0->$$14, 20]], function-call: algebricks:ge, Args:[%0->$$6, 1]], function-call: algebricks:le, Args:[%0->$$6, 10]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$24, AIR||AIR REG]], function-call: algebricks:eq, Args:[%0->$$23, DELIVER IN PERSON]]], function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$4, Brand#34], 
function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$7, LG CASE||LG BOX||LG PACK||LG PKG]], function-call: algebricks:ge, Args:[%0->$$14, 20]], function-call: algebricks:le, Args:[%0->$$14, 30]], function-call: algebricks:ge, Args:[%0->$$6, 1]], function-call: algebricks:le, Args:[%0->$$6, 15]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$24, AIR||AIR REG]], function-call: algebricks:eq, Args:[%0->$$23, DELIVER IN PERSON]]])
+              -- STREAM_SELECT  |UNPARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  join (function-call: algebricks:eq, Args:[%0->$$11, %0->$$1])
+                  -- HYBRID_HASH_JOIN [$$11][$$1]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                      data-scan [$$11, $$14, $$15, $$16, $$23, $$24]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25] <- default.lineitem
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                      data-scan [$$1, $$4, $$6, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q1_pricing_summary_report.plan b/hivesterix/src/test/resources/optimizerts/results/q1_pricing_summary_report.plan
new file mode 100644
index 0000000..0e9c90f
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q1_pricing_summary_report.plan
@@ -0,0 +1,42 @@
+write [%0->$$17, %0->$$18, %0->$$19, %0->$$20, %0->$$21, %0->$$22, %0->$$23, %0->$$24, %0->$$25, %0->$$27]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$27])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$27] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$26]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$17(ASC), $$18(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$17) (ASC, %0->$$18) 
+        -- STABLE_SORT [$$17(ASC), $$18(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$17 := %0->$$37; $$18 := %0->$$38]) decor ([]) {
+                      aggregate [$$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26] <- [function-call: hive:sum(FINAL), Args:[%0->$$29], function-call: hive:sum(FINAL), Args:[%0->$$30], function-call: hive:sum(FINAL), Args:[%0->$$31], function-call: hive:sum(FINAL), Args:[%0->$$32], function-call: hive:avg(FINAL), Args:[%0->$$33], function-call: hive:avg(FINAL), Args:[%0->$$34], function-call: hive:avg(FINAL), Args:[%0->$$35], function-call: hive:count(FINAL), Args:[%0->$$36]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$37, $$38]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$37, $$38]  |PARTITIONED|
+                group by ([$$37 := %0->$$9; $$38 := %0->$$10]) decor ([]) {
+                          aggregate [$$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$5], function-call: hive:sum(PARTIAL1), Args:[%0->$$6], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPPlus, Args:[1, %0->$$8]]], function-call: hive:avg(PARTIAL1), Args:[%0->$$5], function-call: hive:avg(PARTIAL1), Args:[%0->$$6], function-call: hive:avg(PARTIAL1), Args:[%0->$$7], function-call: hive:count(PARTIAL1), Args:[1]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$9, $$10]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$9, $$10, $$5, $$6, $$7, $$8])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      select (function-call: algebricks:le, Args:[%0->$$11, 1998-09-02])
+                      -- STREAM_SELECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$5, $$6, $$7, $$8, $$9, $$10, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q20_potential_part_promotion.plan b/hivesterix/src/test/resources/optimizerts/results/q20_potential_part_promotion.plan
new file mode 100644
index 0000000..eddfca5
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q20_potential_part_promotion.plan
@@ -0,0 +1,178 @@
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$1])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$1) 
+        -- STABLE_SORT [$$1(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+            project ([$$1])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$2, forest%])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$1, $$2]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$17, %0->$$18, %0->$$20]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18, $$20])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$20] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[0.5, %0->$$19]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$17 := %0->$$23; $$18 := %0->$$24]) decor ([]) {
+                  aggregate [$$19] <- [function-call: hive:sum(FINAL), Args:[%0->$$22]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[$$23, $$24]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$23, $$24]  |PARTITIONED|
+            group by ([$$23 := %0->$$2; $$24 := %0->$$3]) decor ([]) {
+                      aggregate [$$22] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$5]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$2, $$3]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$2, $$3, $$5])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:and, Args:[function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01]])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      data-scan [$$2, $$3, $$5, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$6, %0->$$7, %0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$7, $$3])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$1, %0->$$5], function-call: algebricks:eq, Args:[%0->$$2, %0->$$6]])
+      -- HYBRID_HASH_JOIN [$$1, $$2][$$5, $$6]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1, $$2]  |PARTITIONED|
+          data-scan []<-[$$1, $$2, $$3] <- default.q20_tmp2
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$5, $$6]  |PARTITIONED|
+          project ([$$5, $$6, $$7])
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              join (function-call: algebricks:eq, Args:[%0->$$4, %0->$$5])
+              -- HYBRID_HASH_JOIN [$$4][$$5]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                  data-scan []<-[$$4] <- default.q20_tmp1
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                  data-scan [$$5, $$6, $$7]<-[$$5, $$6, $$7, $$8, $$9] <- default.partsupp
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$1])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$1) 
+        -- STABLE_SORT [$$1(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+            project ([$$1])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:gt, Args:[%0->$$2, %0->$$3])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan []<-[$$1, $$2, $$3] <- default.q20_tmp3
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3, %0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$3(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$3) 
+    -- STABLE_SORT [$$3(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$3, $$4])
+        -- STREAM_PROJECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$2])
+            -- HYBRID_HASH_JOIN [$$1][$$2]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                data-scan []<-[$$1] <- default.q20_tmp4
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                project ([$$2, $$3, $$4])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:eq, Args:[%0->$$9, %0->$$5])
+                    -- HYBRID_HASH_JOIN [$$9][$$5]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                        project ([$$9])
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          select (function-call: algebricks:eq, Args:[%0->$$10, CANADA])
+                          -- STREAM_SELECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              data-scan [$$9, $$10]<-[$$9, $$10, $$11, $$12] <- default.nation
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                        data-scan [$$5, $$2, $$3, $$4]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.supplier
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q21_suppliers_who_kept_orders_waiting.plan b/hivesterix/src/test/resources/optimizerts/results/q21_suppliers_who_kept_orders_waiting.plan
new file mode 100644
index 0000000..cc47cf3
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q21_suppliers_who_kept_orders_waiting.plan
@@ -0,0 +1,224 @@
+write [%0->$$21, %0->$$24, %0->$$23]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$21, $$24, $$23])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$24] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$22]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$21 := %0->$$17]) decor ([]) {
+                  aggregate [$$22, $$23] <- [function-call: hive:count(COMPLETE), Args:[%0->$$18], function-call: hive:max(FINAL), Args:[%0->$$20]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- PRE_CLUSTERED_GROUP_BY[$$17]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$17(ASC)] HASH:[$$17]  |PARTITIONED|
+            group by ([$$17 := %0->$$1; $$18 := %0->$$3]) decor ([]) {
+                      aggregate [$$20] <- [function-call: hive:max(PARTIAL1), Args:[%0->$$3]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- PRE_CLUSTERED_GROUP_BY[$$1, $$3]  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+                order (ASC, %0->$$1) (ASC, %0->$$3) 
+                -- STABLE_SORT [$$1(ASC), $$3(ASC)]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$1, $$3]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$21, %0->$$24, %0->$$23]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$21, $$24, $$23])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$24] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$22]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$21 := %0->$$17]) decor ([]) {
+                  aggregate [$$22, $$23] <- [function-call: hive:count(COMPLETE), Args:[%0->$$18], function-call: hive:max(FINAL), Args:[%0->$$20]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- PRE_CLUSTERED_GROUP_BY[$$17]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$17(ASC)] HASH:[$$17]  |PARTITIONED|
+            group by ([$$17 := %0->$$1; $$18 := %0->$$3]) decor ([]) {
+                      aggregate [$$20] <- [function-call: hive:max(PARTIAL1), Args:[%0->$$3]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- PRE_CLUSTERED_GROUP_BY[$$1, $$3]  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+                order (ASC, %0->$$1) (ASC, %0->$$3) 
+                -- STABLE_SORT [$$1(ASC), $$3(ASC)]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$1, $$3])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      select (function-call: algebricks:gt, Args:[%0->$$13, %0->$$12])
+                      -- STREAM_SELECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$1, $$3, $$12, $$13]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$43, %0->$$45]
+-- SINK_WRITE  |UNPARTITIONED|
+  project ([$$43, $$45])
+  -- STREAM_PROJECT  |UNPARTITIONED|
+    assign [$$45] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$44]]
+    -- ASSIGN  |UNPARTITIONED|
+      limit 100
+      -- STREAM_LIMIT  |UNPARTITIONED|
+        limit 100
+        -- STREAM_LIMIT  |UNPARTITIONED|
+          exchange 
+          -- SORT_MERGE_EXCHANGE [$$44(DESC), $$43(ASC) ]  |PARTITIONED|
+            limit 100
+            -- STREAM_LIMIT  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+                order (DESC, %0->$$44) (ASC, %0->$$43) 
+                -- STABLE_SORT [$$44(DESC), $$43(ASC)]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    group by ([$$43 := %0->$$48]) decor ([]) {
+                              aggregate [$$44] <- [function-call: hive:count(FINAL), Args:[%0->$$47]]
+                              -- AGGREGATE  |LOCAL|
+                                nested tuple source
+                                -- NESTED_TUPLE_SOURCE  |LOCAL|
+                           }
+                    -- EXTERNAL_GROUP_BY[$$48]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$48]  |PARTITIONED|
+                        group by ([$$48 := %0->$$37]) decor ([]) {
+                                  aggregate [$$47] <- [function-call: hive:count(PARTIAL1), Args:[1]]
+                                  -- AGGREGATE  |LOCAL|
+                                    nested tuple source
+                                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+                               }
+                        -- EXTERNAL_GROUP_BY[$$37]  |LOCAL|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            project ([$$37])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:or, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull, Args:[%0->$$2], function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$2, 1], function-call: algebricks:eq, Args:[%0->$$18, %0->$$3]]])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  left outer join (function-call: algebricks:eq, Args:[%0->$$4, %0->$$1])
+                                  -- HYBRID_HASH_JOIN [$$4][$$1]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                      project ([$$37, $$4, $$18])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        select (function-call: algebricks:or, Args:[function-call: algebricks:gt, Args:[%0->$$5, 1], function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$5, 1], function-call: algebricks:neq, Args:[%0->$$18, %0->$$6]]])
+                                        -- STREAM_SELECT  |UNPARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            join (function-call: algebricks:eq, Args:[%0->$$16, %0->$$4])
+                                            -- HYBRID_HASH_JOIN [$$16][$$4]  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                project ([$$37, $$16, $$18])
+                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                  exchange 
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    join (function-call: algebricks:eq, Args:[%0->$$16, %0->$$7])
+                                                    -- HYBRID_HASH_JOIN [$$16][$$7]  |PARTITIONED|
+                                                      exchange 
+                                                      -- HASH_PARTITION_EXCHANGE [$$16]  |PARTITIONED|
+                                                        project ([$$37, $$16, $$18])
+                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                          exchange 
+                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                            join (function-call: algebricks:eq, Args:[%0->$$18, %0->$$36])
+                                                            -- HYBRID_HASH_JOIN [$$18][$$36]  |PARTITIONED|
+                                                              exchange 
+                                                              -- HASH_PARTITION_EXCHANGE [$$18]  |PARTITIONED|
+                                                                project ([$$16, $$18])
+                                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                                  select (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$28, %0->$$27], function-call: algebricks:gt, Args:[%0->$$28, %0->$$27]])
+                                                                  -- STREAM_SELECT  |PARTITIONED|
+                                                                    exchange 
+                                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                      data-scan [$$16, $$18, $$27, $$28]<-[$$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31] <- default.lineitem
+                                                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                        exchange 
+                                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                          empty-tuple-source
+                                                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                              exchange 
+                                                              -- HASH_PARTITION_EXCHANGE [$$36]  |PARTITIONED|
+                                                                project ([$$36, $$37])
+                                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                                  exchange 
+                                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                    join (function-call: algebricks:eq, Args:[%0->$$39, %0->$$32])
+                                                                    -- HYBRID_HASH_JOIN [$$39][$$32]  |PARTITIONED|
+                                                                      exchange 
+                                                                      -- HASH_PARTITION_EXCHANGE [$$39]  |PARTITIONED|
+                                                                        data-scan [$$39, $$36, $$37]<-[$$36, $$37, $$38, $$39, $$40, $$41, $$42] <- default.supplier
+                                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                          exchange 
+                                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                            empty-tuple-source
+                                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                      exchange 
+                                                                      -- HASH_PARTITION_EXCHANGE [$$32]  |PARTITIONED|
+                                                                        project ([$$32])
+                                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                                          select (function-call: algebricks:eq, Args:[%0->$$33, SAUDI ARABIA])
+                                                                          -- STREAM_SELECT  |PARTITIONED|
+                                                                            exchange 
+                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                              data-scan [$$32, $$33]<-[$$32, $$33, $$34, $$35] <- default.nation
+                                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                                exchange 
+                                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                  empty-tuple-source
+                                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                      exchange 
+                                                      -- HASH_PARTITION_EXCHANGE [$$7]  |PARTITIONED|
+                                                        project ([$$7])
+                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                          select (function-call: algebricks:eq, Args:[%0->$$9, F])
+                                                          -- STREAM_SELECT  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              data-scan [$$7, $$9]<-[$$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15] <- default.orders
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                exchange 
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  empty-tuple-source
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                              exchange 
+                                              -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                                data-scan [$$4, $$5, $$6]<-[$$4, $$5, $$6] <- default.q21_tmp1
+                                                -- DATASOURCE_SCAN  |PARTITIONED|
+                                                  exchange 
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    empty-tuple-source
+                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                      data-scan [$$1, $$2, $$3]<-[$$1, $$2, $$3] <- default.q21_tmp2
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q22_global_sales_opportunity.plan b/hivesterix/src/test/resources/optimizerts/results/q22_global_sales_opportunity.plan
new file mode 100644
index 0000000..591576b
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q22_global_sales_opportunity.plan
@@ -0,0 +1,136 @@
+write [%0->$$6, %0->$$1, %0->$$9]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$1, $$9])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$9] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2]]
+    -- ASSIGN  |PARTITIONED|
+      select (function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 13], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 31]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 23]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 29]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 30]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 18]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 17]])
+      -- STREAM_SELECT  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan []<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.customer
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$4] <- [function-call: hive:avg(FINAL), Args:[%0->$$6]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$6] <- [function-call: hive:avg(PARTIAL1), Args:[%0->$$1]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            select (function-call: algebricks:gt, Args:[%0->$$1, 0.0])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$1]<-[$$1, $$2, $$3] <- default.q22_customer_tmp
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$2])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$2) 
+        -- STABLE_SORT [$$2(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+            data-scan [$$2]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$6, %0->$$9, %0->$$8]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$9, $$8])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$9] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$7]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$6(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$6) 
+        -- STABLE_SORT [$$6(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$6 := %0->$$13]) decor ([]) {
+                      aggregate [$$7, $$8] <- [function-call: hive:count(FINAL), Args:[%0->$$11], function-call: hive:sum(FINAL), Args:[%0->$$12]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$13]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$13]  |PARTITIONED|
+                group by ([$$13 := %0->$$5]) decor ([]) {
+                          aggregate [$$11, $$12] <- [function-call: hive:count(PARTIAL1), Args:[1], function-call: hive:sum(PARTIAL1), Args:[%0->$$3]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$5]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$5, $$3])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$3, %0->$$1], true])
+                        -- NESTED_LOOP  |PARTITIONED|
+                          exchange 
+                          -- BROADCAST_EXCHANGE  |PARTITIONED|
+                            project ([$$5, $$3])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull, Args:[%0->$$2])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  left outer join (function-call: algebricks:eq, Args:[%0->$$4, %0->$$2])
+                                  -- HYBRID_HASH_JOIN [$$4][$$2]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                      data-scan []<-[$$3, $$4, $$5] <- default.q22_customer_tmp
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                                      data-scan [$$2]<-[$$2] <- default.q22_orders_tmp
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            data-scan [$$1]<-[$$1] <- default.q22_customer_tmp1
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                empty-tuple-source
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q2_minimum_cost_supplier.plan b/hivesterix/src/test/resources/optimizerts/results/q2_minimum_cost_supplier.plan
new file mode 100644
index 0000000..151f34d
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q2_minimum_cost_supplier.plan
@@ -0,0 +1,156 @@
+write [%0->$$20, %0->$$16, %0->$$26, %0->$$1, %0->$$13, %0->$$3, %0->$$17, %0->$$19, %0->$$21]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$20, $$16, $$26, $$1, $$13, $$3, $$17, $$19, $$21])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$10])
+      -- HYBRID_HASH_JOIN [$$1][$$10]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          project ([$$1, $$3])
+          -- STREAM_PROJECT  |PARTITIONED|
+            select (function-call: algebricks:and, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$5, %BRASS], function-call: algebricks:eq, Args:[%0->$$6, 15], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$5, %BRASS]])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$1, $$3, $$5, $$6]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+          project ([$$10, $$16, $$17, $$19, $$20, $$21, $$26, $$13])
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              join (function-call: algebricks:eq, Args:[%0->$$11, %0->$$15])
+              -- HYBRID_HASH_JOIN [$$11][$$15]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                  data-scan [$$11, $$10, $$13]<-[$$10, $$11, $$12, $$13, $$14] <- default.partsupp
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$15]  |PARTITIONED|
+                  project ([$$15, $$16, $$17, $$19, $$20, $$21, $$26])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      join (function-call: algebricks:eq, Args:[%0->$$18, %0->$$25])
+                      -- HYBRID_HASH_JOIN [$$18][$$25]  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$18]  |PARTITIONED|
+                          data-scan []<-[$$15, $$16, $$17, $$18, $$19, $$20, $$21] <- default.supplier
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+                          project ([$$25, $$26])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$22, %0->$$27])
+                              -- HYBRID_HASH_JOIN [$$22][$$27]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+                                  project ([$$22])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:eq, Args:[%0->$$23, EUROPE])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$23, $$22]<-[$$22, $$23, $$24] <- default.region
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$27]  |PARTITIONED|
+                                  data-scan [$$27, $$25, $$26]<-[$$25, $$26, $$27, $$28] <- default.nation
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$10, %0->$$11]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$10, $$11])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$10 := %0->$$14]) decor ([]) {
+                aggregate [$$11] <- [function-call: hive:min(FINAL), Args:[%0->$$13]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- PRE_CLUSTERED_GROUP_BY[$$14]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$14(ASC)] HASH:[$$14]  |PARTITIONED|
+          group by ([$$14 := %0->$$4]) decor ([]) {
+                    aggregate [$$13] <- [function-call: hive:min(PARTIAL1), Args:[%0->$$5]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- PRE_CLUSTERED_GROUP_BY[$$4]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+              order (ASC, %0->$$4) 
+              -- STABLE_SORT [$$4(ASC)]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$4, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.q2_minimum_cost_supplier_tmp1
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3, %0->$$4, %0->$$5, %0->$$6, %0->$$8, %0->$$9, %0->$$10, %0->$$11]
+-- SINK_WRITE  |UNPARTITIONED|
+  limit 100
+  -- STREAM_LIMIT  |UNPARTITIONED|
+    limit 100
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$3(DESC), $$5(ASC), $$4(ASC), $$6(ASC) ]  |PARTITIONED|
+        limit 100
+        -- STREAM_LIMIT  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+            order (DESC, %0->$$3) (ASC, %0->$$5) (ASC, %0->$$4) (ASC, %0->$$6) 
+            -- STABLE_SORT [$$3(DESC), $$5(ASC), $$4(ASC), $$6(ASC)]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$3, $$4, $$5, $$6, $$8, $$9, $$10, $$11])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$1, %0->$$6], function-call: algebricks:eq, Args:[%0->$$2, %0->$$7]])
+                    -- HYBRID_HASH_JOIN [$$1, $$2][$$6, $$7]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$1, $$2]  |PARTITIONED|
+                        data-scan []<-[$$1, $$2] <- default.q2_minimum_cost_supplier_tmp2
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$6, $$7]  |PARTITIONED|
+                        data-scan [$$6, $$7, $$3, $$4, $$5, $$8, $$9, $$10, $$11]<-[$$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.q2_minimum_cost_supplier_tmp1
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q3_shipping_priority.plan b/hivesterix/src/test/resources/optimizerts/results/q3_shipping_priority.plan
new file mode 100644
index 0000000..a1b8e42
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q3_shipping_priority.plan
@@ -0,0 +1,70 @@
+write [%0->$$1, %0->$$34, %0->$$29, %0->$$32]
+-- SINK_WRITE  |UNPARTITIONED|
+  limit 10
+  -- STREAM_LIMIT  |UNPARTITIONED|
+    limit 10
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$34(DESC) ]  |PARTITIONED|
+        limit 10
+        -- STREAM_LIMIT  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+            order (DESC, %0->$$34) 
+            -- STABLE_SORT [$$34(DESC)]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$1, $$34, $$29, $$32])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  assign [$$34] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]]
+                  -- ASSIGN  |PARTITIONED|
+                    project ([$$29, $$32, $$1, $$6, $$7])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$25])
+                        -- HYBRID_HASH_JOIN [$$1][$$25]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                            project ([$$1, $$6, $$7])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:lt, Args:[%0->$$4, 3])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  data-scan [$$1, $$4, $$6, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+                            project ([$$25, $$29, $$32])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                join (function-call: algebricks:eq, Args:[%0->$$26, %0->$$17])
+                                -- HYBRID_HASH_JOIN [$$26][$$17]  |PARTITIONED|
+                                  exchange 
+                                  -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                                    data-scan [$$26, $$25, $$29, $$32]<-[$$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33] <- default.orders
+                                    -- DATASOURCE_SCAN  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        empty-tuple-source
+                                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                  exchange 
+                                  -- HASH_PARTITION_EXCHANGE [$$17]  |PARTITIONED|
+                                    project ([$$17])
+                                    -- STREAM_PROJECT  |PARTITIONED|
+                                      select (function-call: algebricks:eq, Args:[%0->$$23, BUILDING])
+                                      -- STREAM_SELECT  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          data-scan [$$17, $$23]<-[$$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24] <- default.customer
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q4_order_priority.plan b/hivesterix/src/test/resources/optimizerts/results/q4_order_priority.plan
new file mode 100644
index 0000000..435fd7c
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q4_order_priority.plan
@@ -0,0 +1,82 @@
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$1])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$1) 
+        -- STABLE_SORT [$$1(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+            project ([$$1])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:lt, Args:[%0->$$12, %0->$$13])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$1, $$12, $$13]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$11, %0->$$13]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$11, $$13])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$13] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$12]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$11(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$11) 
+        -- STABLE_SORT [$$11(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$11 := %0->$$16]) decor ([]) {
+                      aggregate [$$12] <- [function-call: hive:count(FINAL), Args:[%0->$$15]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$16]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$16]  |PARTITIONED|
+                group by ([$$16 := %0->$$7]) decor ([]) {
+                          aggregate [$$15] <- [function-call: hive:count(PARTIAL1), Args:[1]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$7]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$7])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$2])
+                        -- HYBRID_HASH_JOIN [$$1][$$2]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                            data-scan []<-[$$1] <- default.q4_order_priority_tmp
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                empty-tuple-source
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                            project ([$$2, $$7])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$6, 1993-10-01], function-call: algebricks:ge, Args:[%0->$$6, 1993-07-01], function-call: algebricks:lt, Args:[%0->$$6, 1993-10-01]])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  data-scan [$$2, $$6, $$7]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10] <- default.orders
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan b/hivesterix/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan
new file mode 100644
index 0000000..177d24c
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan
@@ -0,0 +1,126 @@
+write [%0->$$48, %0->$$49]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$48, $$49])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- SORT_MERGE_EXCHANGE [$$49(DESC) ]  |PARTITIONED|
+      order (DESC, %0->$$49) 
+      -- STABLE_SORT [$$49(DESC)]  |LOCAL|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          group by ([$$48 := %0->$$52]) decor ([]) {
+                    aggregate [$$49] <- [function-call: hive:sum(FINAL), Args:[%0->$$51]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$52]  |PARTITIONED|
+            exchange 
+            -- HASH_PARTITION_EXCHANGE [$$52]  |PARTITIONED|
+              group by ([$$52 := %0->$$42]) decor ([]) {
+                        aggregate [$$51] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$23, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$24]]]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$42]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  project ([$$42, $$23, $$24])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$37, %0->$$4], function-call: algebricks:eq, Args:[%0->$$10, %0->$$1]])
+                      -- HYBRID_HASH_JOIN [$$37, $$10][$$4, $$1]  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$37, $$10]  |PARTITIONED|
+                          project ([$$10, $$42, $$23, $$24, $$37])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$18, %0->$$9])
+                              -- HYBRID_HASH_JOIN [$$18][$$9]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$18]  |PARTITIONED|
+                                  project ([$$18, $$23, $$24, $$42, $$37])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$34, %0->$$20])
+                                      -- HYBRID_HASH_JOIN [$$34][$$20]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$34]  |PARTITIONED|
+                                          project ([$$34, $$37, $$42])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              join (function-call: algebricks:eq, Args:[%0->$$41, %0->$$37])
+                                              -- HYBRID_HASH_JOIN [$$41][$$37]  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$41]  |PARTITIONED|
+                                                  project ([$$41, $$42])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      join (function-call: algebricks:eq, Args:[%0->$$45, %0->$$43])
+                                                      -- HYBRID_HASH_JOIN [$$45][$$43]  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$45]  |PARTITIONED|
+                                                          project ([$$45])
+                                                          -- STREAM_PROJECT  |PARTITIONED|
+                                                            select (function-call: algebricks:eq, Args:[%0->$$46, ASIA])
+                                                            -- STREAM_SELECT  |PARTITIONED|
+                                                              exchange 
+                                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                data-scan [$$46, $$45]<-[$$45, $$46, $$47] <- default.region
+                                                                -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                  exchange 
+                                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                    empty-tuple-source
+                                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$43]  |PARTITIONED|
+                                                          data-scan [$$43, $$41, $$42]<-[$$41, $$42, $$43, $$44] <- default.nation
+                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              empty-tuple-source
+                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$37]  |PARTITIONED|
+                                                  data-scan [$$37, $$34]<-[$$34, $$35, $$36, $$37, $$38, $$39, $$40] <- default.supplier
+                                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      empty-tuple-source
+                                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$20]  |PARTITIONED|
+                                          data-scan [$$20, $$18, $$23, $$24]<-[$$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33] <- default.lineitem
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                                  project ([$$9, $$10])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$13, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$13, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$13, 1995-01-01]])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$9, $$10, $$13]<-[$$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17] <- default.orders
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$4, $$1]  |PARTITIONED|
+                          data-scan [$$4, $$1]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.customer
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q6_forecast_revenue_change.plan b/hivesterix/src/test/resources/optimizerts/results/q6_forecast_revenue_change.plan
new file mode 100644
index 0000000..cd9ffcd
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q6_forecast_revenue_change.plan
@@ -0,0 +1,34 @@
+write [%0->$$17]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$17] <- [function-call: hive:sum(FINAL), Args:[%0->$$19]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$19] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, %0->$$7]]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            project ([$$6, $$7])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:and, Args:[function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$7, 0.05], function-call: algebricks:le, Args:[%0->$$7, 0.07], function-call: algebricks:lt, Args:[%0->$$5, 24], function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$7, 0.05], function-call: algebricks:le, Args:[%0->$$7, 0.07], function-call: algebricks:lt, Args:[%0->$$5, 24]])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$5, $$6, $$7, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q7_volume_shipping.plan b/hivesterix/src/test/resources/optimizerts/results/q7_volume_shipping.plan
new file mode 100644
index 0000000..39f8301
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q7_volume_shipping.plan
@@ -0,0 +1,192 @@
+write [%0->$$17, %0->$$18, %0->$$19, %0->$$20]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    union ($$6, $$10, $$17) ($$2, $$14, $$18) ($$5, $$9, $$19) ($$1, $$13, $$20)
+    -- UNION_ALL  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+        project ([$$6, $$2, $$5, $$1])
+        -- STREAM_PROJECT  |UNPARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (true)
+            -- NESTED_LOOP  |PARTITIONED|
+              exchange 
+              -- BROADCAST_EXCHANGE  |PARTITIONED|
+                select (function-call: algebricks:eq, Args:[%0->$$2, GERMANY])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$1, $$2]<-[$$1, $$2, $$3, $$4] <- default.nation
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$5, $$6])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:eq, Args:[%0->$$6, FRANCE])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    project ([$$5, $$6])
+                    -- STREAM_PROJECT  |UNPARTITIONED|
+                      assign [$$5, $$6] <- [%0->$$9, %0->$$10]
+                      -- ASSIGN  |UNPARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          replicate 
+                          -- SPLIT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              data-scan [$$9, $$10]<-[$$9, $$10, $$11, $$12] <- default.nation
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+        project ([$$10, $$14, $$9, $$13])
+        -- STREAM_PROJECT  |UNPARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (true)
+            -- NESTED_LOOP  |PARTITIONED|
+              exchange 
+              -- BROADCAST_EXCHANGE  |PARTITIONED|
+                select (function-call: algebricks:eq, Args:[%0->$$14, FRANCE])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$13, $$14]<-[$$13, $$14, $$15, $$16] <- default.nation
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$9, $$10])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:eq, Args:[%0->$$10, GERMANY])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      replicate 
+                      -- SPLIT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$9, $$10]<-[$$9, $$10, $$11, $$12] <- default.nation
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$47, %0->$$48, %0->$$49, %0->$$50]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$47, $$48, $$49, $$50])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- SORT_MERGE_EXCHANGE [$$47(ASC), $$48(ASC), $$49(ASC) ]  |PARTITIONED|
+      order (ASC, %0->$$47) (ASC, %0->$$48) (ASC, %0->$$49) 
+      -- STABLE_SORT [$$47(ASC), $$48(ASC), $$49(ASC)]  |LOCAL|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          group by ([$$47 := %0->$$53; $$48 := %0->$$54; $$49 := %0->$$55]) decor ([]) {
+                    aggregate [$$50] <- [function-call: hive:sum(FINAL), Args:[%0->$$52]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$53, $$54, $$55]  |PARTITIONED|
+            exchange 
+            -- HASH_PARTITION_EXCHANGE [$$53, $$54, $$55]  |PARTITIONED|
+              group by ([$$53 := %0->$$1; $$54 := %0->$$2; $$55 := %0->$$45]) decor ([]) {
+                        aggregate [$$52] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$46]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$1, $$2, $$45]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  project ([$$1, $$2, $$45, $$46])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    assign [$$45, $$46] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFYear, Args:[%0->$$30], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$25, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$26]]]
+                    -- ASSIGN  |PARTITIONED|
+                      project ([$$1, $$2, $$30, $$25, $$26])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$15, %0->$$4], function-call: algebricks:eq, Args:[%0->$$8, %0->$$3]])
+                          -- HYBRID_HASH_JOIN [$$15, $$8][$$4, $$3]  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$15, $$8]  |PARTITIONED|
+                              project ([$$8, $$30, $$25, $$26, $$15])
+                              -- STREAM_PROJECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  join (function-call: algebricks:eq, Args:[%0->$$22, %0->$$5])
+                                  -- HYBRID_HASH_JOIN [$$22][$$5]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+                                      project ([$$15, $$30, $$25, $$26, $$22])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          join (function-call: algebricks:eq, Args:[%0->$$37, %0->$$12])
+                                          -- HYBRID_HASH_JOIN [$$37][$$12]  |PARTITIONED|
+                                            exchange 
+                                            -- HASH_PARTITION_EXCHANGE [$$37]  |PARTITIONED|
+                                              project ([$$37, $$22, $$25, $$26, $$30])
+                                              -- STREAM_PROJECT  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  join (function-call: algebricks:eq, Args:[%0->$$20, %0->$$36])
+                                                  -- HYBRID_HASH_JOIN [$$20][$$36]  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$20]  |PARTITIONED|
+                                                      select (function-call: algebricks:and, Args:[function-call: algebricks:le, Args:[%0->$$30, 1996-12-31], function-call: algebricks:ge, Args:[%0->$$30, 1995-01-01], function-call: algebricks:le, Args:[%0->$$30, 1996-12-31]])
+                                                      -- STREAM_SELECT  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          data-scan [$$20, $$22, $$25, $$26, $$30]<-[$$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35] <- default.lineitem
+                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              empty-tuple-source
+                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$36]  |PARTITIONED|
+                                                      data-scan [$$36, $$37]<-[$$36, $$37, $$38, $$39, $$40, $$41, $$42, $$43, $$44] <- default.orders
+                                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          empty-tuple-source
+                                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                            exchange 
+                                            -- HASH_PARTITION_EXCHANGE [$$12]  |PARTITIONED|
+                                              data-scan [$$12, $$15]<-[$$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19] <- default.customer
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  empty-tuple-source
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                                      data-scan [$$5, $$8]<-[$$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.supplier
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$4, $$3]  |PARTITIONED|
+                              data-scan [$$4, $$3, $$1, $$2]<-[$$1, $$2, $$3, $$4] <- default.q7_volume_shipping_tmp
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q8_national_market_share.plan b/hivesterix/src/test/resources/optimizerts/results/q8_national_market_share.plan
new file mode 100644
index 0000000..b807a24
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q8_national_market_share.plan
@@ -0,0 +1,190 @@
+write [%0->$$63, %0->$$66]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$63(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$63) 
+    -- STABLE_SORT [$$63(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$63, $$66])
+        -- STREAM_PROJECT  |PARTITIONED|
+          assign [$$66] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPDivide, Args:[%0->$$64, %0->$$65]]
+          -- ASSIGN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              group by ([$$63 := %0->$$70]) decor ([]) {
+                        aggregate [$$64, $$65] <- [function-call: hive:sum(FINAL), Args:[%0->$$68], function-call: hive:sum(FINAL), Args:[%0->$$69]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$70]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$70]  |PARTITIONED|
+                  group by ([$$70 := %0->$$61]) decor ([]) {
+                            aggregate [$$68, $$69] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: algebricks:eq, Args:[%0->$$2, BRAZIL], %0->$$62, 0.0]], function-call: hive:sum(PARTIAL1), Args:[%0->$$62]]
+                            -- AGGREGATE  |LOCAL|
+                              nested tuple source
+                              -- NESTED_TUPLE_SOURCE  |LOCAL|
+                         }
+                  -- EXTERNAL_GROUP_BY[$$61]  |LOCAL|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      project ([$$61, $$62, $$2])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        assign [$$61, $$62] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFYear, Args:[%0->$$41], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$26, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$27]]]
+                        -- ASSIGN  |PARTITIONED|
+                          project ([$$2, $$41, $$27, $$26])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$8, %0->$$1])
+                              -- HYBRID_HASH_JOIN [$$8][$$1]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$8]  |PARTITIONED|
+                                  project ([$$8, $$41, $$27, $$26])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$23, %0->$$5])
+                                      -- HYBRID_HASH_JOIN [$$23][$$5]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$23]  |PARTITIONED|
+                                          project ([$$41, $$27, $$26, $$23])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              join (function-call: algebricks:eq, Args:[%0->$$22, %0->$$12])
+                                              -- HYBRID_HASH_JOIN [$$22][$$12]  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+                                                  project ([$$41, $$22, $$23, $$26, $$27])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      join (function-call: algebricks:eq, Args:[%0->$$21, %0->$$37])
+                                                      -- HYBRID_HASH_JOIN [$$21][$$37]  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+                                                          data-scan [$$21, $$22, $$23, $$26, $$27]<-[$$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36] <- default.lineitem
+                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              empty-tuple-source
+                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$37]  |PARTITIONED|
+                                                          project ([$$37, $$41])
+                                                          -- STREAM_PROJECT  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              join (function-call: algebricks:eq, Args:[%0->$$46, %0->$$38])
+                                                              -- HYBRID_HASH_JOIN [$$46][$$38]  |PARTITIONED|
+                                                                exchange 
+                                                                -- HASH_PARTITION_EXCHANGE [$$46]  |PARTITIONED|
+                                                                  project ([$$46])
+                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                    exchange 
+                                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                      join (function-call: algebricks:eq, Args:[%0->$$54, %0->$$49])
+                                                                      -- HYBRID_HASH_JOIN [$$54][$$49]  |PARTITIONED|
+                                                                        exchange 
+                                                                        -- HASH_PARTITION_EXCHANGE [$$54]  |PARTITIONED|
+                                                                          project ([$$54])
+                                                                          -- STREAM_PROJECT  |PARTITIONED|
+                                                                            exchange 
+                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                              join (function-call: algebricks:eq, Args:[%0->$$58, %0->$$56])
+                                                                              -- HYBRID_HASH_JOIN [$$58][$$56]  |PARTITIONED|
+                                                                                exchange 
+                                                                                -- HASH_PARTITION_EXCHANGE [$$58]  |PARTITIONED|
+                                                                                  project ([$$58])
+                                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                                    select (function-call: algebricks:eq, Args:[%0->$$59, AMERICA])
+                                                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                                                      exchange 
+                                                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                        data-scan [$$59, $$58]<-[$$58, $$59, $$60] <- default.region
+                                                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                                          exchange 
+                                                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                            empty-tuple-source
+                                                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                                exchange 
+                                                                                -- HASH_PARTITION_EXCHANGE [$$56]  |PARTITIONED|
+                                                                                  project ([$$56, $$54])
+                                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                                    exchange 
+                                                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                      replicate 
+                                                                                      -- SPLIT  |PARTITIONED|
+                                                                                        exchange 
+                                                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                          data-scan []<-[$$54, $$55, $$56, $$57] <- default.nation
+                                                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                                            exchange 
+                                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                              empty-tuple-source
+                                                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                        exchange 
+                                                                        -- HASH_PARTITION_EXCHANGE [$$49]  |PARTITIONED|
+                                                                          data-scan [$$49, $$46]<-[$$46, $$47, $$48, $$49, $$50, $$51, $$52, $$53] <- default.customer
+                                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                            exchange 
+                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                              empty-tuple-source
+                                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                exchange 
+                                                                -- HASH_PARTITION_EXCHANGE [$$38]  |PARTITIONED|
+                                                                  project ([$$38, $$37, $$41])
+                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$41, 1996-12-31], function-call: algebricks:ge, Args:[%0->$$41, 1995-01-01]])
+                                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                                      exchange 
+                                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                        data-scan [$$38, $$37, $$41]<-[$$37, $$38, $$39, $$40, $$41, $$42, $$43, $$44, $$45] <- default.orders
+                                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                          exchange 
+                                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                            empty-tuple-source
+                                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$12]  |PARTITIONED|
+                                                  project ([$$12])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    select (function-call: algebricks:eq, Args:[%0->$$16, ECONOMY ANODIZED STEEL])
+                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                      exchange 
+                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                        data-scan [$$16, $$12]<-[$$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20] <- default.part
+                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                          exchange 
+                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                            empty-tuple-source
+                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                                          data-scan [$$5, $$8]<-[$$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.supplier
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                  project ([$$1, $$2])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    assign [$$1, $$2, $$3, $$4] <- [%0->$$54, %0->$$55, %0->$$56, %0->$$57]
+                                    -- ASSIGN  |UNPARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        replicate 
+                                        -- SPLIT  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            data-scan []<-[$$54, $$55, $$56, $$57] <- default.nation
+                                            -- DATASOURCE_SCAN  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                empty-tuple-source
+                                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/q9_product_type_profit.plan b/hivesterix/src/test/resources/optimizerts/results/q9_product_type_profit.plan
new file mode 100644
index 0000000..f57f4a3
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/q9_product_type_profit.plan
@@ -0,0 +1,124 @@
+write [%0->$$53, %0->$$54, %0->$$55]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$53, $$54, $$55])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- SORT_MERGE_EXCHANGE [$$53(ASC), $$54(DESC) ]  |PARTITIONED|
+      order (ASC, %0->$$53) (DESC, %0->$$54) 
+      -- STABLE_SORT [$$53(ASC), $$54(DESC)]  |LOCAL|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          group by ([$$53 := %0->$$58; $$54 := %0->$$59]) decor ([]) {
+                    aggregate [$$55] <- [function-call: hive:sum(FINAL), Args:[%0->$$57]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$58, $$59]  |PARTITIONED|
+            exchange 
+            -- HASH_PARTITION_EXCHANGE [$$58, $$59]  |PARTITIONED|
+              group by ([$$58 := %0->$$48; $$59 := %0->$$51]) decor ([]) {
+                        aggregate [$$57] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$52]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$48, $$51]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  project ([$$48, $$51, $$52])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    assign [$$51, $$52] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFYear, Args:[%0->$$5], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$29, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$30]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$22, %0->$$28]]]
+                    -- ASSIGN  |PARTITIONED|
+                      project ([$$5, $$29, $$30, $$28, $$48, $$22])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          join (function-call: algebricks:eq, Args:[%0->$$24, %0->$$1])
+                          -- HYBRID_HASH_JOIN [$$24][$$1]  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$24]  |PARTITIONED|
+                              project ([$$29, $$30, $$28, $$24, $$48, $$22])
+                              -- STREAM_PROJECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  join (function-call: algebricks:eq, Args:[%0->$$25, %0->$$10])
+                                  -- HYBRID_HASH_JOIN [$$25][$$10]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+                                      project ([$$22, $$29, $$30, $$28, $$25, $$24, $$48])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$26, %0->$$20], function-call: algebricks:eq, Args:[%0->$$25, %0->$$19]])
+                                          -- HYBRID_HASH_JOIN [$$26, $$25][$$20, $$19]  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              project ([$$48, $$24, $$25, $$26, $$28, $$29, $$30])
+                                              -- STREAM_PROJECT  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  join (function-call: algebricks:eq, Args:[%0->$$26, %0->$$40])
+                                                  -- HYBRID_HASH_JOIN [$$26][$$40]  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                                                      data-scan [$$26, $$24, $$25, $$28, $$29, $$30]<-[$$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36, $$37, $$38, $$39] <- default.lineitem
+                                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          empty-tuple-source
+                                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$40]  |PARTITIONED|
+                                                      project ([$$40, $$48])
+                                                      -- STREAM_PROJECT  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          join (function-call: algebricks:eq, Args:[%0->$$43, %0->$$47])
+                                                          -- HYBRID_HASH_JOIN [$$43][$$47]  |PARTITIONED|
+                                                            exchange 
+                                                            -- HASH_PARTITION_EXCHANGE [$$43]  |PARTITIONED|
+                                                              data-scan [$$43, $$40]<-[$$40, $$41, $$42, $$43, $$44, $$45, $$46] <- default.supplier
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                exchange 
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  empty-tuple-source
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                            exchange 
+                                                            -- HASH_PARTITION_EXCHANGE [$$47]  |PARTITIONED|
+                                                              data-scan [$$47, $$48]<-[$$47, $$48, $$49, $$50] <- default.nation
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                exchange 
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  empty-tuple-source
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                            exchange 
+                                            -- HASH_PARTITION_EXCHANGE [$$20]  |PARTITIONED|
+                                              data-scan [$$20, $$19, $$22]<-[$$19, $$20, $$21, $$22, $$23] <- default.partsupp
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  empty-tuple-source
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                                      project ([$$10])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        select (function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$11, %green%])
+                                        -- STREAM_SELECT  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            data-scan [$$10, $$11]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18] <- default.part
+                                            -- DATASOURCE_SCAN  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                empty-tuple-source
+                                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                              data-scan [$$1, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u10_nestedloop_join.plan b/hivesterix/src/test/resources/optimizerts/results/u10_nestedloop_join.plan
new file mode 100644
index 0000000..c86d57f
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u10_nestedloop_join.plan
@@ -0,0 +1,24 @@
+write [%0->$$6, %0->$$2, %0->$$5, %0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$2, $$5, $$1])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$5, %0->$$1], true])
+      -- NESTED_LOOP  |PARTITIONED|
+        exchange 
+        -- BROADCAST_EXCHANGE  |PARTITIONED|
+          data-scan [$$1, $$2]<-[$$1, $$2, $$3, $$4] <- default.nation
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan [$$5, $$6]<-[$$5, $$6, $$7, $$8] <- default.nation
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u1_group_by.plan b/hivesterix/src/test/resources/optimizerts/results/u1_group_by.plan
new file mode 100644
index 0000000..188aa6d
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u1_group_by.plan
@@ -0,0 +1,36 @@
+write [%0->$$18, %0->$$19]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$18, $$19])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$23]) decor ([]) {
+                aggregate [$$18, $$19] <- [function-call: hive:sum(FINAL), Args:[%0->$$21], function-call: hive:sum(FINAL), Args:[%0->$$22]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$23]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$23]  |PARTITIONED|
+          group by ([$$23 := %0->$$9]) decor ([]) {
+                    aggregate [$$21, $$22] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFAbs, Args:[%0->$$5]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPPlus, Args:[1, %0->$$8]]]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$9]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              project ([$$9, $$5, $$6, $$7, $$8])
+              -- STREAM_PROJECT  |PARTITIONED|
+                select (function-call: algebricks:le, Args:[%0->$$11, 1998-09-02])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$5, $$6, $$7, $$8, $$9, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u2_select-project.plan b/hivesterix/src/test/resources/optimizerts/results/u2_select-project.plan
new file mode 100644
index 0000000..4485b36
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u2_select-project.plan
@@ -0,0 +1,16 @@
+write [%0->$$8, %0->$$3, %0->$$4, %0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$8, $$3, $$4, $$2])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$8] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[2, %0->$$1]]
+    -- ASSIGN  |PARTITIONED|
+      select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 20])
+      -- STREAM_SELECT  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan []<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u3_union.plan b/hivesterix/src/test/resources/optimizerts/results/u3_union.plan
new file mode 100644
index 0000000..c4040f2
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u3_union.plan
@@ -0,0 +1,38 @@
+write [%0->$$17, %0->$$18, %0->$$19, %0->$$20]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    union ($$8, $$16, $$17) ($$3, $$11, $$18) ($$4, $$12, $$19) ($$2, $$10, $$20)
+    -- UNION_ALL  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$8, $$3, $$4, $$2])
+        -- STREAM_PROJECT  |PARTITIONED|
+          assign [$$8] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[2, %0->$$1]]
+          -- ASSIGN  |PARTITIONED|
+            select (function-call: algebricks:gt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 50])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan []<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$16, $$11, $$12, $$10])
+        -- STREAM_PROJECT  |PARTITIONED|
+          assign [$$16] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[2, %0->$$9]]
+          -- ASSIGN  |PARTITIONED|
+            select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$9, 2], 20])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan []<-[$$9, $$10, $$11, $$12, $$13, $$14, $$15] <- default.supplier
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u4_join.plan b/hivesterix/src/test/resources/optimizerts/results/u4_join.plan
new file mode 100644
index 0000000..449a601
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u4_join.plan
@@ -0,0 +1,36 @@
+write [%0->$$1, %0->$$2, %0->$$3, %0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 20])
+  -- STREAM_SELECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      data-scan [$$1, $$2, $$3, $$4]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+      -- DATASOURCE_SCAN  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          empty-tuple-source
+          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$1, %0->$$9, %0->$$10, %0->$$11]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$1, $$9, $$10, $$11])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$5, %0->$$1])
+      -- HYBRID_HASH_JOIN [$$5][$$1]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+          data-scan [$$5, $$9, $$10, $$11]<-[$$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.supplier
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          data-scan [$$1]<-[$$1, $$2, $$3, $$4] <- default.result
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u5_lateral_view.plan b/hivesterix/src/test/resources/optimizerts/results/u5_lateral_view.plan
new file mode 100644
index 0000000..48e624e
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u5_lateral_view.plan
@@ -0,0 +1,14 @@
+write [%0->$$2, %0->$$3, %0->$$8]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$2, $$3, $$8])
+  -- STREAM_PROJECT  |PARTITIONED|
+    unnest $$8 <- function-call: hive:explode, Args:[%0->$$1]
+    -- UNNEST  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        data-scan [$$1, $$2, $$3]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+        -- DATASOURCE_SCAN  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            empty-tuple-source
+            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u6_limit.plan b/hivesterix/src/test/resources/optimizerts/results/u6_limit.plan
new file mode 100644
index 0000000..b5ed12f
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u6_limit.plan
@@ -0,0 +1,26 @@
+write [%0->$$1, %0->$$4, %0->$$5, %0->$$7]
+-- SINK_WRITE  |UNPARTITIONED|
+  limit 4
+  -- STREAM_LIMIT  |UNPARTITIONED|
+    limit 4
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$4(ASC) ]  |PARTITIONED|
+        limit 4
+        -- STREAM_LIMIT  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+            order (ASC, %0->$$4) 
+            -- STABLE_SORT [$$4(ASC)]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                select (function-call: algebricks:lt, Args:[%0->$$4, 10000])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$1, $$4, $$5, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u7_multi_join.plan b/hivesterix/src/test/resources/optimizerts/results/u7_multi_join.plan
new file mode 100644
index 0000000..ab55181
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u7_multi_join.plan
@@ -0,0 +1,52 @@
+write [%0->$$4, %0->$$25, %0->$$28, %0->$$29, %0->$$32]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$4, $$25, $$28, $$29, $$32])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$25])
+      -- HYBRID_HASH_JOIN [$$1][$$25]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          data-scan [$$1, $$4]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+          project ([$$32, $$25, $$29, $$28])
+          -- STREAM_PROJECT  |PARTITIONED|
+            project ([$$25, $$17, $$28, $$29, $$32])
+            -- STREAM_PROJECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                join (function-call: algebricks:eq, Args:[%0->$$26, %0->$$17])
+                -- HYBRID_HASH_JOIN [$$26][$$17]  |PARTITIONED|
+                  exchange 
+                  -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$28, 30000], function-call: algebricks:lt, Args:[%0->$$28, 30000]])
+                    -- STREAM_SELECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        data-scan [$$32, $$25, $$26, $$29, $$28]<-[$$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33] <- default.orders
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                  exchange 
+                  -- HASH_PARTITION_EXCHANGE [$$17]  |PARTITIONED|
+                    project ([$$17])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$17, 5], function-call: algebricks:lt, Args:[%0->$$17, 5]])
+                      -- STREAM_SELECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$17]<-[$$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24] <- default.customer
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/optimizerts/results/u8_non_mapred.plan b/hivesterix/src/test/resources/optimizerts/results/u8_non_mapred.plan
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u8_non_mapred.plan
diff --git a/hivesterix/src/test/resources/optimizerts/results/u9_order_by.plan b/hivesterix/src/test/resources/optimizerts/results/u9_order_by.plan
new file mode 100644
index 0000000..7370fcf
--- /dev/null
+++ b/hivesterix/src/test/resources/optimizerts/results/u9_order_by.plan
@@ -0,0 +1,18 @@
+write [%0->$$1, %0->$$2, %0->$$3, %0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$2(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$2) 
+    -- STABLE_SORT [$$2(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 20])
+        -- STREAM_SELECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan [$$1, $$2, $$3, $$4]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/src/test/resources/runtimefunctionts/conf/cluster b/hivesterix/src/test/resources/runtimefunctionts/conf/cluster
new file mode 100644
index 0000000..ee81dc1
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/conf/cluster
@@ -0,0 +1,3 @@
+2
+127.0.0.1 nc0
+127.0.0.1 nc1
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/customer.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl
copy to hivesterix/src/test/resources/runtimefunctionts/data/customer.tbl
diff --git a/hivesterix/src/test/resources/runtimefunctionts/data/large_card_join_src.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/large_card_join_src.tbl
new file mode 100644
index 0000000..0168ae9
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/data/large_card_join_src.tbl
@@ -0,0 +1,1030 @@
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+3
+3
+3
+4
+4
\ No newline at end of file
diff --git a/hivesterix/src/test/resources/runtimefunctionts/data/large_card_join_src_small.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/large_card_join_src_small.tbl
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/data/large_card_join_src_small.tbl
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/lineitem.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/lineitem.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/lineitem.tbl
copy to hivesterix/src/test/resources/runtimefunctionts/data/lineitem.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/nation.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/nation.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/nation.tbl
copy to hivesterix/src/test/resources/runtimefunctionts/data/nation.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/orders.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders.tbl
copy to hivesterix/src/test/resources/runtimefunctionts/data/orders.tbl
diff --git a/hivesterix/src/test/resources/runtimefunctionts/data/part.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/part.tbl
new file mode 100644
index 0000000..f58926e
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/data/part.tbl
@@ -0,0 +1,200 @@
+1|goldenrod lavender spring chocolate lace|Manufacturer#1|Brand#13|PROMO BURNISHED COPPER|7|JUMBO PKG|901.00|ly. slyly ironi|
+2|blush thistle blue yellow saddle|Manufacturer#1|Brand#13|LARGE BRUSHED BRASS|1|LG CASE|902.00|lar accounts amo|
+3|spring green yellow purple cornsilk|Manufacturer#4|Brand#42|STANDARD POLISHED BRASS|21|WRAP CASE|903.00|egular deposits hag|
+4|cornflower chocolate smoke green pink|Manufacturer#3|Brand#34|SMALL PLATED BRASS|14|MED DRUM|904.00|p furiously r|
+5|forest brown coral puff cream|Manufacturer#3|Brand#32|STANDARD POLISHED TIN|15|SM PKG|905.00| wake carefully |
+6|bisque cornflower lawn forest magenta|Manufacturer#2|Brand#24|PROMO PLATED STEEL|4|MED BAG|906.00|sual a|
+7|moccasin green thistle khaki floral|Manufacturer#1|Brand#11|SMALL PLATED COPPER|45|SM BAG|907.00|lyly. ex|
+8|misty lace thistle snow royal|Manufacturer#4|Brand#44|PROMO BURNISHED TIN|41|LG DRUM|908.00|eposi|
+9|thistle dim navajo dark gainsboro|Manufacturer#4|Brand#43|SMALL BURNISHED STEEL|12|WRAP CASE|909.00|ironic foxe|
+10|linen pink saddle puff powder|Manufacturer#5|Brand#54|LARGE BURNISHED STEEL|44|LG CAN|910.01|ithely final deposit|
+11|spring maroon seashell almond orchid|Manufacturer#2|Brand#25|STANDARD BURNISHED NICKEL|43|WRAP BOX|911.01|ng gr|
+12|cornflower wheat orange maroon ghost|Manufacturer#3|Brand#33|MEDIUM ANODIZED STEEL|25|JUMBO CASE|912.01| quickly|
+13|ghost olive orange rosy thistle|Manufacturer#5|Brand#55|MEDIUM BURNISHED NICKEL|1|JUMBO PACK|913.01|osits.|
+14|khaki seashell rose cornsilk navajo|Manufacturer#1|Brand#13|SMALL POLISHED STEEL|28|JUMBO BOX|914.01|kages c|
+15|blanched honeydew sky turquoise medium|Manufacturer#1|Brand#15|LARGE ANODIZED BRASS|45|LG CASE|915.01|usual ac|
+16|deep sky turquoise drab peach|Manufacturer#3|Brand#32|PROMO PLATED TIN|2|MED PACK|916.01|unts a|
+17|indian navy coral pink deep|Manufacturer#4|Brand#43|ECONOMY BRUSHED STEEL|16|LG BOX|917.01| regular accounts|
+18|turquoise indian lemon lavender misty|Manufacturer#1|Brand#11|SMALL BURNISHED STEEL|42|JUMBO PACK|918.01|s cajole slyly a|
+19|chocolate navy tan deep brown|Manufacturer#2|Brand#23|SMALL ANODIZED NICKEL|33|WRAP BOX|919.01| pending acc|
+20|ivory navy honeydew sandy midnight|Manufacturer#1|Brand#12|LARGE POLISHED NICKEL|48|MED BAG|920.02|are across the asympt|
+21|lemon floral azure frosted lime|Manufacturer#3|Brand#33|SMALL BURNISHED TIN|31|MED BAG|921.02|ss packages. pendin|
+22|medium forest blue ghost black|Manufacturer#4|Brand#43|PROMO POLISHED BRASS|19|LG DRUM|922.02| even p|
+23|coral lavender seashell rosy burlywood|Manufacturer#3|Brand#35|MEDIUM BURNISHED TIN|42|JUMBO JAR|923.02|nic, fina|
+24|seashell coral metallic midnight floral|Manufacturer#5|Brand#52|MEDIUM PLATED STEEL|20|MED CASE|924.02| final the|
+25|Algebricksmarine steel firebrick light turquoise|Manufacturer#5|Brand#55|STANDARD BRUSHED COPPER|3|JUMBO BAG|925.02|requests wake|
+26|beige frosted moccasin chocolate snow|Manufacturer#3|Brand#32|SMALL BRUSHED STEEL|32|SM CASE|926.02| instructions i|
+27|saddle puff beige linen yellow|Manufacturer#1|Brand#14|LARGE ANODIZED TIN|20|MED PKG|927.02|s wake. ir|
+28|navajo yellow drab white misty|Manufacturer#4|Brand#44|SMALL PLATED COPPER|19|JUMBO PKG|928.02|x-ray pending, iron|
+29|lemon sky grey salmon orchid|Manufacturer#3|Brand#33|PROMO PLATED COPPER|7|LG DRUM|929.02| carefully fluffi|
+30|cream misty steel spring medium|Manufacturer#4|Brand#42|PROMO ANODIZED TIN|17|LG BOX|930.03|carefully bus|
+31|slate seashell steel medium moccasin|Manufacturer#5|Brand#53|STANDARD BRUSHED TIN|10|LG BAG|931.03|uriously s|
+32|sandy wheat coral spring burnished|Manufacturer#4|Brand#42|ECONOMY PLATED BRASS|31|LG CASE|932.03|urts. carefully fin|
+33|spring bisque salmon slate pink|Manufacturer#2|Brand#22|ECONOMY PLATED NICKEL|16|LG PKG|933.03|ly eve|
+34|khaki steel rose ghost salmon|Manufacturer#1|Brand#13|LARGE BRUSHED STEEL|8|JUMBO BOX|934.03|riously ironic|
+35|green blush tomato burlywood seashell|Manufacturer#4|Brand#43|MEDIUM ANODIZED BRASS|14|JUMBO PACK|935.03|e carefully furi|
+36|chiffon tan forest moccasin dark|Manufacturer#2|Brand#25|SMALL BURNISHED COPPER|3|JUMBO CAN|936.03|olites o|
+37|royal coral orange burnished navajo|Manufacturer#4|Brand#45|LARGE POLISHED TIN|48|JUMBO BOX|937.03|silent |
+38|seashell papaya white mint brown|Manufacturer#4|Brand#43|ECONOMY ANODIZED BRASS|11|SM JAR|938.03|structions inte|
+39|rose medium floral salmon powder|Manufacturer#5|Brand#53|SMALL POLISHED TIN|43|JUMBO JAR|939.03|se slowly above the fl|
+40|lemon midnight metallic sienna steel|Manufacturer#2|Brand#25|ECONOMY BURNISHED COPPER|27|SM CASE|940.04|! blithely specia|
+41|burlywood goldenrod pink peru sienna|Manufacturer#2|Brand#23|ECONOMY ANODIZED TIN|7|WRAP JAR|941.04|uriously. furiously cl|
+42|midnight turquoise lawn beige thistle|Manufacturer#5|Brand#52|MEDIUM BURNISHED TIN|45|LG BOX|942.04|the slow|
+43|medium lace midnight royal chartreuse|Manufacturer#4|Brand#44|PROMO POLISHED STEEL|5|WRAP CASE|943.04|e slyly along the ir|
+44|saddle cream wheat lemon burnished|Manufacturer#4|Brand#45|MEDIUM PLATED TIN|48|SM PACK|944.04|pinto beans. carefully|
+45|lawn peru ghost khaki maroon|Manufacturer#4|Brand#43|SMALL BRUSHED NICKEL|9|WRAP BAG|945.04|nts bo|
+46|honeydew turquoise Algebricksmarine spring tan|Manufacturer#1|Brand#11|STANDARD POLISHED TIN|45|WRAP CASE|946.04|the blithely unusual |
+47|honeydew red azure magenta brown|Manufacturer#4|Brand#45|LARGE BURNISHED BRASS|14|JUMBO PACK|947.04| even plate|
+48|slate thistle cornsilk pale forest|Manufacturer#5|Brand#53|STANDARD BRUSHED STEEL|27|JUMBO CASE|948.04|ng to the depo|
+49|light firebrick cyan puff blue|Manufacturer#2|Brand#24|SMALL BURNISHED TIN|31|MED DRUM|949.04|ar pack|
+50|linen blanched tomato slate medium|Manufacturer#3|Brand#33|LARGE ANODIZED TIN|25|WRAP PKG|950.05|kages m|
+51|lime frosted indian dodger linen|Manufacturer#4|Brand#45|ECONOMY BURNISHED NICKEL|34|JUMBO PACK|951.05|n foxes|
+52|lemon midnight lace sky deep|Manufacturer#3|Brand#35|STANDARD BURNISHED TIN|25|WRAP CASE|952.05| final deposits. fu|
+53|bisque rose cornsilk seashell purple|Manufacturer#2|Brand#23|ECONOMY BURNISHED NICKEL|32|MED BAG|953.05|mptot|
+54|blanched mint yellow papaya cyan|Manufacturer#2|Brand#21|LARGE BURNISHED COPPER|19|WRAP CASE|954.05|e blithely|
+55|sky cream deep tomato rosy|Manufacturer#2|Brand#23|ECONOMY BRUSHED COPPER|9|MED BAG|955.05|ly final pac|
+56|antique beige brown deep dodger|Manufacturer#1|Brand#12|MEDIUM PLATED STEEL|20|WRAP DRUM|956.05|ts. blithel|
+57|purple blue light sienna deep|Manufacturer#3|Brand#32|MEDIUM BURNISHED BRASS|49|MED PKG|957.05|lly abov|
+58|linen hot cornsilk drab bisque|Manufacturer#5|Brand#53|STANDARD POLISHED TIN|44|LG PACK|958.05| fluffily blithely reg|
+59|misty brown medium mint salmon|Manufacturer#5|Brand#53|MEDIUM POLISHED TIN|2|LG BAG|959.05|regular exc|
+60|snow spring sandy olive tomato|Manufacturer#1|Brand#11|LARGE POLISHED COPPER|27|JUMBO CASE|960.06| integ|
+61|light tan linen tomato peach|Manufacturer#5|Brand#54|SMALL BURNISHED NICKEL|18|WRAP DRUM|961.06|es. blithely en|
+62|tan cornsilk spring grey chocolate|Manufacturer#3|Brand#35|STANDARD BRUSHED BRASS|39|JUMBO BOX|962.06|ckly across the carefu|
+63|burnished puff coral light papaya|Manufacturer#3|Brand#32|STANDARD BURNISHED NICKEL|10|JUMBO CAN|963.06| quickly |
+64|Algebricksmarine coral lemon ivory gainsboro|Manufacturer#2|Brand#21|MEDIUM ANODIZED BRASS|1|JUMBO CAN|964.06|efully regular pi|
+65|slate drab medium puff gainsboro|Manufacturer#5|Brand#53|MEDIUM BRUSHED COPPER|3|MED CAN|965.06|posits after the quic|
+66|cornflower pale almond lemon linen|Manufacturer#3|Brand#35|PROMO ANODIZED NICKEL|46|SM CASE|966.06|haggle blithely iro|
+67|slate salmon rose spring seashell|Manufacturer#2|Brand#21|SMALL BRUSHED TIN|31|WRAP DRUM|967.06| regular, p|
+68|bisque ivory mint purple almond|Manufacturer#1|Brand#11|PROMO ANODIZED STEEL|10|WRAP BOX|968.06|eposits shall h|
+69|lace burnished rosy antique metallic|Manufacturer#5|Brand#52|MEDIUM POLISHED BRASS|2|SM BOX|969.06|ely final depo|
+70|violet seashell firebrick dark navajo|Manufacturer#1|Brand#11|STANDARD BRUSHED STEEL|42|LG PACK|970.07|inal gifts. sl|
+71|violet firebrick cream peru white|Manufacturer#3|Brand#33|STANDARD PLATED BRASS|26|WRAP DRUM|971.07| packages alongside|
+72|hot spring yellow azure dodger|Manufacturer#2|Brand#23|STANDARD ANODIZED TIN|25|JUMBO PACK|972.07|efully final the|
+73|cream moccasin royal dim chiffon|Manufacturer#2|Brand#21|SMALL BRUSHED COPPER|35|WRAP DRUM|973.07|ts haggl|
+74|frosted grey Algebricksmarine thistle papaya|Manufacturer#5|Brand#55|ECONOMY ANODIZED BRASS|25|JUMBO CASE|974.07|ent foxes|
+75|Algebricksmarine maroon wheat salmon metallic|Manufacturer#3|Brand#35|SMALL BURNISHED NICKEL|39|SM JAR|975.07|s sleep furiou|
+76|rosy light lime puff sandy|Manufacturer#3|Brand#34|MEDIUM BRUSHED COPPER|9|SM PKG|976.07|n accounts sleep qu|
+77|mint bisque chiffon snow firebrick|Manufacturer#5|Brand#52|STANDARD BRUSHED COPPER|13|MED PKG|977.07|uests.|
+78|blush forest slate seashell puff|Manufacturer#1|Brand#14|ECONOMY POLISHED STEEL|24|LG JAR|978.07|icing deposits wake|
+79|gainsboro pink grey tan almond|Manufacturer#4|Brand#45|PROMO ANODIZED BRASS|22|JUMBO BAG|979.07| foxes are slyly regu|
+80|tomato chartreuse coral turquoise linen|Manufacturer#4|Brand#44|PROMO PLATED BRASS|28|MED CAN|980.08|unusual dependencies i|
+81|misty sandy cornsilk dodger blush|Manufacturer#5|Brand#53|ECONOMY BRUSHED TIN|21|MED BAG|981.08|ove the furiou|
+82|khaki tomato purple almond tan|Manufacturer#1|Brand#15|ECONOMY POLISHED TIN|12|WRAP BOX|982.08|ial requests haggle |
+83|blush green dim lawn peru|Manufacturer#1|Brand#12|PROMO BURNISHED NICKEL|47|SM CAN|983.08|ly regul|
+84|salmon floral cream rose dark|Manufacturer#4|Brand#45|SMALL ANODIZED NICKEL|26|JUMBO PACK|984.08|ideas nag|
+85|dim deep Algebricksmarine smoke pale|Manufacturer#5|Brand#55|PROMO ANODIZED NICKEL|16|LG BAG|985.08| silent|
+86|green blanched firebrick dim cream|Manufacturer#4|Brand#44|STANDARD PLATED TIN|37|LG CASE|986.08| daring sheaves |
+87|purple lace seashell antique orange|Manufacturer#4|Brand#41|LARGE PLATED STEEL|41|WRAP PACK|987.08|yly final|
+88|lime orange bisque chartreuse lemon|Manufacturer#4|Brand#44|PROMO PLATED COPPER|16|SM CASE|988.08|e regular packages. |
+89|ghost lace lemon sienna saddle|Manufacturer#5|Brand#53|STANDARD BURNISHED STEEL|7|MED JAR|989.08|y final pinto |
+90|hot rosy violet plum pale|Manufacturer#5|Brand#51|ECONOMY POLISHED STEEL|49|JUMBO CAN|990.09|caref|
+91|misty bisque lavender spring turquoise|Manufacturer#2|Brand#21|STANDARD BRUSHED TIN|32|JUMBO PKG|991.09|counts dete|
+92|blush magenta ghost tomato rose|Manufacturer#2|Brand#22|STANDARD ANODIZED TIN|35|JUMBO PKG|992.09|he ironic accounts. sp|
+93|pale yellow cornsilk dodger moccasin|Manufacturer#2|Brand#24|LARGE ANODIZED TIN|2|WRAP DRUM|993.09| platel|
+94|blanched pink frosted mint snow|Manufacturer#3|Brand#35|STANDARD POLISHED BRASS|32|SM BOX|994.09|s accounts cajo|
+95|dodger beige wheat orchid navy|Manufacturer#3|Brand#33|LARGE BRUSHED TIN|36|WRAP DRUM|995.09| final pinto beans |
+96|chocolate light firebrick rose indian|Manufacturer#5|Brand#53|STANDARD BRUSHED STEEL|32|SM CASE|996.09|ng to the bli|
+97|coral dodger beige black chartreuse|Manufacturer#3|Brand#33|MEDIUM POLISHED BRASS|49|WRAP CAN|997.09|ss excuses sleep am|
+98|frosted peru chiffon yellow Algebricksmarine|Manufacturer#5|Brand#54|STANDARD ANODIZED BRASS|22|MED JAR|998.09|e the q|
+99|mint grey purple sienna metallic|Manufacturer#2|Brand#21|SMALL BURNISHED STEEL|11|JUMBO PKG|999.09|press|
+100|cyan orchid indian cornflower saddle|Manufacturer#3|Brand#33|ECONOMY ANODIZED TIN|4|LG BAG|1000.10|of the steal|
+101|powder deep lavender violet gainsboro|Manufacturer#3|Brand#32|LARGE ANODIZED STEEL|26|JUMBO JAR|1001.10|ly even,|
+102|papaya maroon blush powder sky|Manufacturer#3|Brand#31|MEDIUM BURNISHED BRASS|17|SM DRUM|1002.10|ular packa|
+103|navy sky spring orchid forest|Manufacturer#2|Brand#25|MEDIUM PLATED BRASS|45|WRAP DRUM|1003.10|e blithely blith|
+104|plum cyan cornflower midnight royal|Manufacturer#1|Brand#13|MEDIUM ANODIZED STEEL|36|JUMBO BAG|1004.10|ites sleep quickly|
+105|dodger slate pale mint navajo|Manufacturer#1|Brand#15|SMALL POLISHED COPPER|27|LG DRUM|1005.10|odolites was |
+106|cornsilk bisque seashell lemon frosted|Manufacturer#3|Brand#31|MEDIUM PLATED BRASS|28|WRAP DRUM|1006.10|unts maintain |
+107|violet honeydew bisque sienna orchid|Manufacturer#5|Brand#53|SMALL BURNISHED TIN|12|MED BOX|1007.10|slyly special depos|
+108|bisque peach magenta tomato yellow|Manufacturer#1|Brand#12|PROMO PLATED NICKEL|41|MED PKG|1008.10|after the carefully |
+109|lemon black indian cornflower pale|Manufacturer#3|Brand#33|ECONOMY POLISHED TIN|11|LG PACK|1009.10|instruction|
+110|firebrick navy rose beige black|Manufacturer#3|Brand#33|STANDARD BURNISHED COPPER|46|LG DRUM|1010.11|t quickly a|
+111|orange cornflower mint snow peach|Manufacturer#5|Brand#54|LARGE BRUSHED COPPER|28|JUMBO JAR|1011.11|kly bold epitaphs |
+112|hot Algebricksmarine tomato lace indian|Manufacturer#4|Brand#43|PROMO BRUSHED STEEL|42|JUMBO CAN|1012.11|the express, |
+113|almond seashell azure blanched light|Manufacturer#3|Brand#31|PROMO POLISHED TIN|23|LG CAN|1013.11|finally even |
+114|pink black blanched lace chartreuse|Manufacturer#5|Brand#51|MEDIUM POLISHED NICKEL|41|MED PACK|1014.11|ully final foxes. pint|
+115|spring chiffon cream orchid dodger|Manufacturer#4|Brand#45|STANDARD POLISHED STEEL|24|MED CAN|1015.11|counts nag! caref|
+116|goldenrod black slate forest red|Manufacturer#5|Brand#53|PROMO POLISHED NICKEL|33|SM PACK|1016.11|usly final courts |
+117|tomato honeydew pale red yellow|Manufacturer#1|Brand#14|SMALL BRUSHED TIN|25|LG BAG|1017.11|ages acc|
+118|ghost plum brown coral cornsilk|Manufacturer#2|Brand#25|PROMO ANODIZED TIN|31|MED PACK|1018.11|ly ironic pinto|
+119|olive metallic slate peach green|Manufacturer#4|Brand#43|LARGE POLISHED STEEL|30|WRAP CASE|1019.11|out the quickly r|
+120|pink powder mint moccasin navajo|Manufacturer#1|Brand#14|SMALL ANODIZED NICKEL|45|WRAP JAR|1020.12|lly a|
+121|bisque royal goldenrod medium thistle|Manufacturer#1|Brand#14|ECONOMY BRUSHED COPPER|13|SM PKG|1021.12|deposi|
+122|gainsboro royal forest dark lace|Manufacturer#2|Brand#21|MEDIUM ANODIZED TIN|8|LG DRUM|1022.12|sts c|
+123|deep dim peach light beige|Manufacturer#1|Brand#12|SMALL BURNISHED TIN|31|JUMBO PKG|1023.12|ray regula|
+124|wheat blush forest metallic navajo|Manufacturer#3|Brand#32|PROMO ANODIZED STEEL|1|LG BOX|1024.12|g the expr|
+125|mint ivory saddle peach midnight|Manufacturer#1|Brand#12|STANDARD BRUSHED BRASS|17|WRAP BAG|1025.12|kages against|
+126|burnished black blue metallic orchid|Manufacturer#4|Brand#45|MEDIUM BRUSHED NICKEL|4|LG BAG|1026.12|es sleep al|
+127|royal coral orchid spring sky|Manufacturer#5|Brand#52|SMALL BURNISHED NICKEL|14|LG JAR|1027.12|lithely expr|
+128|dark burlywood burnished snow sky|Manufacturer#2|Brand#22|PROMO PLATED TIN|5|SM BAG|1028.12|e of the furiously ex|
+129|grey spring chiffon thistle lime|Manufacturer#1|Brand#15|LARGE POLISHED TIN|20|SM JAR|1029.12| careful|
+130|gainsboro powder cyan pale rosy|Manufacturer#2|Brand#23|SMALL PLATED NICKEL|26|LG BOX|1030.13|ake slyly|
+131|tomato moccasin cyan brown goldenrod|Manufacturer#5|Brand#52|STANDARD ANODIZED BRASS|43|MED DRUM|1031.13|nts wake dar|
+132|seashell papaya tomato lime hot|Manufacturer#4|Brand#45|STANDARD BURNISHED BRASS|2|WRAP DRUM|1032.13|ckly expre|
+133|firebrick black dodger pink salmon|Manufacturer#1|Brand#13|SMALL BRUSHED NICKEL|19|LG PKG|1033.13| final pinto beans|
+134|steel beige mint maroon indian|Manufacturer#4|Brand#42|SMALL POLISHED STEEL|35|SM PKG|1034.13|es. bold pa|
+135|thistle chocolate ghost gainsboro peru|Manufacturer#2|Brand#21|MEDIUM BURNISHED STEEL|24|JUMBO CASE|1035.13|l frets |
+136|cornsilk maroon blanched thistle rosy|Manufacturer#2|Brand#22|SMALL PLATED STEEL|2|WRAP BAG|1036.13|kages print carefully|
+137|cornsilk drab ghost sandy royal|Manufacturer#3|Brand#31|ECONOMY PLATED STEEL|25|MED PACK|1037.13|the t|
+138|dark Algebricksmarine tomato medium puff|Manufacturer#1|Brand#13|ECONOMY BURNISHED COPPER|42|JUMBO DRUM|1038.13|ts solve acro|
+139|floral steel burlywood navy cream|Manufacturer#3|Brand#32|MEDIUM BRUSHED STEEL|7|SM BOX|1039.13|ter t|
+140|Algebricksmarine lavender maroon slate hot|Manufacturer#5|Brand#53|STANDARD PLATED STEEL|45|SM BOX|1040.14|oss the carefu|
+141|honeydew magenta tomato spring medium|Manufacturer#3|Brand#35|STANDARD ANODIZED STEEL|23|SM PKG|1041.14|ans nag furiously pen|
+142|chartreuse linen grey slate saddle|Manufacturer#5|Brand#55|STANDARD ANODIZED BRASS|36|MED JAR|1042.14|he accounts. pac|
+143|bisque dodger blanched steel maroon|Manufacturer#3|Brand#34|ECONOMY PLATED TIN|44|MED BAG|1043.14|nts across the|
+144|hot midnight orchid dim steel|Manufacturer#1|Brand#14|SMALL ANODIZED TIN|26|SM BOX|1044.14|owly |
+145|navajo lavender chocolate deep hot|Manufacturer#5|Brand#53|PROMO BRUSHED COPPER|24|SM BAG|1045.14|es wake furiously blit|
+146|azure smoke mint cream burlywood|Manufacturer#3|Brand#34|STANDARD BRUSHED COPPER|11|WRAP PACK|1046.14|unts cajole|
+147|honeydew orange dodger linen lace|Manufacturer#1|Brand#11|MEDIUM PLATED COPPER|29|JUMBO PKG|1047.14|wake never bold |
+148|yellow white ghost lavender salmon|Manufacturer#3|Brand#31|STANDARD PLATED STEEL|20|SM BOX|1048.14|platelets wake fu|
+149|tan thistle frosted indian lawn|Manufacturer#2|Brand#24|MEDIUM BURNISHED NICKEL|6|MED PKG|1049.14|leep requests. dog|
+150|pale rose navajo firebrick Algebricksmarine|Manufacturer#3|Brand#35|LARGE BRUSHED TIN|21|SM BAG|1050.15|ironic foxes|
+151|chartreuse linen violet ghost thistle|Manufacturer#3|Brand#34|LARGE PLATED BRASS|45|MED CAN|1051.15|ccounts nag i|
+152|white sky antique tomato chartreuse|Manufacturer#5|Brand#53|MEDIUM POLISHED STEEL|48|MED CASE|1052.15|thely regular t|
+153|linen frosted slate coral peru|Manufacturer#1|Brand#11|STANDARD PLATED TIN|20|MED BAG|1053.15|thlessly. silen|
+154|peru moccasin peach pale spring|Manufacturer#1|Brand#11|ECONOMY ANODIZED TIN|1|JUMBO BAG|1054.15|posits |
+155|puff yellow cyan tomato purple|Manufacturer#2|Brand#21|SMALL BRUSHED NICKEL|28|WRAP CASE|1055.15|lly ironic, r|
+156|almond ghost powder blush forest|Manufacturer#4|Brand#43|SMALL POLISHED NICKEL|2|LG PKG|1056.15| pinto beans. eve|
+157|navajo linen coral brown forest|Manufacturer#1|Brand#11|ECONOMY ANODIZED STEEL|26|JUMBO PACK|1057.15|ial courts. ru|
+158|magenta light misty navy honeydew|Manufacturer#4|Brand#45|MEDIUM BURNISHED COPPER|47|LG JAR|1058.15| ideas detect slyl|
+159|white orange antique beige Algebricksmarine|Manufacturer#4|Brand#43|SMALL ANODIZED BRASS|46|SM BAG|1059.15| ironic requests-- pe|
+160|frosted cornflower khaki salmon metallic|Manufacturer#5|Brand#55|STANDARD POLISHED COPPER|47|JUMBO CAN|1060.16|nts are carefully|
+161|metallic khaki navy forest cyan|Manufacturer#2|Brand#22|STANDARD PLATED TIN|17|SM PACK|1061.16|r the bl|
+162|burlywood cornflower Algebricksmarine misty snow|Manufacturer#3|Brand#33|MEDIUM ANODIZED COPPER|35|JUMBO PACK|1062.16|e slyly around th|
+163|blush metallic maroon lawn forest|Manufacturer#2|Brand#21|ECONOMY PLATED TIN|34|WRAP DRUM|1063.16|nly s|
+164|orange cyan magenta navajo indian|Manufacturer#2|Brand#23|LARGE PLATED BRASS|35|JUMBO BAG|1064.16|mong th|
+165|white dim cornflower sky seashell|Manufacturer#1|Brand#15|STANDARD PLATED STEEL|24|SM CAN|1065.16| carefully fin|
+166|linen bisque tomato gainsboro goldenrod|Manufacturer#5|Brand#52|LARGE POLISHED COPPER|4|MED BAG|1066.16|ss the|
+167|almond floral grey dim sky|Manufacturer#3|Brand#32|LARGE ANODIZED STEEL|46|WRAP BOX|1067.16|ic ac|
+168|lace gainsboro burlywood smoke tomato|Manufacturer#1|Brand#13|SMALL BRUSHED COPPER|20|JUMBO DRUM|1068.16|ss package|
+169|bisque misty sky cornflower peach|Manufacturer#5|Brand#55|STANDARD POLISHED BRASS|10|JUMBO CASE|1069.16|lets alongside of|
+170|peru grey blanched goldenrod yellow|Manufacturer#3|Brand#33|LARGE POLISHED COPPER|28|LG DRUM|1070.17|yly s|
+171|beige violet black magenta chartreuse|Manufacturer#1|Brand#11|STANDARD BURNISHED COPPER|40|LG JAR|1071.17| the r|
+172|medium goldenrod linen sky coral|Manufacturer#5|Brand#53|PROMO PLATED NICKEL|28|MED CASE|1072.17|quick as|
+173|chartreuse seashell powder navy grey|Manufacturer#1|Brand#12|ECONOMY BURNISHED TIN|17|LG CASE|1073.17|sly bold excuses haggl|
+174|hot cornflower slate saddle pale|Manufacturer#1|Brand#15|ECONOMY BRUSHED COPPER|25|LG CASE|1074.17| accounts nag ab|
+175|magenta blue chartreuse tan green|Manufacturer#1|Brand#11|PROMO ANODIZED TIN|45|JUMBO JAR|1075.17|ole against the|
+176|pink drab ivory papaya grey|Manufacturer#2|Brand#24|SMALL ANODIZED STEEL|40|MED CAN|1076.17|blithely. ironic|
+177|indian turquoise purple green spring|Manufacturer#2|Brand#21|MEDIUM BRUSHED STEEL|42|LG BAG|1077.17|ermanently eve|
+178|lace blanched magenta yellow almond|Manufacturer#1|Brand#13|STANDARD POLISHED TIN|10|LG JAR|1078.17|regular instructions.|
+179|deep puff brown blue burlywood|Manufacturer#4|Brand#43|ECONOMY BRUSHED STEEL|20|LG JAR|1079.17|ely regul|
+180|seashell maroon lace burnished lavender|Manufacturer#3|Brand#33|STANDARD BURNISHED NICKEL|7|WRAP BAG|1080.18|oss the |
+181|antique plum smoke pink dodger|Manufacturer#2|Brand#24|MEDIUM PLATED STEEL|19|WRAP CAN|1081.18|al deposits |
+182|beige cyan burlywood chiffon light|Manufacturer#3|Brand#31|MEDIUM ANODIZED COPPER|11|JUMBO CAN|1082.18|bits are |
+183|ivory white burnished papaya cornflower|Manufacturer#5|Brand#52|PROMO POLISHED STEEL|35|LG PKG|1083.18|ly regular excus|
+184|ghost honeydew cyan lawn powder|Manufacturer#5|Brand#53|SMALL POLISHED TIN|42|LG BOX|1084.18|ding courts. idly iro|
+185|firebrick black ivory spring medium|Manufacturer#4|Brand#44|ECONOMY POLISHED TIN|4|WRAP BAG|1085.18|even foxe|
+186|grey purple chocolate turquoise plum|Manufacturer#2|Brand#23|ECONOMY BRUSHED TIN|15|JUMBO PKG|1086.18|ly reg|
+187|white red lace deep pale|Manufacturer#4|Brand#45|PROMO ANODIZED BRASS|45|MED CAN|1087.18|leep slyly s|
+188|moccasin steel rosy drab white|Manufacturer#5|Brand#54|ECONOMY ANODIZED BRASS|9|MED CAN|1088.18| above the silent p|
+189|dodger moccasin lemon purple thistle|Manufacturer#2|Brand#22|MEDIUM BRUSHED BRASS|13|WRAP DRUM|1089.18|en requests. sauternes|
+190|chartreuse goldenrod midnight cornflower blush|Manufacturer#5|Brand#53|LARGE BURNISHED NICKEL|23|WRAP BAG|1090.19| furiously even d|
+191|mint midnight puff forest peach|Manufacturer#3|Brand#31|MEDIUM POLISHED BRASS|36|WRAP BOX|1091.19| asymptote|
+192|thistle puff pink cream orange|Manufacturer#3|Brand#34|STANDARD BRUSHED COPPER|17|MED BAG|1092.19|uickly regular, expr|
+193|turquoise lime royal metallic azure|Manufacturer#4|Brand#45|ECONOMY BURNISHED BRASS|31|SM PKG|1093.19|final ideas wake furi|
+194|brown black cream navy plum|Manufacturer#5|Brand#51|ECONOMY POLISHED STEEL|7|SM CAN|1094.19|y special accoun|
+195|bisque sienna hot goldenrod khaki|Manufacturer#4|Brand#41|STANDARD BRUSHED NICKEL|40|MED CASE|1095.19|oxes sleep care|
+196|pale peru linen hot maroon|Manufacturer#3|Brand#33|SMALL BURNISHED NICKEL|3|JUMBO JAR|1096.19|uickly special |
+197|lawn lemon khaki rosy blue|Manufacturer#5|Brand#52|SMALL ANODIZED COPPER|18|SM JAR|1097.19|lithely after the eve|
+198|orange cornflower indian Algebricksmarine white|Manufacturer#4|Brand#41|PROMO BRUSHED NICKEL|43|SM PACK|1098.19|ackages? carefully re|
+199|ivory slate lavender tan royal|Manufacturer#3|Brand#31|ECONOMY PLATED STEEL|23|JUMBO DRUM|1099.19|ickly regul|
+200|peach cornsilk navy rosy red|Manufacturer#5|Brand#54|MEDIUM POLISHED BRASS|22|LG PKG|1100.20|furiously even depo|
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/partsupp.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/partsupp.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/partsupp.tbl
copy to hivesterix/src/test/resources/runtimefunctionts/data/partsupp.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/region.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/region.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/region.tbl
copy to hivesterix/src/test/resources/runtimefunctionts/data/region.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/supplier.tbl b/hivesterix/src/test/resources/runtimefunctionts/data/supplier.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/supplier.tbl
copy to hivesterix/src/test/resources/runtimefunctionts/data/supplier.tbl
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml
new file mode 100644
index 0000000..47dfac5
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://127.0.0.1:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml.bak b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml.bak
new file mode 100644
index 0000000..2e248d4
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml.bak
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://localhost:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml
new file mode 100644
index 0000000..842e7ab
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>2</value>
+</property>
+
+<property>
+	<name>dfs.block.size</name>
+	<value>65536</value>
+</property>
+
+</configuration>
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml.bak b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml.bak
new file mode 100644
index 0000000..e3c082b
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml.bak
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>2</value>
+</property>
+
+<property>
+	<name>dfs.block.size</name>
+	<value>32768</value>
+</property>
+
+</configuration>
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml
new file mode 100644
index 0000000..1b9a4d6
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+     <name>mapred.tasktracker.map.tasks.maximum</name>
+     <value>20</value>
+  </property>
+   <property>
+      <name>mapred.tasktracker.reduce.tasks.maximum</name>
+      <value>20</value>
+   </property>
+   <property>
+      <name>mapred.min.split.size</name>
+      <value>65536</value>
+   </property>
+
+</configuration>
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml.bak b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml.bak
new file mode 100644
index 0000000..7a51b86
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml.bak
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+     <name>mapred.tasktracker.map.tasks.maximum</name>
+     <value>20</value>
+  </property>
+   <property>
+      <name>mapred.tasktracker.reduce.tasks.maximum</name>
+      <value>20</value>
+   </property>
+   <property>
+      <name>mapred.min.split.size</name>
+      <value>32768</value>
+   </property>
+
+</configuration>
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml b/hivesterix/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml
new file mode 100644
index 0000000..eef4071
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml
@@ -0,0 +1,853 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+	<!-- Hive Configuration can either be stored in this file or in the hadoop 
+		configuration files -->
+	<!-- that are implied by Hadoop setup variables. -->
+	<!-- Aside from Hadoop setup variables - this file is provided as a convenience 
+		so that Hive -->
+	<!-- users do not have to edit hadoop configuration files (that may be managed 
+		as a centralized -->
+	<!-- resource). -->
+
+	<!-- Hive Execution Parameters -->
+	<property>
+		<name>mapred.reduce.tasks</name>
+		<value>-1</value>
+		<description>The default number of reduce tasks per job. Typically set
+			to a prime close to the number of available hosts. Ignored when
+			mapred.job.tracker is "local". Hadoop sets this to 1 by default,
+			whereas hive uses -1 as its default value.
+			By setting this property to
+			-1, Hive will automatically figure out what
+			should be the number of
+			reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hyracks.connectorpolicy</name>
+		<value>SEND_SIDE_MAT_PIPELINING</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.host</name>
+		<value>127.0.0.1</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.port</name>
+		<value>13099</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.app</name>
+		<value>hivesterix</value>
+	</property>
+
+
+	<property>
+		<name>hive.hyracks.parrallelism</name>
+		<value>2</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external</name>
+		<value>false</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.sort.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.framesize</name>
+		<value>768</value>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.bytes.per.reducer</name>
+		<value>1000000000</value>
+		<description>size per reducer.The default is 1G, i.e if the input size
+			is 10G, it will use 10 reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.max</name>
+		<value>999</value>
+		<description>max number of reducers will be used. If the one
+			specified
+			in the configuration parameter mapred.reduce.tasks is
+			negative, hive
+			will use this one as the max number of reducers when
+			automatically
+			determine number of reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.scratchdir</name>
+		<value>/tmp/hive-${user.name}</value>
+		<description>Scratch space for Hive jobs</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode</name>
+		<value>false</value>
+		<description>whether hive is running in test mode. If yes, it turns on
+			sampling and prefixes the output tablename
+		</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.prefix</name>
+		<value>test_</value>
+		<description>if hive is running in test mode, prefixes the output
+			table by this string
+		</description>
+	</property>
+
+	<!-- If the input table is not bucketed, the denominator of the tablesample 
+		is determinied by the parameter below -->
+	<!-- For example, the following query: -->
+	<!-- INSERT OVERWRITE TABLE dest -->
+	<!-- SELECT col1 from src -->
+	<!-- would be converted to -->
+	<!-- INSERT OVERWRITE TABLE test_dest -->
+	<!-- SELECT col1 from src TABLESAMPLE (BUCKET 1 out of 32 on rand(1)) -->
+	<property>
+		<name>hive.test.mode.samplefreq</name>
+		<value>32</value>
+		<description>if hive is running in test mode and table is not
+			bucketed, sampling frequency
+		</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.nosamplelist</name>
+		<value></value>
+		<description>if hive is running in test mode, don't sample the above
+			comma-separated list of tables
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.local</name>
+		<value>true</value>
+		<description>controls whether to connect to a remote metastore server or
+			open a new metastore server in Hive Client JVM
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionURL</name>
+		<value>jdbc:derby:;databaseName=metastore_db;create=true</value>
+		<description>JDBC connect string for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionDriverName</name>
+		<value>org.apache.derby.jdbc.EmbeddedDriver</value>
+		<description>Driver class name for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.PersistenceManagerFactoryClass</name>
+		<value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+		<description>class implementing the jdo persistence</description>
+	</property>
+
+	<property>
+		<name>datanucleus.connectionPoolingType</name>
+		<value>DBCP</value>
+		<description>Uses a DBCP connection pool for JDBC metastore
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.DetachAllOnCommit</name>
+		<value>true</value>
+		<description>detaches all objects from session so that they can be
+			used after transaction is committed
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.NonTransactionalRead</name>
+		<value>true</value>
+		<description>reads outside of transactions</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionUserName</name>
+		<value>APP</value>
+		<description>username to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionPassword</name>
+		<value>mine</value>
+		<description>password to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateTables</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateColumns</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateConstraints</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.storeManagerType</name>
+		<value>rdbms</value>
+		<description>metadata store type</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoCreateSchema</name>
+		<value>true</value>
+		<description>creates necessary schema on a startup if one doesn't
+			exist. set this to false, after creating it once
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoStartMechanismMode</name>
+		<value>checked</value>
+		<description>throw exception if metadata tables are incorrect
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.transactionIsolation</name>
+		<value>read-committed</value>
+		<description>Default transaction isolation level for identity
+			generation.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2</name>
+		<value>false</value>
+		<description>Use a level 2 cache. Turn this off if metadata is changed
+			independently of hive metastore server
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2.type</name>
+		<value>SOFT</value>
+		<description>SOFT=soft reference based cache, WEAK=weak reference
+			based cache.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.identifierFactory</name>
+		<value>datanucleus</value>
+		<description>Name of the identifier factory to use when generating
+			table/column names etc. 'datanucleus' is used for backward
+			compatibility
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.warehouse.dir</name>
+		<value>/tmp/hivesterix</value>
+		<description>location of default database for the warehouse
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.connect.retries</name>
+		<value>5</value>
+		<description>Number of retries while opening a connection to metastore
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.rawstore.impl</name>
+		<value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+		<description>Name of the class that implements
+			org.apache.hadoop.hive.metastore.rawstore interface. This class is
+			used to store and retrieval of raw metadata objects such as table,
+			database
+		</description>
+	</property>
+
+	<property>
+		<name>hive.default.fileformat</name>
+		<value>TextFile</value>
+		<description>Default file format for CREATE TABLE statement. Options
+			are TextFile and SequenceFile. Users can explicitly say CREATE TABLE
+			... STORED AS &lt;TEXTFILE|SEQUENCEFILE&gt; to override</description>
+	</property>
+
+	<property>
+		<name>hive.fileformat.check</name>
+		<value>true</value>
+		<description>Whether to check file format or not when loading data
+			files
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr</name>
+		<value>true</value>
+		<description>Whether to use map-side aggregation in Hive Group By
+			queries
+		</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.skewindata</name>
+		<value>false</value>
+		<description>Whether there is skew in data to optimize group by
+			queries
+		</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.mapaggr.checkinterval</name>
+		<value>100000</value>
+		<description>Number of rows after which size of the grouping
+			keys/aggregation classes is performed
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.local.mem</name>
+		<value>0</value>
+		<description>For local mode, memory of the mappers/reducers
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.percentmemory</name>
+		<value>0.5</value>
+		<description>Portion of total memory to be used by map-side group
+			aggregation hash table
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.min.reduction</name>
+		<value>0.5</value>
+		<description>Hash aggregation will be turned off if the ratio between
+			hash
+			table size and input rows is bigger than this number. Set to 1 to
+			make
+			sure
+			hash aggregation is never turned off.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.cp</name>
+		<value>true</value>
+		<description>Whether to enable column pruner</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.ppd</name>
+		<value>true</value>
+		<description>Whether to enable predicate pushdown</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.pruner</name>
+		<value>true</value>
+		<description>Whether to enable the new partition pruner which depends
+			on predicate pushdown. If this is disabled,
+			the old partition pruner
+			which is based on AST will be enabled.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.groupby</name>
+		<value>true</value>
+		<description>Whether to enable the bucketed group by from bucketed
+			partitions/tables.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.join.emit.interval</name>
+		<value>1000</value>
+		<description>How many rows in the right-most join operand Hive should
+			buffer before emitting the join result.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.join.cache.size</name>
+		<value>25000</value>
+		<description>How many rows in the joining tables (except the streaming
+			table) should be cached in memory.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.bucket.cache.size</name>
+		<value>100</value>
+		<description>How many values in each keys in the map-joined table
+			should be cached in memory.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.maxsize</name>
+		<value>100000</value>
+		<description>Maximum # of rows of the small table that can be handled
+			by map-side join. If the size is reached and hive.task.progress is
+			set, a fatal error counter is set and the job will be killed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.cache.numrows</name>
+		<value>25000</value>
+		<description>How many rows should be cached by jdbm for map join.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.skewjoin</name>
+		<value>false</value>
+		<description>Whether to enable skew join optimization. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.key</name>
+		<value>100000</value>
+		<description>Determine if we get a skew key in join. If we see more
+			than the specified number of rows with the same key in join operator,
+			we think the key as a skew join key.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.map.tasks</name>
+		<value>10000</value>
+		<description> Determine the number of map task used in the follow up
+			map join job
+			for a skew join. It should be used together with
+			hive.skewjoin.mapjoin.min.split
+			to perform a fine grained control.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.min.split</name>
+		<value>33554432</value>
+		<description> Determine the number of map task at most used in the
+			follow up map join job
+			for a skew join by specifying the minimum split
+			size. It should be used
+			together with
+			hive.skewjoin.mapjoin.map.tasks
+			to perform a fine grained control.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.mode</name>
+		<value>nonstrict</value>
+		<description>The mode in which the hive operations are being
+			performed. In strict mode, some risky queries are not allowed to run
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.maxerrsize</name>
+		<value>100000</value>
+		<description>Maximum number of bytes a script is allowed to emit to
+			standard error (per map-reduce task). This prevents runaway scripts
+			from filling logs partitions to capacity
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.allow.partial.consumption</name>
+		<value>false</value>
+		<description> When enabled, this option allows a user script to exit
+			successfully without consuming all the data from the standard input.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.operator.id.env.var</name>
+		<value>HIVE_SCRIPT_OPERATOR_ID</value>
+		<description> Name of the environment variable that holds the unique
+			script operator ID in the user's transform function (the custom
+			mapper/reducer that the user has specified in the query)
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.output</name>
+		<value>false</value>
+		<description> This controls whether the final outputs of a query (to a
+			local/hdfs file or a hive table) is compressed. The compression codec
+			and other options are determined from hadoop config variables
+			mapred.output.compress*
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.intermediate</name>
+		<value>false</value>
+		<description> This controls whether intermediate files produced by
+			hive between multiple map-reduce jobs are compressed. The compression
+			codec and other options are determined from hadoop config variables
+			mapred.output.compress*
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel</name>
+		<value>false</value>
+		<description>Whether to execute jobs in parallel</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel.thread.number</name>
+		<value>8</value>
+		<description>How many jobs at most can be executed in parallel
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.war.file</name>
+		<value>lib\hive-hwi-0.7.0.war</value>
+		<description>This sets the path to the HWI war file, relative to
+			${HIVE_HOME}.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.host</name>
+		<value>0.0.0.0</value>
+		<description>This is the host address the Hive Web Interface will
+			listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.port</name>
+		<value>9999</value>
+		<description>This is the port the Hive Web Interface will listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.pre.hooks</name>
+		<value></value>
+		<description>Pre Execute Hook for Tests</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapfiles</name>
+		<value>true</value>
+		<description>Merge small files at the end of a map-only job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapredfiles</name>
+		<value>false</value>
+		<description>Merge small files at the end of a map-reduce job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.heartbeat.interval</name>
+		<value>1000</value>
+		<description>Send a heartbeat after this interval - used by mapjoin
+			and filter operators
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.per.task</name>
+		<value>256000000</value>
+		<description>Size of merged files at the end of the job</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.smallfiles.avgsize</name>
+		<value>16000000</value>
+		<description>When the average output file size of a job is less than
+			this number, Hive will start an additional map-reduce job to merge
+			the output files into bigger files. This is only done for map-only
+			jobs if hive.merge.mapfiles is true, and for map-reduce jobs if
+			hive.merge.mapredfiles is true.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive Transform/Map/Reduce Clause should
+			automatically send progress information to TaskTracker to avoid the
+			task getting killed because of inactivity. Hive sends progress
+			information when the script is outputting to stderr. This option
+			removes the need of periodically producing stderr messages, but users
+			should be cautious because this may prevent infinite loops in the
+			scripts to be killed by TaskTracker.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.serde</name>
+		<value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+		<description>The default serde for transmitting input data to and
+			reading output data from the user scripts.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.recordreader</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+		<description>The default record reader for reading data from the user
+			scripts.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.recordwriter</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
+		<description>The default record writer for writing data to the user
+			scripts.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.input.format</name>
+		<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
+		<description>The default input format, if it is not specified, the
+			system assigns it. It is set to HiveInputFormat for hadoop versions
+			17, 18 and 19, whereas it is set to CombinedHiveInputFormat for
+			hadoop 20. The user can always overwrite it - if there is a bug in
+			CombinedHiveInputFormat, it can always be manually set to
+			HiveInputFormat.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.udtf.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive should automatically send progress
+			information to TaskTracker when using UDTF's to prevent the task
+			getting killed because of inactivity. Users should be cautious
+			because this may prevent TaskTracker from killing tasks with infinite
+			loops.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.reduce.tasks.speculative.execution</name>
+		<value>true</value>
+		<description>Whether speculative execution for reducers should be
+			turned on.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.counters.pull.interval</name>
+		<value>1000</value>
+		<description>The interval with which to poll the JobTracker for the
+			counters of the running job. The smaller it is, the more load there
+			will be on the jobtracker; the higher it is, the less granular the
+			reported counter data will be.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.bucketing</name>
+		<value>false</value>
+		<description>Whether bucketing is enforced. If true, while inserting
+			into the table, bucketing is enforced.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.sorting</name>
+		<value>false</value>
+		<description>Whether sorting is enforced. If true, while inserting
+			into the table, sorting is enforced.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.connection.url.hook</name>
+		<value></value>
+		<description>Name of the hook to use for retrieving the JDO connection
+			URL. If empty, the value in javax.jdo.option.ConnectionURL is used
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.attempts</name>
+		<value>1</value>
+		<description>The number of times to retry a metastore call if there
+			were a connection error
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.interval</name>
+		<value>1000</value>
+		<description>The number of milliseconds between metastore retry
+			attempts
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.min.threads</name>
+		<value>200</value>
+		<description>Minimum number of worker threads in the Thrift server's
+			pool.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.max.threads</name>
+		<value>100000</value>
+		<description>Maximum number of worker threads in the Thrift server's
+			pool.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.tcp.keepalive</name>
+		<value>true</value>
+		<description>Whether to enable TCP keepalive for the metastore server.
+			Keepalive will prevent accumulation of half-open connections.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.reducededuplication</name>
+		<value>true</value>
+		<description>Remove extra map-reduce jobs if the data is already
+			clustered by the same key which needs to be used again. This should
+			always be set to true. Since it is a new feature, it has been made
+			configurable.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition</name>
+		<value>false</value>
+		<description>Whether or not to allow dynamic partitions in DML/DDL.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition.mode</name>
+		<value>strict</value>
+		<description>In strict mode, the user must specify at least one static
+			partition in case the user accidentally overwrites all partitions.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions</name>
+		<value>1000</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in total.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions.pernode</name>
+		<value>100</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in each mapper/reducer node.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.default.partition.name</name>
+		<value>__HIVE_DEFAULT_PARTITION__</value>
+		<description>The default partition name in case the dynamic partition
+			column value is null/empty string or anyother values that cannot be
+			escaped. This value must not contain any special character used in
+			HDFS URI (e.g., ':', '%', '/' etc). The user has to be aware that the
+			dynamic partition value should not contain this value to avoid
+			confusions.
+		</description>
+	</property>
+
+	<property>
+		<name>fs.har.impl</name>
+		<value>org.apache.hadoop.hive.shims.HiveHarFileSystem</value>
+		<description>The implementation for accessing Hadoop Archives. Note
+			that this won't be applicable to Hadoop vers less than 0.20
+		</description>
+	</property>
+
+	<property>
+		<name>hive.archive.enabled</name>
+		<value>false</value>
+		<description>Whether archiving operations are permitted</description>
+	</property>
+
+	<property>
+		<name>hive.archive.har.parentdir.settable</name>
+		<value>false</value>
+		<description>In new Hadoop versions, the parent directory must be set
+			while
+			creating a HAR. Because this functionality is hard to detect
+			with just
+			version
+			numbers, this conf var needs to be set manually.
+		</description>
+	</property>
+
+	<!-- HBase Storage Handler Parameters -->
+
+	<property>
+		<name>hive.hbase.wal.enabled</name>
+		<value>true</value>
+		<description>Whether writes to HBase should be forced to the
+			write-ahead log. Disabling this improves HBase write performance at
+			the risk of lost writes in case of a crash.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.drop.ignorenonexistent</name>
+		<value>true</value>
+		<description>drop table always works.</description>
+	</property>
+
+</configuration>
diff --git a/hivesterix/src/test/resources/runtimefunctionts/hive/conf/topology.xml b/hivesterix/src/test/resources/runtimefunctionts/hive/conf/topology.xml
new file mode 100644
index 0000000..4aac091
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/hive/conf/topology.xml
@@ -0,0 +1,12 @@
+<cluster-topology>
+    <network-switch name="switch1">
+        <network-switch name="switch2">
+            <terminal name="nc0"/>
+            <terminal name="nc3"/>
+        </network-switch>
+        <network-switch name="switch3">
+            <terminal name="nc1"/>
+            <terminal name="nc4"/>
+        </network-switch>
+    </network-switch>
+</cluster-topology>
\ No newline at end of file
diff --git a/hivesterix/src/test/resources/runtimefunctionts/ignore.txt b/hivesterix/src/test/resources/runtimefunctionts/ignore.txt
new file mode 100644
index 0000000..e70ea78
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/ignore.txt
@@ -0,0 +1 @@
+q16
\ No newline at end of file
diff --git a/hivesterix/src/test/resources/runtimefunctionts/logging.properties b/hivesterix/src/test/resources/runtimefunctionts/logging.properties
new file mode 100644
index 0000000..1cc34e1
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/logging.properties
@@ -0,0 +1,65 @@
+############################################################
+#  	Default Logging Configuration File
+#
+# You can use a different file by specifying a filename
+# with the java.util.logging.config.file system property.  
+# For example java -Djava.util.logging.config.file=myfile
+############################################################
+
+############################################################
+#  	Global properties
+############################################################
+
+# "handlers" specifies a comma separated list of log Handler 
+# classes.  These handlers will be installed during VM startup.
+# Note that these classes must be on the system classpath.
+# By default we only configure a ConsoleHandler, which will only
+# show messages at the INFO and above levels.
+
+handlers= java.util.logging.ConsoleHandler
+
+# To also add the FileHandler, use the following line instead.
+
+# handlers= java.util.logging.FileHandler, java.util.logging.ConsoleHandler
+
+# Default global logging level.
+# This specifies which kinds of events are logged across
+# all loggers.  For any given facility this global level
+# can be overridden by a facility specific level
+# Note that the ConsoleHandler also has a separate level
+# setting to limit messages printed to the console.
+
+.level= WARNING
+# .level= INFO
+# .level= FINE
+# .level = FINEST
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# default file output is in user's home directory.
+
+# java.util.logging.FileHandler.pattern = %h/java%u.log
+# java.util.logging.FileHandler.limit = 50000
+# java.util.logging.FileHandler.count = 1
+# java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter
+
+# Limit the message that are printed on the console to FINE and above.
+
+java.util.logging.ConsoleHandler.level = FINE
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+# For example, set the com.xyz.foo logger to only log SEVERE
+# messages:
+
+edu.uci.ics.asterix.level = WARNING
+edu.uci.ics.algebricks.level = WARNING
+edu.uci.ics.hyracks.level = WARNING
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q10_returned_item.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q10_returned_item.hive
new file mode 100644
index 0000000..3f1214a
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q10_returned_item.hive
@@ -0,0 +1,37 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q10 (Returned Item Reporting); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q10_returned_item;
+
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912; -- 512 MB min split size to limit mapper count
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01' -- one quarter of orders
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R' -- only returned line items
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20; -- top 20 customers by lost revenue
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q11_important_stock.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q11_important_stock.hive
new file mode 100644
index 0000000..de0cfc3
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q11_important_stock.hive
@@ -0,0 +1,47 @@
+DROP TABLE IF EXISTS partsupp; -- TPC-H Q11 (Important Stock Identification); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q11_important_stock;
+DROP TABLE IF EXISTS q11_part_tmp;
+DROP TABLE IF EXISTS q11_sum_tmp;
+
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey; -- stock value per part
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp; -- single-row grand total
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp -- cross join against the single-row total
+  ) a
+where part_value > total_value * 0.00001 -- keep parts whose stock value exceeds the fraction threshold
+order by value desc;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q12_shipping.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q12_shipping.hive
new file mode 100644
index 0000000..062f7b9
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q12_shipping.hive
@@ -0,0 +1,42 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q12 (Shipping Modes and Order Priority); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority ='1-URGENT'
+         or o_orderpriority ='2-HIGH'
+    then 1
+    else 0
+end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP' -- only the two ship modes of interest
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q13_customer_distribution.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q13_customer_distribution.hive
new file mode 100644
index 0000000..a799008
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q13_customer_distribution.hive
@@ -0,0 +1,27 @@
+DROP TABLE IF EXISTS customer; -- TPC-H Q13 (Customer Distribution); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%' -- exclude special-request orders; outer join keeps order-less customers
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q14_promotion_effect.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q14_promotion_effect.hive
new file mode 100644
index 0000000..988f400
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q14_promotion_effect.hive
@@ -0,0 +1,28 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q14 (Promotion Effect); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue -- promo revenue as a percentage of total revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01'; -- one month of shipments
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q15_top_supplier.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q15_top_supplier.hive
new file mode 100644
index 0000000..04064ed
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q15_top_supplier.hive
@@ -0,0 +1,45 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q15 (Top Supplier); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS revenue;
+DROP TABLE IF EXISTS max_revenue;
+DROP TABLE IF EXISTS q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey; -- revenue per supplier over one quarter
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue; -- single-row maximum
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue -- keep only supplier(s) whose revenue equals the maximum
+order by s_suppkey;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q16_parts_supplier_relationship.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..971ef99
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q16_parts_supplier_relationship.hive
@@ -0,0 +1,53 @@
+DROP TABLE IF EXISTS partsupp; -- TPC-H Q16 (Parts/Supplier Relationship); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS q16_parts_supplier_relationship;
+DROP TABLE IF EXISTS q16_tmp;
+DROP TABLE IF EXISTS supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%'; -- exclude suppliers with customer complaints
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q17_small_quantity_order_revenue.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..db7746b
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,37 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q17 (Small-Quantity-Order Revenue); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q17_small_quantity_order_revenue;
+DROP TABLE IF EXISTS lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity -- 20% of the average order quantity per part
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity; -- only line items below the per-part average-quantity threshold
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q18_large_volume_customer.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q18_large_volume_customer.hive
new file mode 100644
index 0000000..ac2902c
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q18_large_volume_customer.hive
@@ -0,0 +1,43 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q18 (Large Volume Customer); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q18_tmp;
+DROP TABLE IF EXISTS q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity -- total quantity per order
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 30 -- NOTE(review): threshold 30 differs from TPC-H's usual 300 -- presumably scaled for the small test dataset; confirm
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q19_discounted_revenue.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q19_discounted_revenue.hive
new file mode 100644
index 0000000..2002e1e
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q19_discounted_revenue.hive
@@ -0,0 +1,49 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q19 (Discounted Revenue); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  part p join lineitem l 
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+	and p_container REGEXP 'SM CASE||SM BOX||SM PACK||SM PKG' -- NOTE(review): '||' leaves an empty regex alternative that matches ANY string; likely meant single '|' -- kept as-is to match existing expected results
+	and l_quantity >= 1 and l_quantity <= 11
+	and p_size >= 1 and p_size <= 5
+	and l_shipmode REGEXP 'AIR||AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  ) 
+  or 
+  (
+    p_brand = 'Brand#23'
+	and p_container REGEXP 'MED BAG||MED BOX||MED PKG||MED PACK'
+	and l_quantity >= 10 and l_quantity <= 20
+	and p_size >= 1 and p_size <= 10
+	and l_shipmode REGEXP 'AIR||AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+	p_brand = 'Brand#34'
+	and p_container REGEXP 'LG CASE||LG BOX||LG PACK||LG PKG'
+	and l_quantity >= 20 and l_quantity <= 30
+	and p_size >= 1 and p_size <= 15
+	and l_shipmode REGEXP 'AIR||AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q1_pricing_summary_report.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..a002068
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS lineitem; -- TPC-H Q1 (Pricing Summary Report); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS q1_pricing_summary_report;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' -- items shipped on or before the cutoff date
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q20_potential_part_promotion.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..2bb90ea
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q20_potential_part_promotion.hive
@@ -0,0 +1,73 @@
+DROP TABLE IF EXISTS partsupp; -- TPC-H Q20 (Potential Part Promotion); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q20_tmp1;
+DROP TABLE IF EXISTS q20_tmp2;
+DROP TABLE IF EXISTS q20_tmp3;
+DROP TABLE IF EXISTS q20_tmp4;
+DROP TABLE IF EXISTS q20_potential_part_promotion;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION'/tpch/partsupp';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the target table
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part;
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity) -- half of total shipped quantity per (part, supplier)
+from
+  lineitem
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity -- suppliers holding surplus stock
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+   nation n join supplier s
+  on
+    s.s_nationkey = n.n_nationkey
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..9d01741
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,71 @@
+DROP TABLE IF EXISTS orders; -- TPC-H Q21 (Suppliers Who Kept Orders Waiting); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q21_tmp1;
+DROP TABLE IF EXISTS q21_tmp2;
+DROP TABLE IF EXISTS q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey -- supplier stats per order (all line items)
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey -- supplier stats per order (late line items only)
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+(select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+ from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate -- this supplier's line item was delivered late
+                ) l1 on o.o_orderkey = l1.l_orderkey
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey >= 0)
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+  )c
+group by s_name
+order by numwait desc, s_name
+limit 100;
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q22_global_sales_opportunity.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..851a8b4
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q22_global_sales_opportunity.hive
@@ -0,0 +1,60 @@
+DROP TABLE IF EXISTS customer; -- TPC-H Q22 (Global Sales Opportunity); drop leftovers so the script is rerunnable
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q22_customer_tmp;
+DROP TABLE IF EXISTS q22_customer_tmp1;
+DROP TABLE IF EXISTS q22_orders_tmp;
+DROP TABLE IF EXISTS q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode -- first two phone digits are the country code
+from 
+  customer;
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00; -- average over customers with a positive balance
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey; -- distinct customers that have placed orders
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey -- NOTE(review): spec keeps only customers with NO orders (o_custkey IS NULL); no such filter here -- confirm against expected results
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q2_minimum_cost_supplier.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..200b99f
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q2_minimum_cost_supplier.hive
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp1;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on  
+s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_type like '%BRASS' ;
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q3_shipping_priority.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q3_shipping_priority.hive
new file mode 100644
index 0000000..0049eb3
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q3_shipping_priority.hive
@@ -0,0 +1,29 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q3_shipping_priority;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+Insert overwrite table q3_shipping_priority 
+select 
+  l_orderkey, (l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority 
+from 
+  customer c join orders o 
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey 
+  join lineitem l 
+    on l.l_orderkey = o.o_orderkey and l.l_linenumber<3
+-- group by l_orderkey, o_orderdate, o_shippriority 
+order by revenue desc
+limit 10;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q4_order_priority.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q4_order_priority.hive
new file mode 100644
index 0000000..aa828e9
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q4_order_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q4_order_priority_tmp;
+DROP TABLE IF EXISTS q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q5_local_supplier_volume.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..9af2dd2
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q5_local_supplier_volume.hive
@@ -0,0 +1,42 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from 
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from
+        ( select n_name, s_suppkey, s_nationkey from 
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey
+          ) n1 join supplier s on s.s_nationkey = n1.n_nationkey
+        ) s1 join lineitem l on l.l_suppkey = s1.s_suppkey
+      ) l1 join orders o on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1990-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q6_forecast_revenue_change.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..d8cb9b9
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q6_forecast_revenue_change.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q7_volume_shipping.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q7_volume_shipping.hive
new file mode 100644
index 0000000..2678f80
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q7_volume_shipping.hive
@@ -0,0 +1,71 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q7_volume_shipping;
+DROP TABLE IF EXISTS q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n2.n_name = 'GERMANY'
+    UNION ALL
+select 
+  n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+  n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 
+  on 
+    n1.n_name = 'GERMANY'
+) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1992-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q8_national_market_share.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q8_national_market_share.hive
new file mode 100644
index 0000000..4d9d36f
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q8_national_market_share.hive
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+select 
+  year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+  n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from 
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from 
+                (select o_orderdate, o_orderkey 
+                 from 
+                   (select c.c_custkey 
+                    from 
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 join customer c on c.c_nationkey = n11.n_nationkey
+                    ) c1 join orders o on c1.c_custkey = o.o_custkey
+                 ) o1 join lineitem l on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate < '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 join supplier s on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/q9_product_type_profit.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/q9_product_type_profit.hive
new file mode 100644
index 0000000..2e5b4a1
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/q9_product_type_profit.hive
@@ -0,0 +1,50 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+select 
+  n_name as nation, year(o_orderdate) as o_year, 
+  l_extendedprice * (1 - l_discount) -  ps_supplycost * l_quantity as amount
+    from
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 join orders o on o.o_orderkey = l3.l_orderkey
+  )profit
+group by nation, o_year
+order by nation, o_year desc;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u10_join.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u10_join.hive
new file mode 100644
index 0000000..1d901c2
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u10_join.hive
@@ -0,0 +1,15 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS u10_join;
+
+-- create the tables and load the data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+create table u10_join(O_ORDERSTATUS STRING);
+
+insert overwrite table u10_join
+select O_TOTALPRICE
+from orders join customer
+on orders.O_CUSTKEY=customer.C_CUSTKEY
+order by O_TOTALPRICE;
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u10_nestedloop_join.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u10_nestedloop_join.hive
new file mode 100644
index 0000000..8fc0a7a
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u10_nestedloop_join.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS u10_nestedloop_join;
+
+-- create tables and load data
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target table
+create table u10_nestedloop_join(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+-- the query
+insert overwrite table u10_nestedloop_join
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+from 
+  nation n1 join nation n2 where n1.n_nationkey > n2.n_nationkey
+) a order by a.supp_nation, a.cust_nation;
\ No newline at end of file
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u1_gby.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u1_gby.hive
new file mode 100644
index 0000000..6e53d01
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u1_gby.hive
@@ -0,0 +1,11 @@
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS u1_gby;
+
+-- create tables and load data
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create table u1_gby(col1 INT, col2 INT);
+
+insert overwrite table u1_gby select N_REGIONKEY, count(1)
+from nation
+group by N_REGIONKEY
+order by N_REGIONKEY;
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u2_gby_external.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u2_gby_external.hive
new file mode 100644
index 0000000..be9de2d
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u2_gby_external.hive
@@ -0,0 +1,19 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS u2_gby_external;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the result tables
+create table u2_gby_external(l_partkey int, t_sum_quantity double);
+
+-- the query
+insert overwrite table u2_gby_external
+select 
+  l_orderkey, avg(L_QUANTITY) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey
+order by l_orderkey desc
+limit 10;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u3_union.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u3_union.hive
new file mode 100644
index 0000000..99d62df
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u3_union.hive
@@ -0,0 +1,11 @@
+drop table IF EXISTS supplier;

+drop table IF EXISTS u3_union;

+

+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';

+create table u3_union (S_SUPPKEY INT, S_ADDRESS STRING, S_NATIONKEY INT,  S_NAME STRING);

+

+insert overwrite table u3_union 

+select * from (select (2*s_suppkey), s_address, s_nationkey,  s_name  FROM supplier where S_SUPPKEY*2 < 20 

+union all 

+select (2*s_suppkey), s_address, s_nationkey,  s_name   FROM supplier where S_SUPPKEY*2 > 50) t

+order by t.s_address;

diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u4_gby_distinct.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u4_gby_distinct.hive
new file mode 100644
index 0000000..6cd4a5b
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u4_gby_distinct.hive
@@ -0,0 +1,19 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS u4_gby_distinct;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the result tables
+create table u4_gby_distinct(l_partkey int, t_sum_quantity double);
+
+-- the query
+insert overwrite table u4_gby_distinct
+select 
+  l_orderkey, avg(distinct L_QUANTITY) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey
+order by l_orderkey desc
+limit 10;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u5_gby_global.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u5_gby_global.hive
new file mode 100644
index 0000000..cef7e2b
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u5_gby_global.hive
@@ -0,0 +1,16 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS u5_gby_global;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the result tables
+create table u5_gby_global(t_sum_quantity double);
+
+-- the query
+insert overwrite table u5_gby_global
+select 
+  sum(L_QUANTITY) as t_sum_quantity
+from 
+  lineitem;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/queries/u6_large_card_join.hive b/hivesterix/src/test/resources/runtimefunctionts/queries/u6_large_card_join.hive
new file mode 100644
index 0000000..5bf560b
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/queries/u6_large_card_join.hive
@@ -0,0 +1,23 @@
+DROP TABLE IF EXISTS joinsrc1;
+DROP TABLE IF EXISTS joinsrc2;
+DROP TABLE IF EXISTS u6_large_card_join;
+
+-- create the tables and load the data
+create external table joinsrc1 (ID_1 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
+STORED AS TEXTFILE LOCATION '/test/joinsrc1';
+
+create external table joinsrc2 (ID_2 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
+STORED AS TEXTFILE LOCATION '/test/joinsrc2';
+
+-- create the result tables
+create table u6_large_card_join(col1 int, col2 int);
+
+-- the query
+insert overwrite table u6_large_card_join
+select 
+   ID_1, ID_2
+from 
+  joinsrc1 join joinsrc2
+on
+  joinsrc1.ID_1=joinsrc2.ID_2;
+
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q10_returned_item.result b/hivesterix/src/test/resources/runtimefunctionts/results/q10_returned_item.result
new file mode 100644
index 0000000..6cc8ef4
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q10_returned_item.result
@@ -0,0 +1,20 @@
+121Customer#000000121282635.171899999966428.32PERUtv nCR2YKupGN73mQudO27-411-990-2959uriously stealthy ideas. carefully final courts use carefully
+124Customer#000000124222182.51881842.49CHINAaTbyVAW5tCd,v09O28-183-750-7809le fluffily even dependencies. quietly s
+106Customer#000000106190241.33343288.42ARGENTINAxGCOEAUjUNG11-751-989-4627lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. 
+16Customer#000000016161422.046099999984681.03IRANcYiaeMLZSMAOQ2 d0W,20-781-609-3107kly silent courts. thinly regular theodolites sleep fluffily after 
+44Customer#000000044149364.565199999987315.94MOZAMBIQUEOi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi26-190-260-5375r requests around the unusual, bold a
+71Customer#000000071129481.02450000001-611.19GERMANYTlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B17-710-812-5403g courts across the regular, final pinto beans are blithely pending ac
+89Customer#000000089121663.12431530.76KENYAdtR, y9JQWUO6FoJExyp8whOU24-394-451-5404counts are slyly beyond the slyly final accounts. quickly final ideas wake. r
+112Customer#000000112111137.714099999982953.35ROMANIARcfgG3bO7QeCnfjqJT129-233-262-8382rmanently unusual multipliers. blithely ruthless deposits are furiously along the
+62Customer#000000062106368.0153595.61GERMANYupJK2Dnw13,17-361-978-7059kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a
+146Customer#000000146103265.988799999993328.68CANADAGdxkdXG9u7iyI1,,y5tq4ZyrcEy13-835-723-3223ffily regular dinos are slyly unusual requests. slyly specia
+19Customer#00000001999306.012700000028914.71CHINAuc,3bHIx84H,wdrmLOjVsiqXCq2tr28-396-526-5053 nag. furiously careful packages are slyly at the accounts. furiously regular in
+145Customer#00000014599256.90189748.93JORDANkQjHmt2kcec cy3hfMh969u23-562-444-8454ests? express, express instructions use. blithely fina
+103Customer#00000010397311.772400000022757.45INDONESIA8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a19-216-107-2107furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl
+136Customer#00000013695855.39799999999-842.39GERMANYQoLsJ0v5C1IQbh,DS117-501-210-4726ackages sleep ironic, final courts. even requests above the blithely bold requests g
+53Customer#00000005392568.91244113.64MOROCCOHnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib25-168-852-5363ar accounts are. even foxes are blithely. fluffily pending deposits boost
+49Customer#00000004990965.72624573.94IRANcNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl20-908-631-4424nusual foxes! fluffily pending packages maintain to the regular 
+37Customer#00000003788065.74579999999-917.75INDIA7EV4Pwh,3SboctTWt18-385-235-7162ilent packages are carefully among the deposits. furiousl
+82Customer#00000008286998.96449468.34CHINAzhG3EZbap4c992Gj3bK,3Ne,Xn28-159-442-5305s wake. bravely regular accounts are furiously. regula
+125Customer#00000012584808.068-234.12ROMANIA,wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y29-261-996-3120x-ray finally after the packages? regular requests c
+59Customer#00000005984655.57113458.6ARGENTINAzLOCP0wh92OtBihgspOGl411-355-584-3112ously final packages haggle blithely after the express deposits. furiou
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q11_important_stock.result b/hivesterix/src/test/resources/runtimefunctionts/results/q11_important_stock.result
new file mode 100644
index 0000000..cdc01d2
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q11_important_stock.result
@@ -0,0 +1,200 @@
+252.832302068E7
+1242.59627599E7
+1752.385395363E7
+1972.248551967E7
+1632.099460571E7
+1602.00232846E7
+821.9919213349999998E7
+1691.898734723E7
+291.867279344E7
+261.8612458270000003E7
+731.827170729E7
+1611.798746301E7
+751.7959598009999998E7
+341.7780838360000003E7
+981.7763191509999998E7
+691.728526943E7
+1111.708388262E7
+1711.635442066E7
+1661.635189374E7
+771.598059909E7
+781.58768992E7
+1431.585686159E7
+171.5474261120000001E7
+1091.5054682620000001E7
+1051.5053163809999999E7
+961.495213259E7
+1461.4810759440000001E7
+1361.4654967749999998E7
+1161.443209134E7
+1281.4393555260000002E7
+1421.422039904E7
+1211.420032605E7
+301.4163132409999998E7
+161.413646503E7
+1981.413535335E7
+791.38652287E7
+901.3732797480000002E7
+321.369962979E7
+741.3388711109999998E7
+11.3378707239999998E7
+891.337148041E7
+221.335499174E7
+1861.317604077E7
+1891.305492542E7
+141.299397721E7
+931.299298218E7
+1681.299041501E7
+991.2750046790000001E7
+1671.268255069E7
+21.258471636E7
+1821.256239411E7
+611.253677656E7
+1121.234957975E7
+1781.2260301739999998E7
+1721.219775193E7
+1651.219746506E7
+1841.216784393E7
+1871.214970141E7
+1531.211935422E7
+951.20468895E7
+111.200715156E7
+1251.200347611E7
+1541.1851133850000001E7
+151.179843879E7
+671.178579951E7
+81.170789262E7
+871.168637671E7
+1341.1683586929999998E7
+1301.168246149E7
+431.161150462E7
+1021.151554211E7
+211.141066856E7
+621.138927324E7
+91.126484373E7
+801.118329032E7
+1731.1026774860000001E7
+941.092440116E7
+31.075814545E7
+1031.0691221600000001E7
+1581.067861635E7
+491.06445572E7
+1391.044045371E7
+1921.035745974E7
+241.033911936E7
+391.03210148E7
+1561.014364082E7
+1881.011906085E7
+121.0108587399999999E7
+331.005296264E7
+281.005234286E7
+409927827.77
+1999907803.559999999
+1939869674.77
+1069869361.73
+1089868370.31
+1839855564.82
+709700431.94
+489655921.88
+1189622756.149999999
+139592610.32
+839543465.079999998
+1599519909.44
+1479513932.18
+459423874.47
+1179408426.72
+1359311247.28
+1859305341.780000001
+1319223742.49
+79175528.209999999
+719167712.04
+1009131099.530000001
+769092927.110000001
+538979121.97
+1418686511.12
+648627897.290000001
+1018521762.0
+1768510175.88
+198481679.5
+1948464559.54
+918460636.52
+1328416851.24
+1138405217.959999999
+518247118.499999999
+418187897.16
+558092552.890000001
+728007155.3
+1157954624.0
+1707895241.609999999
+1147832023.279999999
+377809598.66
+547578243.79
+1807531794.4799999995
+607508961.6899999995
+317433034.240000001
+357132671.49
+1407122050.08
+1507106237.92
+1077082828.68
+1237049500.720000001
+1907017966.9
+1206920857.09
+1966905182.43
+1776887257.27
+1266813302.029999999
+1226812763.340000001
+2006780024.53
+1576766365.68
+636724960.14
+386667789.55
+586640619.38
+1456633786.590000001
+1446546945.92
+206533101.39
+1276483139.620000001
+106433776.51
+366410209.249999999
+476407355.369999999
+1916347187.43
+1376180452.850000001
+566145826.6
+1046134341.850000001
+446038126.659999999
+976036047.1899999995
+1815853464.149999999
+1625829410.54
+865746713.88
+525680644.4799999995
+1555552007.57
+925489588.28
+55461046.93
+185456316.21
+1495367514.630000001
+1105261352.11
+45162989.07
+65120392.47
+1485061589.27
+424957032.47
+1194954403.48
+844891082.38
+654834763.09
+664719253.369999999
+1794610607.92
+234531731.12
+684504770.61
+274371849.52
+1293997604.7800000003
+1953817436.31
+593765210.2100000004
+573739347.1199999996
+1383567425.75
+1743484708.31
+1643462215.0
+813421610.4200000004
+463398443.33
+853338711.3899999997
+503145791.97
+883117730.2399999998
+1512727444.22
+1521837809.1700000002
+1331517282.33
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q12_shipping.result b/hivesterix/src/test/resources/runtimefunctionts/results/q12_shipping.result
new file mode 100644
index 0000000..bb95677
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q12_shipping.result
@@ -0,0 +1,2 @@
+MAIL5.05.0
+SHIP5.010.0
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q13_customer_distribution.result b/hivesterix/src/test/resources/runtimefunctionts/results/q13_customer_distribution.result
new file mode 100644
index 0000000..beaa047
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q13_customer_distribution.result
@@ -0,0 +1,27 @@
+050
+168
+177
+206
+136
+126
+96
+235
+145
+105
+214
+184
+114
+84
+74
+263
+223
+63
+53
+43
+292
+242
+192
+152
+281
+251
+31
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q14_promotion_effect.result b/hivesterix/src/test/resources/runtimefunctionts/results/q14_promotion_effect.result
new file mode 100644
index 0000000..3b823e7
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q14_promotion_effect.result
@@ -0,0 +1 @@
+15.230212611597251
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q15_top_supplier.result b/hivesterix/src/test/resources/runtimefunctionts/results/q15_top_supplier.result
new file mode 100644
index 0000000..d975521
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q15_top_supplier.result
@@ -0,0 +1 @@
+10Supplier#000000010Saygah3gYWMp72i PY34-852-489-8585797313.3838
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q16_parts_supplier_relationship.result b/hivesterix/src/test/resources/runtimefunctionts/results/q16_parts_supplier_relationship.result
new file mode 100644
index 0000000..393a33a
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q16_parts_supplier_relationship.result
@@ -0,0 +1,34 @@
+Brand#11PROMO ANODIZED TIN454
+Brand#11SMALL PLATED COPPER454
+Brand#11STANDARD POLISHED TIN454
+Brand#13MEDIUM ANODIZED STEEL364
+Brand#14SMALL ANODIZED NICKEL454
+Brand#15LARGE ANODIZED BRASS454
+Brand#21LARGE BURNISHED COPPER194
+Brand#23ECONOMY BRUSHED COPPER94
+Brand#25MEDIUM PLATED BRASS454
+Brand#31ECONOMY PLATED STEEL234
+Brand#31PROMO POLISHED TIN234
+Brand#32MEDIUM BURNISHED BRASS494
+Brand#33LARGE BRUSHED TIN364
+Brand#33SMALL BURNISHED NICKEL34
+Brand#34LARGE PLATED BRASS454
+Brand#34MEDIUM BRUSHED COPPER94
+Brand#34SMALL PLATED BRASS144
+Brand#35STANDARD ANODIZED STEEL234
+Brand#43PROMO POLISHED BRASS194
+Brand#43SMALL BRUSHED NICKEL94
+Brand#44SMALL PLATED COPPER194
+Brand#52MEDIUM BURNISHED TIN454
+Brand#52SMALL BURNISHED NICKEL144
+Brand#53MEDIUM BRUSHED COPPER34
+Brand#55STANDARD ANODIZED BRASS364
+Brand#55STANDARD BRUSHED COPPER34
+Brand#13SMALL BRUSHED NICKEL192
+Brand#25SMALL BURNISHED COPPER32
+Brand#43MEDIUM ANODIZED BRASS142
+Brand#53STANDARD PLATED STEEL452
+Brand#24MEDIUM PLATED STEEL191
+Brand#51ECONOMY POLISHED STEEL491
+Brand#53LARGE BURNISHED NICKEL231
+Brand#54ECONOMY ANODIZED BRASS91
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q17_small_quantity_order_revenue.result b/hivesterix/src/test/resources/runtimefunctionts/results/q17_small_quantity_order_revenue.result
new file mode 100644
index 0000000..ccfa2e3
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q17_small_quantity_order_revenue.result
@@ -0,0 +1 @@
+863.2285714285715
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q18_large_volume_customer.result b/hivesterix/src/test/resources/runtimefunctionts/results/q18_large_volume_customer.result
new file mode 100644
index 0000000..32d3515
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q18_large_volume_customer.result
@@ -0,0 +1,100 @@
+Customer#0000000707025671998-02-27263411.29266.0
+Customer#0000000101044211997-04-04258779.02255.0
+Customer#0000000525257651994-12-15249900.42247.0
+Customer#0000000828234601995-10-03245976.74254.0
+Customer#0000000686822081995-05-01245388.06256.0
+Customer#0000000282823061995-07-26244704.23235.0
+Customer#00000014614659251995-11-13242588.87242.0
+Customer#0000000292911211997-01-13241837.88242.0
+Customer#0000000676739071992-08-19240457.56239.0
+Customer#0000000767651581997-01-21240284.95248.0
+Customer#00000013113144841996-12-24237947.61243.0
+Customer#0000001151156451994-12-03234763.73245.0
+Customer#0000000494942941992-08-15232194.74225.0
+Customer#0000000767614771997-08-24231831.35236.0
+Customer#0000000444446451994-09-20231012.22248.0
+Customer#0000000898959571993-12-27230949.45242.0
+Customer#000000076763261995-06-04229165.17228.0
+Customer#000000067679281995-03-02228136.49241.0
+Customer#0000000797938081994-04-24228054.01227.0
+Customer#0000000373753171994-09-09228002.51231.0
+Customer#00000000443581993-09-20226806.66223.0
+Customer#00000014214256991992-07-30226314.91240.0
+Customer#00000012112118881993-10-31224724.11225.0
+Customer#0000000949426901996-03-31224674.27219.0
+Customer#0000000949454131997-10-17224382.57212.0
+Customer#0000000323253811993-01-29223995.46228.0
+Customer#0000001451455181998-02-08223537.09214.0
+Customer#0000000292929451996-01-03223507.72231.0
+Customer#000000007736541992-06-03222653.54222.0
+Customer#0000001451458071993-11-24222392.53216.0
+Customer#00000014914936191996-11-20222274.54221.0
+Customer#0000000707054721993-04-11221636.83217.0
+Customer#00000013713749001992-06-30221320.76227.0
+Customer#00000010610637781993-05-26221036.31225.0
+Customer#00000012112111531996-04-18220727.97209.0
+Customer#0000000707040041993-05-07220715.14228.0
+Customer#000000098987681996-08-20220636.82231.0
+Customer#00000014914956061996-11-12219959.08231.0
+Customer#000000055554841997-01-03219920.62224.0
+Customer#00000014014042301992-03-04219709.6217.0
+Customer#00000008282391996-09-20219707.84231.0
+Customer#0000000373727891998-03-14219123.27218.0
+Customer#0000000171732691996-03-01218697.85220.0
+Customer#00000014914935901995-05-13218482.7210.0
+Customer#0000001341346141992-12-01218116.21204.0
+Customer#0000000929241971996-08-13217709.03225.0
+Customer#00000013313311561996-10-19217682.81218.0
+Customer#000000046464531997-05-26216826.73226.0
+Customer#00000012412431091993-07-24216104.85210.0
+Customer#0000000434349941996-06-29216071.76213.0
+Customer#00000014914937131998-05-07215342.63213.0
+Customer#00000002929681998-04-18215135.72213.0
+Customer#0000000131324381993-07-15214494.39210.0
+Customer#00000013313346131998-03-05212339.55214.0
+Customer#00000010610617611993-12-24211925.95218.0
+Customer#0000000494912481992-01-02210713.88207.0
+Customer#000000005558591997-04-23210643.96211.0
+Customer#00000010610618271996-06-22210113.88205.0
+Customer#0000000858551841998-07-20209155.48213.0
+Customer#0000001331337101993-01-02208974.42196.0
+Customer#0000000525251861996-08-03208892.63210.0
+Customer#0000000282820501994-06-02208517.98217.0
+Customer#0000000767621801996-09-14208481.57212.0
+Customer#00000011911935881995-03-19207925.83212.0
+Customer#00000013413414441994-12-06207907.6205.0
+Customer#0000001031037421994-12-23207632.55198.0
+Customer#0000000171740991992-08-21207364.8208.0
+Customer#00000010910912861993-05-14207291.83200.0
+Customer#0000000797956331998-05-31207119.83203.0
+Customer#0000000626220221992-03-15206742.11209.0
+Customer#0000000222245831994-09-25206495.43197.0
+Customer#00000014814851851997-07-25206179.68198.0
+Customer#0000000444431751994-07-15205282.63215.0
+Customer#0000000565625651998-02-28204438.57201.0
+Customer#00000014914937471996-08-20204355.65195.0
+Customer#00000010110149641997-07-28204163.1197.0
+Customer#0000000626249921992-05-10203904.8198.0
+Customer#0000000101037511994-04-27202917.72204.0
+Customer#0000000767625341996-07-17202784.54214.0
+Customer#00000000111641992-10-21202660.52213.0
+Customer#00000011811812831996-08-30202623.92200.0
+Customer#0000000101018901996-12-18202364.58207.0
+Customer#0000000777717621994-08-20202227.17216.0
+Customer#00000010610641961998-05-15201455.98198.0
+Customer#0000000646458951997-01-01201419.83200.0
+Customer#00000000886441992-05-01201268.06202.0
+Customer#000000047472611993-06-29201003.12200.0
+Customer#0000000797946721995-11-07199593.71203.0
+Customer#0000001311319301994-12-17199102.23204.0
+Customer#00000011811841611993-08-21198995.21211.0
+Customer#0000000737340691992-05-13198816.13199.0
+Customer#00000014214256961995-05-04198723.3198.0
+Customer#00000013413438721996-09-06198538.68207.0
+Customer#00000012712710591994-02-27198360.22194.0
+Customer#00000010310342931996-08-20198322.91202.0
+Customer#000000080809931995-09-10198238.65194.0
+Customer#000000091914201995-10-31198039.23200.0
+Customer#0000000929233331992-09-16197973.22195.0
+Customer#00000014614641921998-04-19197192.95209.0
+Customer#00000014514515751995-09-13197031.52204.0
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q19_discounted_revenue.result b/hivesterix/src/test/resources/runtimefunctionts/results/q19_discounted_revenue.result
new file mode 100644
index 0000000..2e44572
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q19_discounted_revenue.result
@@ -0,0 +1 @@
+51515.7344
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q1_pricing_summary_report.result b/hivesterix/src/test/resources/runtimefunctionts/results/q1_pricing_summary_report.result
new file mode 100644
index 0000000..c1a7b06
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q1_pricing_summary_report.result
@@ -0,0 +1,4 @@
+AF37474.03.756962464000004E73.567619209699997E73.7101416222424E725.35453315290933725419.2318267929880.0508660351826793961478
+NF1041.01041301.07999060.89799999991036450.8022827.39473684210526427402.6597368421030.0428947368421052638
+NO75168.07.538495536999999E77.165316630340007E77.4498798133073E725.55865351921115225632.4227711662680.049697381842910722941
+RF36511.03.657084124000003E73.473847287579999E73.616906011219296E725.05902539464653225100.09693891560.050027453671928671457
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q20_potential_part_promotion.result b/hivesterix/src/test/resources/runtimefunctionts/results/q20_potential_part_promotion.result
new file mode 100644
index 0000000..d808757
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q20_potential_part_promotion.result
@@ -0,0 +1,10 @@
+Supplier#000000001 N kD4on9OM Ipw3,gf0JBoQDd7tgrzrddZ
+Supplier#00000000289eJ5ksX3ImxJQBvxObC,
+Supplier#000000003q1,G3Pj6OjIuUYfUoH18BFTKP5aU9bEV3
+Supplier#000000004Bk7ah4CK8SYQTepEmvMkkgMwg
+Supplier#000000005Gcdm2rJRzl5qlTVzc
+Supplier#000000006tQxuVm7s7CnK
+Supplier#000000007s,4TicNGB4uO6PaSqNBUq
+Supplier#0000000089Sq4bBH2FQEmaFOocY45sRTxo6yuoG
+Supplier#0000000091KhUgZegwM3ua7dsYmekYBsK
+Supplier#000000010Saygah3gYWMp72i PY
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q21_suppliers_who_kept_orders_waiting.result b/hivesterix/src/test/resources/runtimefunctionts/results/q21_suppliers_who_kept_orders_waiting.result
new file mode 100644
index 0000000..50fa26f
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q21_suppliers_who_kept_orders_waiting.result
@@ -0,0 +1,10 @@
+Supplier#000000007431
+Supplier#000000005417
+Supplier#000000001403
+Supplier#000000009373
+Supplier#000000004367
+Supplier#000000002364
+Supplier#000000010358
+Supplier#000000003349
+Supplier#000000008347
+Supplier#000000006343
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q22_global_sales_opportunity.result b/hivesterix/src/test/resources/runtimefunctionts/results/q22_global_sales_opportunity.result
new file mode 100644
index 0000000..08bcd0c
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q22_global_sales_opportunity.result
@@ -0,0 +1,23 @@
+10320747.13
+11535208.880000000005
+12213735.27
+13213545.3
+1419963.15
+15214624.84
+16211239.02
+1719127.27
+18322156.91
+19643758.41
+20323085.67
+21319400.52
+22320332.18
+23325483.06
+25319038.36
+26538943.899999999994
+27213248.06
+28542700.5
+29436059.009999999995
+30217528.46
+31323599.11
+32425754.22
+33320359.59
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q2_minimum_cost_supplier.result b/hivesterix/src/test/resources/runtimefunctionts/results/q2_minimum_cost_supplier.result
new file mode 100644
index 0000000..402ecf3
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q2_minimum_cost_supplier.result
@@ -0,0 +1,11 @@
+6820.35Supplier#000000007UNITED KINGDOM2Manufacturer#1s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM4Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM22Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM62Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM79Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM94Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM102Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM106Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM131Manufacturer#5s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM159Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM193Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q3_shipping_priority.result b/hivesterix/src/test/resources/runtimefunctionts/results/q3_shipping_priority.result
new file mode 100644
index 0000000..94b2cb9
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q3_shipping_priority.result
@@ -0,0 +1,10 @@
+157454559.51996-12-120
+301253664.311993-05-050
+115353458.01996-04-180
+128452830.331996-01-070
+317151436.9351993-04-060
+32350547.61994-03-260
+153749641.821992-02-150
+205148951.7775999999941996-03-180
+128648603.9036000000051993-05-140
+61448531.71521992-12-010
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q4_order_priority.result b/hivesterix/src/test/resources/runtimefunctionts/results/q4_order_priority.result
new file mode 100644
index 0000000..0e757ee
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q4_order_priority.result
@@ -0,0 +1,5 @@
+1-URGENT9
+2-HIGH7
+3-MEDIUM9
+4-NOT SPECIFIED8
+5-LOW12
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q5_local_supplier_volume.result b/hivesterix/src/test/resources/runtimefunctionts/results/q5_local_supplier_volume.result
new file mode 100644
index 0000000..ecdf467
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q5_local_supplier_volume.result
@@ -0,0 +1,8 @@
+PERU1099912.8209
+MOROCCO520107.17919999996
+IRAN375610.964
+IRAQ364417.398
+ETHIOPIA253825.7622
+ARGENTINA102659.0106
+UNITED KINGDOM61065.8711
+KENYA29679.393200000002
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q6_forecast_revenue_change.result b/hivesterix/src/test/resources/runtimefunctionts/results/q6_forecast_revenue_change.result
new file mode 100644
index 0000000..45bb483
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q6_forecast_revenue_change.result
@@ -0,0 +1 @@
+77949.91860000002
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q7_volume_shipping.result b/hivesterix/src/test/resources/runtimefunctionts/results/q7_volume_shipping.result
new file mode 100644
index 0000000..c4cfcee
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q7_volume_shipping.result
@@ -0,0 +1,37 @@
+ARGENTINAGERMANY199263089.1006
+ARGENTINAGERMANY199364024.4532
+ARGENTINAGERMANY199432719.877199999995
+ARGENTINAGERMANY199563729.862400000005
+ARGENTINAGERMANY19961801.8198
+ETHIOPIAGERMANY199274693.317
+ETHIOPIAGERMANY199313733.706600000001
+ETHIOPIAGERMANY199483631.40359999999
+ETHIOPIAGERMANY199569329.67199999999
+ETHIOPIAGERMANY199642017.435999999994
+IRANGERMANY199238014.335399999996
+IRANGERMANY1994252152.5927
+IRANGERMANY19959106.957199999999
+IRAQGERMANY199268040.7747
+IRAQGERMANY19933676.8004
+IRAQGERMANY199485948.85280000001
+IRAQGERMANY199566380.2488
+KENYAGERMANY199277164.5422
+KENYAGERMANY199363792.8736
+KENYAGERMANY199474537.6256
+KENYAGERMANY199537851.309
+KENYAGERMANY199618467.316
+MOROCCOGERMANY199289669.69080000001
+MOROCCOGERMANY1994173726.0087
+MOROCCOGERMANY199537169.8497
+PERUGERMANY1992226624.7652
+PERUGERMANY199358359.3076
+PERUGERMANY1994345376.2983
+PERUGERMANY199552968.9424
+PERUGERMANY19967960.72
+UNITED KINGDOMGERMANY1992100143.32140000002
+UNITED KINGDOMGERMANY199341582.5227
+UNITED KINGDOMGERMANY1994164740.3271
+UNITED KINGDOMGERMANY199650909.551999999996
+UNITED STATESGERMANY199252480.9528
+UNITED STATESGERMANY1994115566.8388
+UNITED STATESGERMANY199580489.69949999999
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q8_national_market_share.result b/hivesterix/src/test/resources/runtimefunctionts/results/q8_national_market_share.result
new file mode 100644
index 0000000..7d7bd56
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q8_national_market_share.result
@@ -0,0 +1,2 @@
+19950.0
+19960.0
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/q9_product_type_profit.result b/hivesterix/src/test/resources/runtimefunctionts/results/q9_product_type_profit.result
new file mode 100644
index 0000000..f900b06
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/q9_product_type_profit.result
@@ -0,0 +1,60 @@
+ARGENTINA199817779.069700000007
+ARGENTINA199713943.953800000003
+ARGENTINA19967641.422700000003
+ARGENTINA199520892.752500000002
+ARGENTINA199415088.352599999998
+ARGENTINA199317586.344600000004
+ARGENTINA199228732.461499999994
+ETHIOPIA199828217.159999999996
+ETHIOPIA199633970.65
+ETHIOPIA199537720.35
+ETHIOPIA199437251.01
+ETHIOPIA199323782.61
+IRAN199723590.007999999998
+IRAN19967428.232500000005
+IRAN199521000.996499999994
+IRAN199429408.13
+IRAN199349876.41499999999
+IRAN199252064.24
+IRAQ199811619.960399999996
+IRAQ199747910.246
+IRAQ199618459.567499999997
+IRAQ199532782.37010000001
+IRAQ19949041.2317
+IRAQ199330687.2625
+IRAQ199229098.2557
+KENYA199833148.3345
+KENYA199754355.016500000005
+KENYA199653607.4854
+KENYA199585354.87380000002
+KENYA1994102904.2511
+KENYA1993109310.8084
+KENYA1992138534.12099999998
+MOROCCO1998157058.2328
+MOROCCO199788669.96099999998
+MOROCCO1996236833.66719999994
+MOROCCO1995381575.86679999996
+MOROCCO1994243523.4336
+MOROCCO1993232196.78029999993
+MOROCCO1992347434.1452
+PERU1998101109.01959999997
+PERU199758073.086599999995
+PERU199630360.52179999999
+PERU1995138451.77999999997
+PERU199455023.063200000004
+PERU1993110409.0863
+PERU199270946.1916
+UNITED KINGDOM1998139685.04400000002
+UNITED KINGDOM1997183502.04979999995
+UNITED KINGDOM1996374085.28839999996
+UNITED KINGDOM1995548356.7983999999
+UNITED KINGDOM1994266982.7679999999
+UNITED KINGDOM1993717309.464
+UNITED KINGDOM199279540.6016
+UNITED STATES199832847.96
+UNITED STATES199730849.5
+UNITED STATES199656125.46000000001
+UNITED STATES199515961.7977
+UNITED STATES199431671.2
+UNITED STATES199355057.469
+UNITED STATES199251970.23
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u10_join.result b/hivesterix/src/test/resources/runtimefunctionts/results/u10_join.result
new file mode 100644
index 0000000..2239e1c
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u10_join.result
@@ -0,0 +1,1500 @@
+1051.15
+1084.38
+1147.42
+1816.28
+1861.19
+1984.14
+2007.48
+2158.13
+2638.98
+3089.42
+3223.17
+3726.14
+3808.05
+3892.77
+3942.73
+3967.47
+4104.3
+4225.26
+4766.19
+4819.91
+4820.55
+4913.06
+5184.26
+5472.17
+5978.65
+6088.41
+6402.41
+6406.29
+6793.45
+7014.31
+7102.74
+7108.12
+7211.59
+7231.91
+7471.75
+7859.36
+8225.96
+8413.31
+8709.16
+8720.45
+8945.03
+8958.65
+9006.25
+9103.4
+9495.28
+9669.46
+9679.45
+9741.03
+10163.56
+10451.97
+10500.27
+10508.12
+10645.48
+10677.86
+10934.84
+11405.4
+11474.95
+11493.8
+11575.77
+11850.45
+12137.76
+12291.83
+12896.25
+12907.62
+12918.7
+12984.85
+13197.78
+13282.23
+13491.31
+13603.08
+13679.32
+14275.01
+14790.37
+15082.82
+15313.61
+15417.57
+16003.86
+16030.15
+16346.94
+16689.19
+16763.95
+16922.51
+17031.01
+17172.66
+17213.59
+17231.05
+17603.01
+17986.15
+18247.86
+18307.45
+18566.14
+18594.66
+18653.09
+18795.62
+18885.35
+19056.99
+19405.73
+19612.03
+19811.69
+20065.73
+20099.43
+20182.22
+20214.49
+20530.97
+20752.62
+20791.5
+21088.59
+21119.86
+21137.08
+21207.08
+21267.72
+21640.1
+21760.09
+21815.3
+21964.66
+22072.16
+22294.51
+22767.49
+22840.21
+22994.51
+23020.62
+23039.46
+23067.48
+23198.24
+23280.61
+23327.88
+23476.12
+23614.89
+23973.6
+23984.88
+24265.24
+24347.36
+24362.39
+24468.16
+24637.96
+24654.79
+24660.06
+24844.39
+25007.95
+25170.88
+25661.87
+25767.07
+25861.74
+25985.52
+26011.2
+26128.99
+26714.67
+26798.65
+26839.16
+26868.85
+26906.38
+26981.31
+26999.83
+27016.74
+27049.22
+27148.63
+27204.6
+27461.48
+27561.82
+27598.17
+27629.66
+27663.16
+27727.52
+28007.73
+28070.86
+28155.92
+28223.57
+28330.42
+28571.39
+28623.04
+28658.26
+28930.68
+29029.84
+29305.47
+29673.73
+29827.44
+29920.8
+30045.95
+30059.47
+30137.17
+30457.91
+30494.62
+30495.65
+30550.9
+30722.49
+30755.69
+30778.78
+30783.05
+31043.39
+31075.51
+31084.79
+31103.83
+31471.04
+31538.94
+31689.46
+31693.88
+31795.52
+32796.35
+32890.89
+32929.3
+33085.68
+33123.28
+33124.96
+33248.04
+33396.35
+33401.77
+33470.4
+33755.47
+33998.9
+34004.48
+34035.17
+34269.96
+34363.63
+34632.57
+34768.68
+34797.72
+34936.31
+35019.95
+35131.8
+35390.15
+35514.45
+35589.57
+35795.22
+35949.14
+36024.96
+36333.34
+36389.43
+36464.76
+36468.55
+36551.43
+36592.48
+36671.88
+36889.65
+37248.78
+37301.25
+37348.62
+37398.9
+37526.68
+37696.7
+37776.79
+37804.43
+37881.31
+38038.84
+38057.81
+38065.28
+38164.23
+38330.42
+38446.39
+38545.97
+38596.81
+38974.67
+38988.98
+39048.94
+39103.37
+39190.62
+39263.28
+39338.44
+39358.51
+39382.74
+39397.6
+39470.39
+39612.63
+39700.29
+39793.05
+39805.04
+39828.51
+39835.54
+39906.87
+40142.15
+40183.29
+40234.5
+40492.37
+40548.99
+40572.64
+40975.96
+40982.08
+41032.81
+41162.24
+41375.69
+41392.31
+41433.48
+41450.19
+41492.25
+41552.78
+41573.42
+41605.63
+41655.51
+41670.02
+41686.1
+41723.86
+41758.44
+41760.0
+41811.12
+42225.53
+42410.57
+42579.4
+42867.92
+42927.07
+42945.82
+43092.76
+43255.19
+43315.15
+43360.95
+43789.14
+43809.37
+43889.17
+44002.53
+44387.23
+44429.81
+44672.03
+44777.63
+44781.32
+45311.07
+45514.27
+45536.27
+45695.84
+45704.96
+45767.69
+45860.94
+45889.09
+46076.46
+46107.7
+46298.53
+46310.83
+46355.83
+46366.56
+46376.09
+46380.69
+46393.97
+46418.85
+46459.92
+46598.65
+46753.63
+46815.93
+46918.22
+47010.15
+47033.21
+47099.71
+47120.41
+47232.79
+47272.67
+47286.32
+47384.71
+47440.91
+47447.63
+47614.08
+47623.94
+47627.89
+47753.0
+47823.04
+47852.06
+47925.47
+47940.51
+47985.98
+48024.99
+48053.18
+48206.14
+48284.06
+48419.58
+48478.54
+48497.09
+48502.79
+48781.39
+49033.69
+49305.98
+49357.72
+49625.21
+49841.12
+49903.57
+50201.16
+50287.06
+50328.84
+50601.01
+50724.06
+51004.44
+51494.47
+51697.18
+51775.54
+51839.94
+52114.01
+52190.52
+52359.51
+52414.19
+52433.54
+52562.16
+52982.23
+53212.95
+53287.25
+53581.41
+53649.35
+53827.34
+54121.92
+54175.35
+54356.1
+54478.95
+54655.07
+55090.67
+55211.04
+55553.68
+55554.97
+55582.94
+55619.01
+55892.35
+55950.21
+56207.66
+56210.26
+56227.04
+56449.23
+56779.06
+56936.1
+56938.16
+56998.36
+57092.26
+57127.71
+57213.18
+57584.12
+57697.44
+57740.74
+57823.37
+58032.77
+58094.75
+58111.0
+58168.07
+58212.22
+58218.35
+58273.89
+58546.02
+58666.79
+58853.11
+58932.19
+59180.25
+59186.02
+59291.75
+59404.77
+59417.76
+59439.44
+59455.61
+59651.38
+59931.42
+59982.31
+59989.66
+60314.97
+60568.34
+60867.14
+60868.39
+60887.9
+60918.41
+60933.29
+61052.1
+61297.42
+61811.33
+62014.51
+62108.45
+62172.34
+62251.15
+62258.18
+62277.18
+62316.61
+62430.67
+62453.97
+62497.51
+62518.31
+62541.27
+62567.99
+62661.93
+62716.67
+62807.13
+62814.89
+62972.29
+63041.33
+63103.32
+63195.54
+63278.0
+63470.78
+63535.56
+63537.13
+63590.17
+63703.92
+63873.14
+64000.93
+64102.93
+64271.75
+64344.86
+64838.66
+64892.73
+65189.17
+65218.47
+65269.38
+65331.05
+65385.42
+65601.08
+65678.21
+65702.39
+65883.92
+66158.13
+66268.86
+66408.29
+66455.34
+66697.95
+66817.05
+66927.16
+67018.3
+67045.94
+67049.37
+67167.19
+67173.82
+67226.28
+67525.43
+67572.73
+67789.42
+67941.54
+67944.38
+67979.49
+68052.7
+68056.57
+68255.82
+68309.28
+68494.08
+68519.84
+68619.29
+68817.08
+68885.66
+68908.31
+69412.71
+69447.25
+69668.22
+70182.63
+70183.29
+70232.26
+70377.31
+70392.02
+70403.62
+70430.54
+70462.84
+70502.52
+70529.27
+70553.45
+70557.05
+70857.51
+71017.99
+71241.63
+71349.3
+71362.5
+71381.21
+71453.85
+71460.49
+71543.41
+71683.84
+71781.23
+71822.86
+71852.67
+71968.1
+72055.87
+72150.68
+72359.55
+72440.52
+72533.07
+72835.95
+72843.48
+73517.91
+73739.06
+73882.37
+73907.63
+73924.21
+73962.95
+73990.08
+74483.95
+74710.74
+74882.22
+74892.08
+74940.13
+75026.51
+75030.81
+75074.07
+75144.68
+75145.87
+75661.7
+75733.58
+76067.1
+76119.72
+76164.41
+76518.11
+76799.25
+76848.96
+77247.05
+77482.87
+77487.09
+77705.4
+77754.62
+78221.69
+78567.55
+78676.54
+78711.4
+79189.58
+79197.77
+79230.47
+79248.35
+79258.24
+79270.23
+79380.51
+79594.68
+79646.89
+79683.42
+79782.56
+79785.52
+79863.84
+79901.18
+80018.54
+80084.61
+80274.22
+80437.72
+80438.38
+80487.97
+80592.44
+80624.38
+81089.61
+81138.17
+81351.53
+81663.65
+81826.12
+82026.18
+82034.03
+82151.12
+82190.77
+82197.79
+82467.29
+82493.07
+82504.56
+82563.1
+82598.87
+82746.74
+82824.14
+82918.36
+82928.12
+83413.3
+83490.99
+83665.2
+83773.49
+83804.38
+84053.93
+84314.51
+84405.78
+84493.55
+84627.76
+84651.8
+84800.44
+84871.5
+84954.79
+84983.9
+85122.24
+85255.56
+85381.0
+85394.06
+85397.04
+85477.89
+85552.21
+85755.84
+85822.67
+85861.93
+85901.7
+85927.85
+85948.02
+86076.86
+86534.05
+86615.25
+86918.57
+86958.28
+87073.89
+87248.17
+87475.82
+87689.88
+87803.55
+87892.38
+87988.34
+88047.04
+88080.33
+88216.32
+88219.12
+88448.24
+88704.26
+88966.68
+89143.36
+89224.24
+89345.99
+89359.11
+89399.4
+89503.11
+89509.91
+89592.11
+89684.31
+89731.1
+89792.48
+89877.09
+89992.48
+89999.72
+90042.41
+90380.4
+90707.58
+90755.31
+90981.28
+91017.61
+91438.59
+91513.79
+91541.48
+91664.85
+91678.66
+91795.13
+91929.93
+91982.29
+92069.62
+92123.32
+92187.8
+92261.08
+92326.79
+92340.77
+92484.7
+92716.17
+92730.74
+92798.66
+92851.8
+92856.91
+93206.35
+93259.93
+93335.6
+93403.05
+93769.28
+93828.15
+94030.43
+94135.77
+94231.71
+94400.43
+94446.69
+94527.23
+94534.07
+94649.25
+94866.39
+94969.41
+95063.41
+95126.32
+95291.79
+95312.81
+95453.8
+95563.95
+95591.4
+95731.5
+95761.93
+95929.46
+96015.13
+96057.42
+96166.92
+96359.65
+96431.77
+96458.03
+96596.81
+96855.29
+97502.23
+97733.87
+97758.28
+97981.06
+98140.86
+98258.73
+98275.37
+98335.61
+98422.83
+98485.21
+98541.95
+98643.17
+98723.11
+98753.57
+98956.82
+98987.51
+99050.81
+99088.75
+99177.69
+99290.01
+99377.51
+99494.67
+99577.55
+99798.76
+99834.47
+99851.38
+99960.46
+100035.03
+100106.96
+100290.07
+100445.59
+100671.06
+100714.13
+100749.6
+100750.67
+100758.71
+100954.64
+101020.75
+101202.18
+101240.96
+101339.68
+101429.61
+101616.44
+101709.52
+101878.46
+101899.93
+102207.2
+102226.59
+102534.63
+102665.03
+102693.61
+102793.59
+102807.59
+103085.13
+103192.74
+103320.91
+103641.15
+103656.44
+103814.27
+104038.78
+104166.56
+104259.88
+104391.11
+104523.03
+104585.77
+104664.4
+104695.09
+104759.25
+104927.66
+104966.33
+105094.09
+105145.4
+105302.05
+105421.09
+105492.37
+105561.21
+105770.53
+105789.01
+106036.84
+106045.89
+106122.38
+106150.05
+106315.25
+106446.02
+106612.48
+106635.21
+106823.97
+106935.19
+107139.29
+107140.22
+107231.6
+107406.26
+107732.23
+107824.4
+107919.86
+107958.62
+108107.42
+108171.38
+108196.56
+108239.46
+108317.51
+108334.3
+108353.08
+108361.46
+108412.57
+108424.94
+108443.84
+109077.69
+109202.9
+109246.54
+109247.0
+109301.02
+109351.87
+109469.9
+109536.55
+109979.71
+110194.31
+110432.76
+110626.82
+110826.83
+110958.36
+111020.79
+111207.93
+111403.66
+111547.31
+111597.96
+111924.56
+112444.42
+112603.34
+112770.89
+112843.52
+112845.04
+112912.0
+113156.3
+113191.45
+113417.03
+113505.19
+113701.89
+113954.89
+114097.63
+114145.18
+114681.55
+114879.19
+114978.03
+114990.63
+115161.29
+115219.88
+115411.37
+115688.85
+115717.37
+115759.13
+115877.4
+115929.14
+115959.96
+116003.11
+116069.66
+116093.49
+116127.69
+116193.97
+116227.05
+116258.53
+116740.67
+116789.98
+116792.13
+116923.0
+117132.72
+117397.16
+117537.87
+117728.37
+117817.52
+117827.18
+117909.23
+118036.54
+118201.53
+118464.65
+118495.12
+118570.79
+118802.62
+118896.95
+119164.96
+119201.64
+119605.91
+119820.38
+119838.14
+119887.47
+119910.04
+119917.28
+120053.52
+120073.51
+120086.84
+120324.82
+120516.93
+120533.46
+120626.49
+120828.12
+121220.59
+121360.83
+121663.68
+121704.45
+121935.23
+121994.04
+122157.14
+122490.66
+122611.05
+122621.31
+122785.82
+122823.78
+122964.66
+122969.79
+123014.83
+123120.06
+123477.05
+123586.03
+123956.25
+124380.73
+124402.59
+124470.32
+124539.0
+124608.69
+124637.19
+124661.48
+124675.27
+124719.97
+124950.79
+125011.92
+125030.37
+125125.57
+125170.86
+125188.72
+125191.12
+125396.8
+125509.17
+125562.09
+125792.83
+126066.0
+126113.32
+126205.42
+126235.35
+126597.21
+126804.9
+126902.81
+126948.81
+127068.89
+127132.51
+127134.05
+127191.47
+127345.45
+127527.05
+127532.2
+127717.72
+127817.38
+127934.71
+128014.15
+128024.71
+128234.96
+128367.97
+128624.99
+128776.9
+128786.57
+129004.81
+129012.84
+129033.13
+129062.13
+129086.93
+129096.8
+129546.56
+129636.99
+129657.08
+129803.03
+129821.09
+130125.64
+130204.17
+130345.9
+130515.61
+130647.18
+130687.64
+130702.19
+131079.52
+131092.67
+131103.31
+131122.82
+131146.47
+131251.81
+131432.42
+131447.03
+131604.34
+131752.07
+131891.05
+132494.97
+132838.49
+132854.79
+132972.24
+133002.55
+133038.59
+133273.64
+133451.14
+133665.12
+133829.35
+133864.82
+134308.04
+134333.33
+134413.58
+134442.37
+134726.09
+134814.65
+135157.92
+135187.33
+135335.96
+135613.18
+135643.87
+135647.68
+135745.58
+135761.05
+135934.6
+136058.7
+136162.13
+136360.37
+136517.34
+136582.6
+136634.34
+136765.03
+136954.81
+137030.4
+137223.14
+137297.71
+137369.5
+137473.58
+137576.19
+138010.76
+138423.03
+138584.2
+138902.23
+139047.22
+139104.17
+139124.72
+139332.94
+139542.14
+139579.18
+139580.85
+139714.71
+139854.41
+139902.71
+139915.23
+140031.23
+140363.7
+140390.6
+140608.69
+140685.01
+140838.11
+141118.87
+141159.63
+141311.01
+141486.77
+141554.06
+141647.08
+141679.41
+141822.19
+141824.23
+141858.97
+141902.54
+142029.67
+142070.65
+142290.77
+142291.79
+142322.33
+142323.38
+142494.99
+142767.26
+142866.39
+142891.22
+143070.7
+143191.54
+143212.85
+143276.28
+143350.75
+143411.69
+143753.01
+143813.39
+143899.85
+144123.37
+144335.16
+145040.38
+145060.41
+145096.17
+145100.47
+145232.09
+145249.13
+145426.11
+145495.62
+145630.76
+145654.97
+145695.42
+145713.03
+145730.19
+145761.99
+145768.47
+145857.6
+145898.47
+145906.24
+145971.6
+146136.1
+146221.66
+146298.28
+146382.71
+146581.14
+146849.33
+146862.27
+146896.72
+146933.07
+147071.86
+147243.86
+147329.51
+147343.68
+147543.26
+147915.68
+148176.06
+148299.05
+148500.71
+148682.82
+148789.52
+149451.88
+149466.62
+149536.2
+149614.34
+149671.92
+150334.57
+150345.63
+150349.92
+150582.77
+150585.73
+150655.44
+150886.49
+151089.96
+151148.81
+151233.65
+151282.65
+151404.78
+151419.5
+151515.08
+151801.06
+152940.0
+153024.28
+153048.74
+153069.14
+153233.93
+153259.41
+153386.61
+153426.79
+153568.02
+153637.79
+153720.22
+153864.67
+154260.84
+154383.37
+154590.05
+154653.32
+154936.43
+154958.89
+154967.89
+155017.92
+155045.39
+155356.8
+155680.6
+156018.74
+156345.64
+156381.95
+156407.4
+156477.94
+156697.55
+156802.8
+157040.57
+157062.7
+157767.86
+157968.27
+158345.31
+158479.37
+158776.68
+158853.63
+158885.83
+158893.16
+158991.89
+159005.35
+159015.39
+159170.8
+159171.69
+159578.94
+159720.39
+159870.44
+160627.01
+160882.76
+161066.22
+161307.05
+161625.5
+161745.44
+162088.3
+162113.46
+162165.94
+162176.23
+162634.53
+162786.67
+163709.85
+163746.47
+163794.53
+163834.46
+163966.67
+164462.61
+165019.32
+165219.08
+165454.51
+165489.52
+165655.99
+165890.47
+166335.03
+166506.22
+166669.86
+166947.75
+166961.06
+167017.39
+167056.34
+167262.34
+168495.03
+168562.27
+168618.39
+168721.45
+168750.48
+168952.1
+169107.85
+169756.19
+169797.4
+169847.63
+170360.27
+171128.1
+171326.48
+171488.73
+171522.54
+171894.45
+171975.62
+172021.87
+172102.96
+172436.3
+172861.58
+172872.37
+172899.84
+172908.01
+173024.71
+173130.2
+173145.37
+173340.09
+173444.6
+173522.71
+174090.3
+174223.2
+174569.88
+174634.12
+174798.97
+175017.68
+175142.28
+175422.13
+176084.63
+176278.57
+176294.34
+176525.53
+176647.54
+176864.83
+176867.34
+176911.21
+177181.67
+177458.97
+178060.22
+178249.05
+178254.66
+178491.24
+178492.01
+178821.73
+179287.95
+179292.14
+179418.31
+179462.21
+179554.41
+179686.07
+179747.47
+179827.12
+179854.51
+179923.54
+179984.42
+180054.29
+180119.22
+180396.95
+180417.11
+180455.98
+180478.16
+180692.9
+180737.75
+180912.15
+181077.36
+181320.5
+181346.56
+182025.95
+182432.17
+182516.77
+182966.39
+183104.71
+183176.6
+183286.33
+183493.42
+183620.33
+183671.08
+183734.56
+183965.61
+184172.31
+184583.99
+185496.66
+185968.15
+186215.81
+186370.23
+186669.1
+186912.51
+187156.38
+187514.11
+187516.29
+187553.35
+187932.3
+188124.55
+188985.18
+189361.42
+189547.57
+189651.76
+190142.17
+190490.78
+190652.53
+190693.92
+190960.69
+192074.23
+192217.86
+192417.85
+193832.28
+193857.67
+194119.31
+194159.59
+195515.26
+195834.96
+195844.84
+196080.26
+196443.16
+196989.09
+197031.52
+197192.95
+197973.22
+198039.23
+198238.65
+198322.91
+198360.22
+198538.68
+198723.3
+198816.13
+198995.21
+199102.23
+199593.71
+201003.12
+201268.06
+201419.83
+201455.98
+202227.17
+202364.58
+202623.92
+202660.52
+202784.54
+202917.72
+203904.8
+204163.1
+204355.65
+204438.57
+205282.63
+206179.68
+206495.43
+206742.11
+207119.83
+207291.83
+207364.8
+207632.55
+207907.6
+207925.83
+208481.57
+208517.98
+208892.63
+208974.42
+209155.48
+210113.88
+210643.96
+210713.88
+211925.95
+212339.55
+214494.39
+215135.72
+215342.63
+216071.76
+216104.85
+216826.73
+217682.81
+217709.03
+218116.21
+218482.7
+218697.85
+219123.27
+219707.84
+219709.6
+219920.62
+219959.08
+220636.82
+220715.14
+220727.97
+221036.31
+221320.76
+221636.83
+222274.54
+222392.53
+222653.54
+223507.72
+223537.09
+223995.46
+224382.57
+224674.27
+224724.11
+226314.91
+226806.66
+228002.51
+228054.01
+228136.49
+229165.17
+230949.45
+231012.22
+231831.35
+232194.74
+234763.73
+237947.61
+240284.95
+240457.56
+241837.88
+242588.87
+244704.23
+245388.06
+245976.74
+249900.42
+258779.02
+263411.29
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u10_nestedloop_join.result b/hivesterix/src/test/resources/runtimefunctionts/results/u10_nestedloop_join.result
new file mode 100644
index 0000000..ffc76a4
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u10_nestedloop_join.result
@@ -0,0 +1,300 @@
+ARGENTINAALGERIA10
+BRAZILALGERIA20
+BRAZILARGENTINA21
+CANADAALGERIA30
+CANADAARGENTINA31
+CANADABRAZIL32
+CHINAALGERIA180
+CHINAARGENTINA181
+CHINABRAZIL182
+CHINACANADA183
+CHINAEGYPT184
+CHINAETHIOPIA185
+CHINAFRANCE186
+CHINAGERMANY187
+CHINAINDIA188
+CHINAINDONESIA189
+CHINAIRAN1810
+CHINAIRAQ1811
+CHINAJAPAN1812
+CHINAJORDAN1813
+CHINAKENYA1814
+CHINAMOROCCO1815
+CHINAMOZAMBIQUE1816
+CHINAPERU1817
+EGYPTALGERIA40
+EGYPTARGENTINA41
+EGYPTBRAZIL42
+EGYPTCANADA43
+ETHIOPIAALGERIA50
+ETHIOPIAARGENTINA51
+ETHIOPIABRAZIL52
+ETHIOPIACANADA53
+ETHIOPIAEGYPT54
+FRANCEALGERIA60
+FRANCEARGENTINA61
+FRANCEBRAZIL62
+FRANCECANADA63
+FRANCEEGYPT64
+FRANCEETHIOPIA65
+GERMANYALGERIA70
+GERMANYARGENTINA71
+GERMANYBRAZIL72
+GERMANYCANADA73
+GERMANYEGYPT74
+GERMANYETHIOPIA75
+GERMANYFRANCE76
+INDIAALGERIA80
+INDIAARGENTINA81
+INDIABRAZIL82
+INDIACANADA83
+INDIAEGYPT84
+INDIAETHIOPIA85
+INDIAFRANCE86
+INDIAGERMANY87
+INDONESIAALGERIA90
+INDONESIAARGENTINA91
+INDONESIABRAZIL92
+INDONESIACANADA93
+INDONESIAEGYPT94
+INDONESIAETHIOPIA95
+INDONESIAFRANCE96
+INDONESIAGERMANY97
+INDONESIAINDIA98
+IRANALGERIA100
+IRANARGENTINA101
+IRANBRAZIL102
+IRANCANADA103
+IRANEGYPT104
+IRANETHIOPIA105
+IRANFRANCE106
+IRANGERMANY107
+IRANINDIA108
+IRANINDONESIA109
+IRAQALGERIA110
+IRAQARGENTINA111
+IRAQBRAZIL112
+IRAQCANADA113
+IRAQEGYPT114
+IRAQETHIOPIA115
+IRAQFRANCE116
+IRAQGERMANY117
+IRAQINDIA118
+IRAQINDONESIA119
+IRAQIRAN1110
+JAPANALGERIA120
+JAPANARGENTINA121
+JAPANBRAZIL122
+JAPANCANADA123
+JAPANEGYPT124
+JAPANETHIOPIA125
+JAPANFRANCE126
+JAPANGERMANY127
+JAPANINDIA128
+JAPANINDONESIA129
+JAPANIRAN1210
+JAPANIRAQ1211
+JORDANALGERIA130
+JORDANARGENTINA131
+JORDANBRAZIL132
+JORDANCANADA133
+JORDANEGYPT134
+JORDANETHIOPIA135
+JORDANFRANCE136
+JORDANGERMANY137
+JORDANINDIA138
+JORDANINDONESIA139
+JORDANIRAN1310
+JORDANIRAQ1311
+JORDANJAPAN1312
+KENYAALGERIA140
+KENYAARGENTINA141
+KENYABRAZIL142
+KENYACANADA143
+KENYAEGYPT144
+KENYAETHIOPIA145
+KENYAFRANCE146
+KENYAGERMANY147
+KENYAINDIA148
+KENYAINDONESIA149
+KENYAIRAN1410
+KENYAIRAQ1411
+KENYAJAPAN1412
+KENYAJORDAN1413
+MOROCCOALGERIA150
+MOROCCOARGENTINA151
+MOROCCOBRAZIL152
+MOROCCOCANADA153
+MOROCCOEGYPT154
+MOROCCOETHIOPIA155
+MOROCCOFRANCE156
+MOROCCOGERMANY157
+MOROCCOINDIA158
+MOROCCOINDONESIA159
+MOROCCOIRAN1510
+MOROCCOIRAQ1511
+MOROCCOJAPAN1512
+MOROCCOJORDAN1513
+MOROCCOKENYA1514
+MOZAMBIQUEALGERIA160
+MOZAMBIQUEARGENTINA161
+MOZAMBIQUEBRAZIL162
+MOZAMBIQUECANADA163
+MOZAMBIQUEEGYPT164
+MOZAMBIQUEETHIOPIA165
+MOZAMBIQUEFRANCE166
+MOZAMBIQUEGERMANY167
+MOZAMBIQUEINDIA168
+MOZAMBIQUEINDONESIA169
+MOZAMBIQUEIRAN1610
+MOZAMBIQUEIRAQ1611
+MOZAMBIQUEJAPAN1612
+MOZAMBIQUEJORDAN1613
+MOZAMBIQUEKENYA1614
+MOZAMBIQUEMOROCCO1615
+PERUALGERIA170
+PERUARGENTINA171
+PERUBRAZIL172
+PERUCANADA173
+PERUEGYPT174
+PERUETHIOPIA175
+PERUFRANCE176
+PERUGERMANY177
+PERUINDIA178
+PERUINDONESIA179
+PERUIRAN1710
+PERUIRAQ1711
+PERUJAPAN1712
+PERUJORDAN1713
+PERUKENYA1714
+PERUMOROCCO1715
+PERUMOZAMBIQUE1716
+ROMANIAALGERIA190
+ROMANIAARGENTINA191
+ROMANIABRAZIL192
+ROMANIACANADA193
+ROMANIACHINA1918
+ROMANIAEGYPT194
+ROMANIAETHIOPIA195
+ROMANIAFRANCE196
+ROMANIAGERMANY197
+ROMANIAINDIA198
+ROMANIAINDONESIA199
+ROMANIAIRAN1910
+ROMANIAIRAQ1911
+ROMANIAJAPAN1912
+ROMANIAJORDAN1913
+ROMANIAKENYA1914
+ROMANIAMOROCCO1915
+ROMANIAMOZAMBIQUE1916
+ROMANIAPERU1917
+RUSSIAALGERIA220
+RUSSIAARGENTINA221
+RUSSIABRAZIL222
+RUSSIACANADA223
+RUSSIACHINA2218
+RUSSIAEGYPT224
+RUSSIAETHIOPIA225
+RUSSIAFRANCE226
+RUSSIAGERMANY227
+RUSSIAINDIA228
+RUSSIAINDONESIA229
+RUSSIAIRAN2210
+RUSSIAIRAQ2211
+RUSSIAJAPAN2212
+RUSSIAJORDAN2213
+RUSSIAKENYA2214
+RUSSIAMOROCCO2215
+RUSSIAMOZAMBIQUE2216
+RUSSIAPERU2217
+RUSSIAROMANIA2219
+RUSSIASAUDI ARABIA2220
+RUSSIAVIETNAM2221
+SAUDI ARABIAALGERIA200
+SAUDI ARABIAARGENTINA201
+SAUDI ARABIABRAZIL202
+SAUDI ARABIACANADA203
+SAUDI ARABIACHINA2018
+SAUDI ARABIAEGYPT204
+SAUDI ARABIAETHIOPIA205
+SAUDI ARABIAFRANCE206
+SAUDI ARABIAGERMANY207
+SAUDI ARABIAINDIA208
+SAUDI ARABIAINDONESIA209
+SAUDI ARABIAIRAN2010
+SAUDI ARABIAIRAQ2011
+SAUDI ARABIAJAPAN2012
+SAUDI ARABIAJORDAN2013
+SAUDI ARABIAKENYA2014
+SAUDI ARABIAMOROCCO2015
+SAUDI ARABIAMOZAMBIQUE2016
+SAUDI ARABIAPERU2017
+SAUDI ARABIAROMANIA2019
+UNITED KINGDOMALGERIA230
+UNITED KINGDOMARGENTINA231
+UNITED KINGDOMBRAZIL232
+UNITED KINGDOMCANADA233
+UNITED KINGDOMCHINA2318
+UNITED KINGDOMEGYPT234
+UNITED KINGDOMETHIOPIA235
+UNITED KINGDOMFRANCE236
+UNITED KINGDOMGERMANY237
+UNITED KINGDOMINDIA238
+UNITED KINGDOMINDONESIA239
+UNITED KINGDOMIRAN2310
+UNITED KINGDOMIRAQ2311
+UNITED KINGDOMJAPAN2312
+UNITED KINGDOMJORDAN2313
+UNITED KINGDOMKENYA2314
+UNITED KINGDOMMOROCCO2315
+UNITED KINGDOMMOZAMBIQUE2316
+UNITED KINGDOMPERU2317
+UNITED KINGDOMROMANIA2319
+UNITED KINGDOMRUSSIA2322
+UNITED KINGDOMSAUDI ARABIA2320
+UNITED KINGDOMVIETNAM2321
+UNITED STATESALGERIA240
+UNITED STATESARGENTINA241
+UNITED STATESBRAZIL242
+UNITED STATESCANADA243
+UNITED STATESCHINA2418
+UNITED STATESEGYPT244
+UNITED STATESETHIOPIA245
+UNITED STATESFRANCE246
+UNITED STATESGERMANY247
+UNITED STATESINDIA248
+UNITED STATESINDONESIA249
+UNITED STATESIRAN2410
+UNITED STATESIRAQ2411
+UNITED STATESJAPAN2412
+UNITED STATESJORDAN2413
+UNITED STATESKENYA2414
+UNITED STATESMOROCCO2415
+UNITED STATESMOZAMBIQUE2416
+UNITED STATESPERU2417
+UNITED STATESROMANIA2419
+UNITED STATESRUSSIA2422
+UNITED STATESSAUDI ARABIA2420
+UNITED STATESUNITED KINGDOM2423
+UNITED STATESVIETNAM2421
+VIETNAMALGERIA210
+VIETNAMARGENTINA211
+VIETNAMBRAZIL212
+VIETNAMCANADA213
+VIETNAMCHINA2118
+VIETNAMEGYPT214
+VIETNAMETHIOPIA215
+VIETNAMFRANCE216
+VIETNAMGERMANY217
+VIETNAMINDIA218
+VIETNAMINDONESIA219
+VIETNAMIRAN2110
+VIETNAMIRAQ2111
+VIETNAMJAPAN2112
+VIETNAMJORDAN2113
+VIETNAMKENYA2114
+VIETNAMMOROCCO2115
+VIETNAMMOZAMBIQUE2116
+VIETNAMPERU2117
+VIETNAMROMANIA2119
+VIETNAMSAUDI ARABIA2120
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u1_gby.result b/hivesterix/src/test/resources/runtimefunctionts/results/u1_gby.result
new file mode 100644
index 0000000..7efa15a
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u1_gby.result
@@ -0,0 +1,5 @@
+05
+15
+25
+35
+45
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u2_gby_external.result b/hivesterix/src/test/resources/runtimefunctionts/results/u2_gby_external.result
new file mode 100644
index 0000000..b30110b
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u2_gby_external.result
@@ -0,0 +1,10 @@
+598841.0
+598725.25
+598617.8
+59854.0
+598420.25
+595928.857142857142858
+595829.6
+595734.57142857142857
+595630.0
+595523.0
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u3_union.result b/hivesterix/src/test/resources/runtimefunctionts/results/u3_union.result
new file mode 100644
index 0000000..e8ec7a2
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u3_union.result
@@ -0,0 +1,9 @@
+2 N kD4on9OM Ipw3,gf0JBoQDd7tgrzrddZ17Supplier#000000001
+181KhUgZegwM3ua7dsYmekYBsK10Supplier#000000009
+489eJ5ksX3ImxJQBvxObC,5Supplier#000000002
+169Sq4bBH2FQEmaFOocY45sRTxo6yuoG17Supplier#000000008
+8Bk7ah4CK8SYQTepEmvMkkgMwg15Supplier#000000004
+10Gcdm2rJRzl5qlTVzc11Supplier#000000005
+6q1,G3Pj6OjIuUYfUoH18BFTKP5aU9bEV31Supplier#000000003
+14s,4TicNGB4uO6PaSqNBUq23Supplier#000000007
+12tQxuVm7s7CnK14Supplier#000000006
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u4_gby_distinct.result b/hivesterix/src/test/resources/runtimefunctionts/results/u4_gby_distinct.result
new file mode 100644
index 0000000..b30110b
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u4_gby_distinct.result
@@ -0,0 +1,10 @@
+598841.0
+598725.25
+598617.8
+59854.0
+598420.25
+595928.857142857142858
+595829.6
+595734.57142857142857
+595630.0
+595523.0
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u5_gby_global.result b/hivesterix/src/test/resources/runtimefunctionts/results/u5_gby_global.result
new file mode 100644
index 0000000..8828088
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u5_gby_global.result
@@ -0,0 +1 @@
+152398.0
diff --git a/hivesterix/src/test/resources/runtimefunctionts/results/u6_large_card_join.result b/hivesterix/src/test/resources/runtimefunctionts/results/u6_large_card_join.result
new file mode 100644
index 0000000..82b4857
--- /dev/null
+++ b/hivesterix/src/test/resources/runtimefunctionts/results/u6_large_card_join.result
@@ -0,0 +1,512 @@
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
diff --git a/hyracks-algebricks/hyracks-algebricks-common/pom.xml b/hyracks-algebricks/hyracks-algebricks-common/pom.xml
deleted file mode 100644
index 39cce82..0000000
--- a/hyracks-algebricks/hyracks-algebricks-common/pom.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-common</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-api</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/pom.xml b/hyracks-algebricks/hyracks-algebricks-compiler/pom.xml
deleted file mode 100644
index 7219595..0000000
--- a/hyracks-algebricks/hyracks-algebricks-compiler/pom.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-compiler</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-rewriter</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-core</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java b/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java
deleted file mode 100644
index f1e7acb..0000000
--- a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.compiler.api;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IPartialAggregationTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
-import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
-
-public abstract class AbstractCompilerFactoryBuilder {
-
-    protected List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> logicalRewrites;
-    protected List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> physicalRewrites;
-    protected ITypeTraitProvider typeTraitProvider;
-    protected ISerializerDeserializerProvider serializerDeserializerProvider;
-    protected IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider;
-    protected IBinaryComparatorFactoryProvider comparatorFactoryProvider;
-    protected IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory;
-    protected IBinaryIntegerInspectorFactory binaryIntegerInspectorFactory;
-    protected IPrinterFactoryProvider printerProvider;
-    protected IExpressionRuntimeProvider expressionRuntimeProvider;
-    protected IExpressionTypeComputer expressionTypeComputer;
-    protected INullableTypeComputer nullableTypeComputer;
-    protected IExpressionEvalSizeComputer expressionEvalSizeComputer;
-    protected INullWriterFactory nullWriterFactory;
-    protected INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider;
-    protected IPartialAggregationTypeComputer partialAggregationTypeComputer;
-    protected IMergeAggregationExpressionFactory mergeAggregationExpressionFactory;
-    protected PhysicalOptimizationConfig physicalOptimizationConfig = new PhysicalOptimizationConfig();
-    protected AlgebricksPartitionConstraint clusterLocations;
-    protected int frameSize = -1;
-
-    public abstract ICompilerFactory create();
-
-    public void setLogicalRewrites(List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> logicalRewrites) {
-        this.logicalRewrites = logicalRewrites;
-    }
-
-    public void setPhysicalRewrites(List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> physicalRewrites) {
-        this.physicalRewrites = physicalRewrites;
-    }
-
-    public void setTypeTraitProvider(ITypeTraitProvider typeTraitProvider) {
-        this.typeTraitProvider = typeTraitProvider;
-    }
-
-    public ITypeTraitProvider getTypeTraitProvider() {
-        return typeTraitProvider;
-    }
-
-    public void setSerializerDeserializerProvider(ISerializerDeserializerProvider serializerDeserializerProvider) {
-        this.serializerDeserializerProvider = serializerDeserializerProvider;
-    }
-
-    public ISerializerDeserializerProvider getSerializerDeserializerProvider() {
-        return serializerDeserializerProvider;
-    }
-
-    public void setHashFunctionFactoryProvider(IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider) {
-        this.hashFunctionFactoryProvider = hashFunctionFactoryProvider;
-    }
-
-    public IBinaryHashFunctionFactoryProvider getHashFunctionFactoryProvider() {
-        return hashFunctionFactoryProvider;
-    }
-
-    public void setComparatorFactoryProvider(IBinaryComparatorFactoryProvider comparatorFactoryProvider) {
-        this.comparatorFactoryProvider = comparatorFactoryProvider;
-    }
-
-    public IBinaryComparatorFactoryProvider getComparatorFactoryProvider() {
-        return comparatorFactoryProvider;
-    }
-
-    public void setBinaryBooleanInspectorFactory(IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory) {
-        this.binaryBooleanInspectorFactory = binaryBooleanInspectorFactory;
-    }
-
-    public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
-        return binaryBooleanInspectorFactory;
-    }
-
-    public void setBinaryIntegerInspectorFactory(IBinaryIntegerInspectorFactory binaryIntegerInspectorFactory) {
-        this.binaryIntegerInspectorFactory = binaryIntegerInspectorFactory;
-    }
-
-    public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
-        return binaryIntegerInspectorFactory;
-    }
-
-    public void setPrinterProvider(IPrinterFactoryProvider printerProvider) {
-        this.printerProvider = printerProvider;
-    }
-
-    public IPrinterFactoryProvider getPrinterProvider() {
-        return printerProvider;
-    }
-
-    public void setExpressionRuntimeProvider(IExpressionRuntimeProvider expressionRuntimeProvider) {
-        this.expressionRuntimeProvider = expressionRuntimeProvider;
-    }
-
-    public IExpressionRuntimeProvider getExpressionRuntimeProvider() {
-        return expressionRuntimeProvider;
-    }
-
-    public void setExpressionTypeComputer(IExpressionTypeComputer expressionTypeComputer) {
-        this.expressionTypeComputer = expressionTypeComputer;
-    }
-
-    public IExpressionTypeComputer getExpressionTypeComputer() {
-        return expressionTypeComputer;
-    }
-
-    public void setClusterLocations(AlgebricksPartitionConstraint clusterLocations) {
-        this.clusterLocations = clusterLocations;
-    }
-
-    public AlgebricksPartitionConstraint getClusterLocations() {
-        return clusterLocations;
-    }
-
-    public void setNullWriterFactory(INullWriterFactory nullWriterFactory) {
-        this.nullWriterFactory = nullWriterFactory;
-    }
-
-    public INullWriterFactory getNullWriterFactory() {
-        return nullWriterFactory;
-    }
-
-    public void setExpressionEvalSizeComputer(IExpressionEvalSizeComputer expressionEvalSizeComputer) {
-        this.expressionEvalSizeComputer = expressionEvalSizeComputer;
-    }
-
-    public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
-        return expressionEvalSizeComputer;
-    }
-
-    public void setNormalizedKeyComputerFactoryProvider(
-            INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider) {
-        this.normalizedKeyComputerFactoryProvider = normalizedKeyComputerFactoryProvider;
-    }
-
-    public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
-        return normalizedKeyComputerFactoryProvider;
-    }
-
-    public void setFrameSize(int frameSize) {
-        this.frameSize = frameSize;
-    }
-
-    public int getFrameSize() {
-        return frameSize;
-    }
-
-    public IPartialAggregationTypeComputer getPartialAggregationTypeComputer() {
-        return partialAggregationTypeComputer;
-    }
-
-    public void setPartialAggregationTypeComputer(IPartialAggregationTypeComputer partialAggregationTypeComputer) {
-        this.partialAggregationTypeComputer = partialAggregationTypeComputer;
-    }
-
-    public IMergeAggregationExpressionFactory getIMergeAggregationExpressionFactory() {
-        return mergeAggregationExpressionFactory;
-    }
-
-    public void setIMergeAggregationExpressionFactory(
-            IMergeAggregationExpressionFactory mergeAggregationExpressionFactory) {
-        this.mergeAggregationExpressionFactory = mergeAggregationExpressionFactory;
-    }
-
-    public PhysicalOptimizationConfig getPhysicalOptimizationConfig() {
-        return physicalOptimizationConfig;
-    }
-
-    public void setPhysicalOptimizationConfig(PhysicalOptimizationConfig physicalOptimizationConfig) {
-        this.physicalOptimizationConfig = physicalOptimizationConfig;
-    }
-
-    public void setNullableTypeComputer(INullableTypeComputer nullableTypeComputer) {
-        this.nullableTypeComputer = nullableTypeComputer;
-    }
-
-    public INullableTypeComputer getNullableTypeComputer() {
-        return nullableTypeComputer;
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java b/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
deleted file mode 100644
index 953e1b1..0000000
--- a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.compiler.api;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.core.config.AlgebricksConfig;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.PlanCompiler;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AlgebricksOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.HeuristicOptimizer;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IOptimizationContextFactory;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.api.job.IJobletEventListenerFactory;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class HeuristicCompilerFactoryBuilder extends AbstractCompilerFactoryBuilder {
-
-    public static class DefaultOptimizationContextFactory implements IOptimizationContextFactory {
-
-        public static final DefaultOptimizationContextFactory INSTANCE = new DefaultOptimizationContextFactory();
-
-        private DefaultOptimizationContextFactory() {
-        }
-
-        @Override
-        public IOptimizationContext createOptimizationContext(int varCounter, int frameSize,
-                IExpressionEvalSizeComputer expressionEvalSizeComputer,
-                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
-                IExpressionTypeComputer expressionTypeComputer, INullableTypeComputer nullableTypeComputer,
-                PhysicalOptimizationConfig physicalOptimizationConfig) {
-            return new AlgebricksOptimizationContext(varCounter, frameSize, expressionEvalSizeComputer,
-                    mergeAggregationExpressionFactory, expressionTypeComputer, nullableTypeComputer,
-                    physicalOptimizationConfig);
-        }
-    }
-
-    private IOptimizationContextFactory optCtxFactory;
-
-    public HeuristicCompilerFactoryBuilder() {
-        this.optCtxFactory = DefaultOptimizationContextFactory.INSTANCE;
-    }
-
-    public HeuristicCompilerFactoryBuilder(IOptimizationContextFactory optCtxFactory) {
-        this.optCtxFactory = optCtxFactory;
-    }
-
-    @Override
-    public ICompilerFactory create() {
-        return new ICompilerFactory() {
-            @Override
-            public ICompiler createCompiler(final ILogicalPlan plan, final IMetadataProvider<?, ?> metadata,
-                    int varCounter) {
-                final IOptimizationContext oc = optCtxFactory.createOptimizationContext(varCounter, frameSize,
-                        expressionEvalSizeComputer, mergeAggregationExpressionFactory, expressionTypeComputer,
-                        nullableTypeComputer, physicalOptimizationConfig);
-                oc.setMetadataDeclarations(metadata);
-                final HeuristicOptimizer opt = new HeuristicOptimizer(plan, logicalRewrites, physicalRewrites, oc);
-                return new ICompiler() {
-
-                    @Override
-                    public void optimize() throws AlgebricksException {
-                        opt.optimize();
-                    }
-
-                    @Override
-                    public JobSpecification createJob(Object appContext, IJobletEventListenerFactory jobEventListenerFactory) throws AlgebricksException {
-                        AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Starting Job Generation.\n");
-                        JobGenContext context = new JobGenContext(null, metadata, appContext,
-                                serializerDeserializerProvider, hashFunctionFactoryProvider, comparatorFactoryProvider,
-                                typeTraitProvider, binaryBooleanInspectorFactory, binaryIntegerInspectorFactory,
-                                printerProvider, nullWriterFactory, normalizedKeyComputerFactoryProvider,
-                                expressionRuntimeProvider, expressionTypeComputer, nullableTypeComputer, oc,
-                                expressionEvalSizeComputer, partialAggregationTypeComputer, frameSize, clusterLocations);
-                        PlanCompiler pc = new PlanCompiler(context);
-                        return pc.compilePlan(plan, null, jobEventListenerFactory);
-                    }
-                };
-            }
-        };
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompiler.java b/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompiler.java
deleted file mode 100644
index 517ca6b..0000000
--- a/hyracks-algebricks/hyracks-algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/ICompiler.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.compiler.api;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.api.job.IJobletEventListenerFactory;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public interface ICompiler {
-    public void optimize() throws AlgebricksException;
-
-    public JobSpecification createJob(Object appContext, IJobletEventListenerFactory jobEventListenerFactory)
-            throws AlgebricksException;
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/pom.xml b/hyracks-algebricks/hyracks-algebricks-core/pom.xml
deleted file mode 100644
index e6bc4cd..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/pom.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-core</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-storage-am-btree</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-storage-am-rtree</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-dataflow-std</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-runtime</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-common</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
deleted file mode 100644
index 0845d05..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.core.algebra.base;
-
-public enum PhysicalOperatorTag {
-    AGGREGATE,
-    ASSIGN,
-    BROADCAST_EXCHANGE,
-    BTREE_SEARCH,
-    STATS,
-    DATASOURCE_SCAN,
-    EMPTY_TUPLE_SOURCE,
-    EXTERNAL_GROUP_BY,
-    IN_MEMORY_HASH_JOIN,
-    HASH_GROUP_BY,
-    HASH_PARTITION_EXCHANGE,
-    HASH_PARTITION_MERGE_EXCHANGE,
-    HYBRID_HASH_JOIN,
-    HDFS_READER,
-    IN_MEMORY_STABLE_SORT,
-    MICRO_PRE_CLUSTERED_GROUP_BY,
-    NESTED_LOOP,
-    NESTED_TUPLE_SOURCE,
-    ONE_TO_ONE_EXCHANGE,
-    PRE_SORTED_DISTINCT_BY,
-    PRE_CLUSTERED_GROUP_BY,
-    RANGE_PARTITION_EXCHANGE,
-    RANDOM_MERGE_EXCHANGE,
-    RTREE_SEARCH,
-    RUNNING_AGGREGATE,
-    SORT_MERGE_EXCHANGE,
-    SINK,
-    SINK_WRITE,
-    SPLIT,
-    STABLE_SORT,
-    STREAM_LIMIT,
-    STREAM_DIE,
-    STREAM_SELECT,
-    STREAM_PROJECT,
-    STRING_STREAM_SCRIPT,
-    SUBPLAN,
-    UNION_ALL,
-    UNNEST,
-    WRITE_RESULT,
-    INSERT_DELETE,
-    INDEX_INSERT_DELETE,
-    UPDATE,
-    INVERTED_INDEX_SEARCH,
-    FUZZY_INVERTED_INDEX_SEARCH,
-    PARTITIONINGSPLIT,
-    EXTENSION_OPERATOR
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/VariableReferenceExpression.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/VariableReferenceExpression.java
deleted file mode 100644
index a998a3b..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/VariableReferenceExpression.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.expressions;
-
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
-
-public class VariableReferenceExpression extends AbstractLogicalExpression {
-    private LogicalVariable variable;
-
-    public VariableReferenceExpression(LogicalVariable variable) {
-        this.variable = variable;
-    }
-
-    public LogicalVariable getVariableReference() {
-        return variable;
-    }
-
-    public void setVariable(LogicalVariable variable) {
-        this.variable = variable;
-    }
-
-    @Override
-    public LogicalExpressionTag getExpressionTag() {
-        return LogicalExpressionTag.VARIABLE;
-    }
-
-    @Override
-    public String toString() {
-        return variable.toString();
-    }
-
-    @Override
-    public void getUsedVariables(Collection<LogicalVariable> vars) {
-        // if (!vars.contains(variable)) {
-        vars.add(variable);
-        // }
-    }
-
-    @Override
-    public void substituteVar(LogicalVariable v1, LogicalVariable v2) {
-        if (variable.equals(v1)) {
-            variable = v2;
-        }
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (!(obj instanceof VariableReferenceExpression)) {
-            return false;
-        } else {
-            return variable.equals(((VariableReferenceExpression) obj).getVariableReference());
-        }
-    }
-
-    @Override
-    public int hashCode() {
-        return variable.getId();
-    }
-
-    @Override
-    public <R, T> R accept(ILogicalExpressionVisitor<R, T> visitor, T arg) throws AlgebricksException {
-        return visitor.visitVariableReferenceExpression(this, arg);
-    }
-
-    @Override
-    public AbstractLogicalExpression cloneExpression() {
-        return new VariableReferenceExpression(variable);
-    }
-
-    @Override
-    public boolean splitIntoConjuncts(List<Mutable<ILogicalExpression>> conjs) {
-        return false;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
deleted file mode 100644
index f1909b0..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.metadata;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public interface IMetadataProvider<S, I> {
-    public IDataSource<S> findDataSource(S id) throws AlgebricksException;
-
-    /**
-     * Obs: A scanner may choose to contribute a null
-     * AlgebricksPartitionConstraint and implement
-     * contributeSchedulingConstraints instead.
-     */
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(IDataSource<S> dataSource,
-            List<LogicalVariable> scanVariables, List<LogicalVariable> projectVariables, boolean projectPushed,
-            IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec)
-            throws AlgebricksException;
-
-    public boolean scannerOperatorIsLeaf(IDataSource<S> dataSource);
-
-    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,
-            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc)
-            throws AlgebricksException;
-
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(IDataSource<S> dataSource,
-            IOperatorSchema propagatedSchema, List<LogicalVariable> keys, LogicalVariable payLoadVar,
-            JobGenContext context, JobSpecification jobSpec) throws AlgebricksException;
-
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(IDataSource<S> dataSource,
-            IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv, List<LogicalVariable> keys,
-            LogicalVariable payLoadVar, RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException;
-
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(IDataSource<S> dataSource,
-            IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv, List<LogicalVariable> keys,
-            LogicalVariable payLoadVar, RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException;
-
-    /**
-     * Creates the insert runtime of IndexInsertDeletePOperator, which models
-     * insert/delete operations into a secondary index.
-     * 
-     * @param dataSource
-     *            Target secondary index.
-     * @param propagatedSchema
-     *            Output schema of the insert/delete operator to be created.
-     * @param inputSchemas
-     *            Output schemas of the insert/delete operator to be created.
-     * @param typeEnv
-     *            Type environment of the original IndexInsertDeleteOperator operator.
-     * @param primaryKeys
-     *            Variables for the dataset's primary keys that the dataSource secondary index belongs to.
-     * @param secondaryKeys
-     *            Variables for the secondary-index keys.
-     * @param filterExpr
-     *            Filtering expression to be pushed inside the runtime op.
-     *            Such a filter may, e.g., exclude NULLs from being inserted/deleted.
-     * @param recordDesc
-     *            Output record descriptor of the runtime op to be created.
-     * @param context
-     *            Job generation context.
-     * @param spec
-     *            Target job specification.
-     * @return
-     *         A Hyracks IOperatorDescriptor and its partition constraint.
-     * @throws AlgebricksException
-     */
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(
-            IDataSourceIndex<I, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,
-            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
-            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
-            throws AlgebricksException;
-
-    /**
-     * Creates the delete runtime of IndexInsertDeletePOperator, which models
-     * insert/delete operations into a secondary index.
-     * 
-     * @param dataSource
-     *            Target secondary index.
-     * @param propagatedSchema
-     *            Output schema of the insert/delete operator to be created.
-     * @param inputSchemas
-     *            Output schemas of the insert/delete operator to be created.
-     * @param typeEnv
-     *            Type environment of the original IndexInsertDeleteOperator operator.
-     * @param primaryKeys
-     *            Variables for the dataset's primary keys that the dataSource secondary index belongs to.
-     * @param secondaryKeys
-     *            Variables for the secondary-index keys.
-     * @param filterExpr
-     *            Filtering expression to be pushed inside the runtime op.
-     *            Such a filter may, e.g., exclude NULLs from being inserted/deleted.
-     * @param recordDesc
-     *            Output record descriptor of the runtime op to be created.
-     * @param context
-     *            Job generation context.
-     * @param spec
-     *            Target job specification.
-     * @return
-     *         A Hyracks IOperatorDescriptor and its partition constraint.
-     * @throws AlgebricksException
-     */
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(
-            IDataSourceIndex<I, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,
-            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
-            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
-            throws AlgebricksException;
-
-    public IDataSourceIndex<I, S> findDataSourceIndex(I indexId, S dataSourceId) throws AlgebricksException;
-
-    public IFunctionInfo lookupFunction(FunctionIdentifier fid);
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java
deleted file mode 100644
index a1e8a87..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
-import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
-
-/**
- * @author rico
- */
-public class ExtensionOperator extends AbstractLogicalOperator {
-
-    private IOperatorExtension delegate;
-
-    public ExtensionOperator(IOperatorExtension delegate) {
-        super();
-        if (delegate == null) {
-            throw new IllegalArgumentException("delegate cannot be null!");
-        }
-        this.delegate = delegate;
-        setExecutionMode(delegate.getExecutionMode());
-    }
-
-    @Override
-    public void recomputeSchema() throws AlgebricksException {
-        schema = new ArrayList<LogicalVariable>(inputs.get(0).getValue().getSchema());
-        delegate.setSchema(schema);
-    }
-
-    @Override
-    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
-        return delegate.acceptExpressionTransform(transform);
-    }
-
-    @Override
-    public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
-        return visitor.visitExtensionOperator(this, arg);
-    }
-
-    @Override
-    public boolean isMap() {
-        return this.delegate.isMap();
-    }
-
-    @Override
-    public VariablePropagationPolicy getVariablePropagationPolicy() {
-        return VariablePropagationPolicy.ALL;
-    }
-
-    @Override
-    public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
-        return this.createPropagatingAllInputsTypeEnvironment(ctx);
-    }
-
-    @Override
-    public LogicalOperatorTag getOperatorTag() {
-        return LogicalOperatorTag.EXTENSION_OPERATOR;
-    }
-
-    public IOperatorExtension getNewInstanceOfDelegateOperator() {
-        return delegate.newInstance();
-    }
-
-    @Override
-    public List<LogicalVariable> getSchema() {
-        return this.schema;
-    }
-
-    @Override
-    public ExecutionMode getExecutionMode() {
-        return delegate.getExecutionMode();
-    }
-
-    @Override
-    public void setExecutionMode(ExecutionMode mode) {
-        delegate.setExecutionMode(mode);
-    }
-
-    @Override
-    public IPhysicalOperator getPhysicalOperator() {
-        return delegate.getPhysicalOperator();
-    }
-
-    @Override
-    public IVariableTypeEnvironment computeInputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
-        return this.createPropagatingAllInputsTypeEnvironment(ctx);
-    }
-
-    @Override
-    public String toString() {
-        return delegate.toString();
-    }
-
-    public IOperatorExtension getDelegate() {
-        return delegate;
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
deleted file mode 100644
index 31061db..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ /dev/null
@@ -1,825 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
-
-public class IsomorphismOperatorVisitor implements ILogicalOperatorVisitor<Boolean, ILogicalOperator> {
-
-    private Map<LogicalVariable, LogicalVariable> variableMapping = new HashMap<LogicalVariable, LogicalVariable>();
-
-    public IsomorphismOperatorVisitor() {
-    }
-
-    @Override
-    public Boolean visitAggregateOperator(AggregateOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.AGGREGATE)
-            return Boolean.FALSE;
-        AggregateOperator aggOpArg = (AggregateOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(
-                getPairList(op.getVariables(), op.getExpressions()),
-                getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitRunningAggregateOperator(RunningAggregateOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.RUNNINGAGGREGATE)
-            return Boolean.FALSE;
-        RunningAggregateOperator aggOpArg = (RunningAggregateOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(
-                getPairList(op.getVariables(), op.getExpressions()),
-                getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) copyAndSubstituteVar(op, arg);
-        if (aop.getOperatorTag() != LogicalOperatorTag.EMPTYTUPLESOURCE)
-            return Boolean.FALSE;
-        return Boolean.TRUE;
-    }
-
-    @Override
-    public Boolean visitExtensionOperator(ExtensionOperator op, ILogicalOperator arg) throws AlgebricksException {
-        ExtensionOperator aop = (ExtensionOperator) copyAndSubstituteVar(op, arg);
-        if (aop.getOperatorTag() != LogicalOperatorTag.EXTENSION_OPERATOR)
-            return Boolean.FALSE;
-        return Boolean.TRUE;
-    }
-
-    @Override
-    public Boolean visitGroupByOperator(GroupByOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        // require the same physical operator, otherwise delivers different data
-        // properties
-        if (aop.getOperatorTag() != LogicalOperatorTag.GROUP
-                || aop.getPhysicalOperator().getOperatorTag() != op.getPhysicalOperator().getOperatorTag())
-            return Boolean.FALSE;
-
-        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyLists = op.getGroupByList();
-        GroupByOperator gbyOpArg = (GroupByOperator) copyAndSubstituteVar(op, arg);
-        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyListsArg = gbyOpArg.getGroupByList();
-
-        List<Pair<LogicalVariable, ILogicalExpression>> listLeft = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
-        List<Pair<LogicalVariable, ILogicalExpression>> listRight = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
-
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : keyLists)
-            listLeft.add(new Pair<LogicalVariable, ILogicalExpression>(pair.first, pair.second.getValue()));
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : keyListsArg)
-            listRight.add(new Pair<LogicalVariable, ILogicalExpression>(pair.first, pair.second.getValue()));
-
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(listLeft, listRight);
-
-        if (!isomorphic)
-            return Boolean.FALSE;
-        int sizeOp = op.getNestedPlans().size();
-        int sizeArg = gbyOpArg.getNestedPlans().size();
-        if (sizeOp != sizeArg)
-            return Boolean.FALSE;
-
-        GroupByOperator argOp = (GroupByOperator) arg;
-        List<ILogicalPlan> plans = op.getNestedPlans();
-        List<ILogicalPlan> plansArg = argOp.getNestedPlans();
-        for (int i = 0; i < plans.size(); i++) {
-            List<Mutable<ILogicalOperator>> roots = plans.get(i).getRoots();
-            List<Mutable<ILogicalOperator>> rootsArg = plansArg.get(i).getRoots();
-            if (roots.size() != rootsArg.size())
-                return Boolean.FALSE;
-            for (int j = 0; j < roots.size(); j++) {
-                ILogicalOperator topOp1 = roots.get(j).getValue();
-                ILogicalOperator topOp2 = rootsArg.get(j).getValue();
-                isomorphic = this.checkBottomUp(topOp1, topOp2);
-                if (!isomorphic)
-                    return Boolean.FALSE;
-            }
-        }
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitLimitOperator(LimitOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.LIMIT)
-            return Boolean.FALSE;
-        LimitOperator limitOpArg = (LimitOperator) copyAndSubstituteVar(op, arg);
-        if (op.getOffset() != limitOpArg.getOffset())
-            return Boolean.FALSE;
-        boolean isomorphic = op.getMaxObjects().getValue().equals(limitOpArg.getMaxObjects().getValue());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitDieOperator(DieOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.DIE)
-            return Boolean.FALSE;
-        DieOperator dieOpArg = (DieOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = op.getAfterObjects().getValue().equals(dieOpArg.getAfterObjects().getValue());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.INNERJOIN)
-            return Boolean.FALSE;
-        InnerJoinOperator joinOpArg = (InnerJoinOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = op.getCondition().getValue().equals(joinOpArg.getCondition().getValue());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitLeftOuterJoinOperator(LeftOuterJoinOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN)
-            return Boolean.FALSE;
-        LeftOuterJoinOperator joinOpArg = (LeftOuterJoinOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = op.getCondition().getValue().equals(joinOpArg.getCondition().getValue());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE)
-            return Boolean.FALSE;
-        return Boolean.TRUE;
-    }
-
-    @Override
-    public Boolean visitOrderOperator(OrderOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.ORDER)
-            return Boolean.FALSE;
-        OrderOperator orderOpArg = (OrderOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = compareIOrderAndExpressions(op.getOrderExpressions(), orderOpArg.getOrderExpressions());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitAssignOperator(AssignOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-            return Boolean.FALSE;
-        AssignOperator assignOpArg = (AssignOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(
-                getPairList(op.getVariables(), op.getExpressions()),
-                getPairList(assignOpArg.getVariables(), assignOpArg.getExpressions()));
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitSelectOperator(SelectOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.SELECT)
-            return Boolean.FALSE;
-        SelectOperator selectOpArg = (SelectOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = op.getCondition().getValue().equals(selectOpArg.getCondition().getValue());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitProjectOperator(ProjectOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.PROJECT)
-            return Boolean.FALSE;
-        ProjectOperator projectOpArg = (ProjectOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), projectOpArg.getVariables());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitPartitioningSplitOperator(PartitioningSplitOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.PARTITIONINGSPLIT)
-            return Boolean.FALSE;
-        PartitioningSplitOperator partitionOpArg = (PartitioningSplitOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = compareExpressions(op.getExpressions(), partitionOpArg.getExpressions());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.REPLICATE)
-            return Boolean.FALSE;
-        return Boolean.TRUE;
-    }
-
-    @Override
-    public Boolean visitScriptOperator(ScriptOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.SCRIPT)
-            return Boolean.FALSE;
-        ScriptOperator scriptOpArg = (ScriptOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = op.getScriptDescription().equals(scriptOpArg.getScriptDescription());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitSubplanOperator(SubplanOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.SUBPLAN)
-            return Boolean.FALSE;
-        SubplanOperator subplanOpArg = (SubplanOperator) copyAndSubstituteVar(op, arg);
-        List<ILogicalPlan> plans = op.getNestedPlans();
-        List<ILogicalPlan> plansArg = subplanOpArg.getNestedPlans();
-        for (int i = 0; i < plans.size(); i++) {
-            List<Mutable<ILogicalOperator>> roots = plans.get(i).getRoots();
-            List<Mutable<ILogicalOperator>> rootsArg = plansArg.get(i).getRoots();
-            if (roots.size() == rootsArg.size())
-                return Boolean.FALSE;
-            for (int j = 0; j < roots.size(); j++) {
-                ILogicalOperator topOp1 = roots.get(j).getValue();
-                ILogicalOperator topOp2 = rootsArg.get(j).getValue();
-                boolean isomorphic = this.checkBottomUp(topOp1, topOp2);
-                if (!isomorphic)
-                    return Boolean.FALSE;
-            }
-        }
-        return Boolean.TRUE;
-    }
-
-    @Override
-    public Boolean visitUnionOperator(UnionAllOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.UNIONALL)
-            return Boolean.FALSE;
-        UnionAllOperator unionOpArg = (UnionAllOperator) copyAndSubstituteVar(op, arg);
-        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> mapping = op.getVariableMappings();
-        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> mappingArg = unionOpArg.getVariableMappings();
-        if (mapping.size() != mappingArg.size())
-            return Boolean.FALSE;
-        return VariableUtilities.varListEqualUnordered(mapping, mappingArg);
-    }
-
-    @Override
-    public Boolean visitUnnestOperator(UnnestOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.UNNEST)
-            return Boolean.FALSE;
-        UnnestOperator unnestOpArg = (UnnestOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), unnestOpArg.getVariables())
-                && variableEqual(op.getPositionalVariable(), unnestOpArg.getPositionalVariable());
-        if (!isomorphic)
-            return Boolean.FALSE;
-        isomorphic = op.getExpressionRef().getValue().equals(unnestOpArg.getExpressionRef().getValue());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitUnnestMapOperator(UnnestMapOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.UNNEST_MAP)
-            return Boolean.FALSE;
-        UnnestOperator unnestOpArg = (UnnestOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), unnestOpArg.getVariables());
-        if (!isomorphic)
-            return Boolean.FALSE;
-        isomorphic = op.getExpressionRef().getValue().equals(unnestOpArg.getExpressionRef().getValue());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitDataScanOperator(DataSourceScanOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN)
-            return Boolean.FALSE;
-        DataSourceScanOperator argScan = (DataSourceScanOperator) arg;
-        if (!argScan.getDataSource().toString().equals(op.getDataSource().toString()))
-            return Boolean.FALSE;
-        DataSourceScanOperator scanOpArg = (DataSourceScanOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), scanOpArg.getVariables())
-                && op.getDataSource().toString().equals(scanOpArg.getDataSource().toString());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitDistinctOperator(DistinctOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.DISTINCT)
-            return Boolean.FALSE;
-        DistinctOperator distinctOpArg = (DistinctOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = compareExpressions(op.getExpressions(), distinctOpArg.getExpressions());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitExchangeOperator(ExchangeOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.EXCHANGE)
-            return Boolean.FALSE;
-        // require the same partition property
-        if (!(op.getPhysicalOperator().getOperatorTag() == aop.getPhysicalOperator().getOperatorTag()))
-            return Boolean.FALSE;
-        variableMapping.clear();
-        IsomorphismUtilities.mapVariablesTopDown(op, arg, variableMapping);
-        IPhysicalPropertiesVector properties = op.getPhysicalOperator().getDeliveredProperties();
-        IPhysicalPropertiesVector propertiesArg = aop.getPhysicalOperator().getDeliveredProperties();
-        if (properties == null && propertiesArg == null)
-            return Boolean.TRUE;
-        if (properties == null || propertiesArg == null)
-            return Boolean.FALSE;
-        IPartitioningProperty partProp = properties.getPartitioningProperty();
-        IPartitioningProperty partPropArg = propertiesArg.getPartitioningProperty();
-        if (!partProp.getPartitioningType().equals(partPropArg.getPartitioningType()))
-            return Boolean.FALSE;
-        List<LogicalVariable> columns = new ArrayList<LogicalVariable>();
-        partProp.getColumns(columns);
-        List<LogicalVariable> columnsArg = new ArrayList<LogicalVariable>();
-        partPropArg.getColumns(columnsArg);
-        if (columns.size() != columnsArg.size())
-            return Boolean.FALSE;
-        if (columns.size() == 0)
-            return Boolean.TRUE;
-        for (int i = 0; i < columnsArg.size(); i++) {
-            LogicalVariable rightVar = columnsArg.get(i);
-            LogicalVariable leftVar = variableMapping.get(rightVar);
-            if (leftVar != null)
-                columnsArg.set(i, leftVar);
-        }
-        return VariableUtilities.varListEqualUnordered(columns, columnsArg);
-    }
-
-    @Override
-    public Boolean visitWriteOperator(WriteOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.WRITE)
-            return Boolean.FALSE;
-        WriteOperator writeOpArg = (WriteOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), writeOpArg.getSchema());
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitWriteResultOperator(WriteResultOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.WRITE_RESULT)
-            return Boolean.FALSE;
-        WriteResultOperator writeOpArg = (WriteResultOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), writeOpArg.getSchema());
-        if (!op.getDataSource().equals(writeOpArg.getDataSource()))
-            isomorphic = false;
-        if (!op.getPayloadExpression().equals(writeOpArg.getPayloadExpression()))
-            isomorphic = false;
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitInsertDeleteOperator(InsertDeleteOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE)
-            return Boolean.FALSE;
-        InsertDeleteOperator insertOpArg = (InsertDeleteOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), insertOpArg.getSchema());
-        if (!op.getDataSource().equals(insertOpArg.getDataSource()))
-            isomorphic = false;
-        if (!op.getPayloadExpression().equals(insertOpArg.getPayloadExpression()))
-            isomorphic = false;
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.INDEX_INSERT_DELETE)
-            return Boolean.FALSE;
-        IndexInsertDeleteOperator insertOpArg = (IndexInsertDeleteOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), insertOpArg.getSchema());
-        if (!op.getDataSourceIndex().equals(insertOpArg.getDataSourceIndex()))
-            isomorphic = false;
-        return isomorphic;
-    }
-
-    @Override
-    public Boolean visitSinkOperator(SinkOperator op, ILogicalOperator arg) throws AlgebricksException {
-        return true;
-    }
-
-    private Boolean compareExpressions(List<Mutable<ILogicalExpression>> opExprs,
-            List<Mutable<ILogicalExpression>> argExprs) {
-        if (opExprs.size() != argExprs.size())
-            return Boolean.FALSE;
-        for (int i = 0; i < opExprs.size(); i++) {
-            boolean isomorphic = opExprs.get(i).getValue().equals(argExprs.get(i).getValue());
-            if (!isomorphic)
-                return Boolean.FALSE;
-        }
-        return Boolean.TRUE;
-    }
-
-    private Boolean compareIOrderAndExpressions(List<Pair<IOrder, Mutable<ILogicalExpression>>> opOrderExprs,
-            List<Pair<IOrder, Mutable<ILogicalExpression>>> argOrderExprs) {
-        if (opOrderExprs.size() != argOrderExprs.size())
-            return Boolean.FALSE;
-        for (int i = 0; i < opOrderExprs.size(); i++) {
-            boolean isomorphic = opOrderExprs.get(i).first.equals(argOrderExprs.get(i).first);
-            if (!isomorphic)
-                return Boolean.FALSE;
-            isomorphic = opOrderExprs.get(i).second.getValue().equals(argOrderExprs.get(i).second.getValue());
-            if (!isomorphic)
-                return Boolean.FALSE;
-        }
-        return Boolean.TRUE;
-    }
-
-    private Boolean checkBottomUp(ILogicalOperator op1, ILogicalOperator op2) throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> inputs1 = op1.getInputs();
-        List<Mutable<ILogicalOperator>> inputs2 = op2.getInputs();
-        if (inputs1.size() != inputs2.size())
-            return Boolean.FALSE;
-        for (int i = 0; i < inputs1.size(); i++) {
-            ILogicalOperator input1 = inputs1.get(i).getValue();
-            ILogicalOperator input2 = inputs2.get(i).getValue();
-            boolean isomorphic = checkBottomUp(input1, input2);
-            if (!isomorphic)
-                return Boolean.FALSE;
-        }
-        return IsomorphismUtilities.isOperatorIsomorphic(op1, op2);
-    }
-
-    private ILogicalOperator copyAndSubstituteVar(ILogicalOperator op, ILogicalOperator argOp)
-            throws AlgebricksException {
-        ILogicalOperator newOp = IsomorphismOperatorVisitor.deepCopy(argOp);
-        variableMapping.clear();
-        IsomorphismUtilities.mapVariablesTopDown(op, argOp, variableMapping);
-
-        List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
-        if (argOp.getInputs().size() > 0)
-            for (int i = 0; i < argOp.getInputs().size(); i++)
-                VariableUtilities.getLiveVariables(argOp.getInputs().get(i).getValue(), liveVars);
-        List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
-        VariableUtilities.getProducedVariables(argOp, producedVars);
-        List<LogicalVariable> producedVarsNew = new ArrayList<LogicalVariable>();
-        VariableUtilities.getProducedVariables(op, producedVarsNew);
-
-        if (producedVars.size() != producedVarsNew.size())
-            return newOp;
-        for (Entry<LogicalVariable, LogicalVariable> map : variableMapping.entrySet()) {
-            if (liveVars.contains(map.getKey())) {
-                VariableUtilities.substituteVariables(newOp, map.getKey(), map.getValue(), null);
-            }
-        }
-        for (int i = 0; i < producedVars.size(); i++)
-            VariableUtilities.substituteVariables(newOp, producedVars.get(i), producedVarsNew.get(i), null);
-        return newOp;
-    }
-
-    public List<Pair<LogicalVariable, ILogicalExpression>> getPairList(List<LogicalVariable> vars,
-            List<Mutable<ILogicalExpression>> exprs) throws AlgebricksException {
-        List<Pair<LogicalVariable, ILogicalExpression>> list = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
-        if (vars.size() != exprs.size())
-            throw new AlgebricksException("variable list size does not equal to expression list size ");
-        for (int i = 0; i < vars.size(); i++) {
-            list.add(new Pair<LogicalVariable, ILogicalExpression>(vars.get(i), exprs.get(i).getValue()));
-        }
-        return list;
-    }
-
-    private static ILogicalOperator deepCopy(ILogicalOperator op) throws AlgebricksException {
-        OperatorDeepCopyVisitor visitor = new OperatorDeepCopyVisitor();
-        return op.accept(visitor, null);
-    }
-
-    private static ILogicalPlan deepCopy(ILogicalPlan plan) throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> roots = plan.getRoots();
-        List<Mutable<ILogicalOperator>> newRoots = new ArrayList<Mutable<ILogicalOperator>>();
-        for (Mutable<ILogicalOperator> opRef : roots)
-            newRoots.add(new MutableObject<ILogicalOperator>(bottomUpCopyOperators(opRef.getValue())));
-        return new ALogicalPlanImpl(newRoots);
-    }
-
-    private static ILogicalOperator bottomUpCopyOperators(ILogicalOperator op) throws AlgebricksException {
-        ILogicalOperator newOp = deepCopy(op);
-        newOp.getInputs().clear();
-        for (Mutable<ILogicalOperator> child : op.getInputs())
-            newOp.getInputs().add(new MutableObject<ILogicalOperator>(bottomUpCopyOperators(child.getValue())));
-        return newOp;
-    }
-
-    private static boolean variableEqual(LogicalVariable var, LogicalVariable varArg) {
-        if (var == null && varArg == null)
-            return true;
-        if (var.equals(varArg))
-            return true;
-        else
-            return false;
-    }
-
-    private static class OperatorDeepCopyVisitor implements ILogicalOperatorVisitor<ILogicalOperator, Void> {
-
-        @Override
-        public ILogicalOperator visitAggregateOperator(AggregateOperator op, Void arg) throws AlgebricksException {
-            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
-            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            newList.addAll(op.getVariables());
-            deepCopyExpressionRefs(newExpressions, op.getExpressions());
-            return new AggregateOperator(newList, newExpressions);
-        }
-
-        @Override
-        public ILogicalOperator visitRunningAggregateOperator(RunningAggregateOperator op, Void arg)
-                throws AlgebricksException {
-            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
-            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            newList.addAll(op.getVariables());
-            deepCopyExpressionRefs(newExpressions, op.getExpressions());
-            return new RunningAggregateOperator(newList, newExpressions);
-        }
-
-        @Override
-        public ILogicalOperator visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Void arg)
-                throws AlgebricksException {
-            return new EmptyTupleSourceOperator();
-        }
-
-        @Override
-        public ILogicalOperator visitGroupByOperator(GroupByOperator op, Void arg) throws AlgebricksException {
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decoList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
-            ArrayList<ILogicalPlan> newSubplans = new ArrayList<ILogicalPlan>();
-            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : op.getGroupByList())
-                groupByList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(pair.first,
-                        deepCopyExpressionRef(pair.second)));
-            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : op.getDecorList())
-                decoList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(pair.first,
-                        deepCopyExpressionRef(pair.second)));
-            for (ILogicalPlan plan : op.getNestedPlans()) {
-                newSubplans.add(IsomorphismOperatorVisitor.deepCopy(plan));
-            }
-            return new GroupByOperator(groupByList, decoList, newSubplans);
-        }
-
-        @Override
-        public ILogicalOperator visitLimitOperator(LimitOperator op, Void arg) throws AlgebricksException {
-            return new LimitOperator(deepCopyExpressionRef(op.getMaxObjects()).getValue(), deepCopyExpressionRef(
-                    op.getOffset()).getValue(), op.isTopmostLimitOp());
-        }
-
-        @Override
-        public ILogicalOperator visitDieOperator(DieOperator op, Void arg) throws AlgebricksException {
-            return new DieOperator(deepCopyExpressionRef(op.getAfterObjects()).getValue());
-        }
-
-        @Override
-        public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, Void arg) throws AlgebricksException {
-            return new InnerJoinOperator(deepCopyExpressionRef(op.getCondition()), op.getInputs().get(0), op
-                    .getInputs().get(1));
-        }
-
-        @Override
-        public ILogicalOperator visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Void arg)
-                throws AlgebricksException {
-            return new LeftOuterJoinOperator(deepCopyExpressionRef(op.getCondition()), op.getInputs().get(0), op
-                    .getInputs().get(1));
-        }
-
-        @Override
-        public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg)
-                throws AlgebricksException {
-            return new NestedTupleSourceOperator(null);
-        }
-
-        @Override
-        public ILogicalOperator visitOrderOperator(OrderOperator op, Void arg) throws AlgebricksException {
-            return new OrderOperator(this.deepCopyOrderAndExpression(op.getOrderExpressions()));
-        }
-
-        @Override
-        public ILogicalOperator visitAssignOperator(AssignOperator op, Void arg) throws AlgebricksException {
-            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
-            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            newList.addAll(op.getVariables());
-            deepCopyExpressionRefs(newExpressions, op.getExpressions());
-            return new AssignOperator(newList, newExpressions);
-        }
-
-        @Override
-        public ILogicalOperator visitSelectOperator(SelectOperator op, Void arg) throws AlgebricksException {
-            return new SelectOperator(deepCopyExpressionRef(op.getCondition()));
-        }
-
-        @Override
-        public ILogicalOperator visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
-            ArrayList<LogicalVariable> newList = new ArrayList<LogicalVariable>();
-            newList.addAll(op.getVariables());
-            return new ProjectOperator(newList);
-        }
-
-        @Override
-        public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg)
-                throws AlgebricksException {
-            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            deepCopyExpressionRefs(newExpressions, op.getExpressions());
-            return new PartitioningSplitOperator(newExpressions, op.getDefaultBranchIndex());
-        }
-
-        @Override
-        public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
-            return new ReplicateOperator(op.getOutputArity());
-        }
-
-        @Override
-        public ILogicalOperator visitScriptOperator(ScriptOperator op, Void arg) throws AlgebricksException {
-            ArrayList<LogicalVariable> newInputList = new ArrayList<LogicalVariable>();
-            ArrayList<LogicalVariable> newOutputList = new ArrayList<LogicalVariable>();
-            newInputList.addAll(op.getInputVariables());
-            newOutputList.addAll(op.getOutputVariables());
-            return new ScriptOperator(op.getScriptDescription(), newInputList, newOutputList);
-        }
-
-        @Override
-        public ILogicalOperator visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException {
-            ArrayList<ILogicalPlan> newSubplans = new ArrayList<ILogicalPlan>();
-            for (ILogicalPlan plan : op.getNestedPlans()) {
-                newSubplans.add(IsomorphismOperatorVisitor.deepCopy(plan));
-            }
-            return new SubplanOperator(newSubplans);
-        }
-
-        @Override
-        public ILogicalOperator visitUnionOperator(UnionAllOperator op, Void arg) throws AlgebricksException {
-            List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> newVarMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();
-            List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = op.getVariableMappings();
-            for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple : varMap)
-                newVarMap.add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(triple.first,
-                        triple.second, triple.third));
-            return new UnionAllOperator(newVarMap);
-        }
-
-        @Override
-        public ILogicalOperator visitUnnestOperator(UnnestOperator op, Void arg) throws AlgebricksException {
-            return new UnnestOperator(op.getVariable(), deepCopyExpressionRef(op.getExpressionRef()),
-                    op.getPositionalVariable(), op.getPositionalVariableType());
-        }
-
-        @Override
-        public ILogicalOperator visitUnnestMapOperator(UnnestMapOperator op, Void arg) throws AlgebricksException {
-            ArrayList<LogicalVariable> newInputList = new ArrayList<LogicalVariable>();
-            newInputList.addAll(op.getVariables());
-            return new UnnestMapOperator(newInputList, deepCopyExpressionRef(op.getExpressionRef()),
-                    new ArrayList<Object>(op.getVariableTypes()), op.propagatesInput());
-        }
-
-        @Override
-        public ILogicalOperator visitDataScanOperator(DataSourceScanOperator op, Void arg) throws AlgebricksException {
-            ArrayList<LogicalVariable> newInputList = new ArrayList<LogicalVariable>();
-            newInputList.addAll(op.getVariables());
-            return new DataSourceScanOperator(newInputList, op.getDataSource());
-        }
-
-        @Override
-        public ILogicalOperator visitDistinctOperator(DistinctOperator op, Void arg) throws AlgebricksException {
-            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            deepCopyExpressionRefs(newExpressions, op.getExpressions());
-            return new DistinctOperator(newExpressions);
-        }
-
-        @Override
-        public ILogicalOperator visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
-            return new ExchangeOperator();
-        }
-
-        @Override
-        public ILogicalOperator visitWriteOperator(WriteOperator op, Void arg) throws AlgebricksException {
-            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            deepCopyExpressionRefs(newExpressions, op.getExpressions());
-            return new WriteOperator(newExpressions, op.getDataSink());
-        }
-
-        @Override
-        public ILogicalOperator visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
-            ArrayList<Mutable<ILogicalExpression>> newKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            deepCopyExpressionRefs(newKeyExpressions, op.getKeyExpressions());
-            return new WriteResultOperator(op.getDataSource(), deepCopyExpressionRef(op.getPayloadExpression()),
-                    newKeyExpressions);
-        }
-
-        @Override
-        public ILogicalOperator visitInsertDeleteOperator(InsertDeleteOperator op, Void arg) throws AlgebricksException {
-            List<Mutable<ILogicalExpression>> newKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            deepCopyExpressionRefs(newKeyExpressions, op.getPrimaryKeyExpressions());
-            return new InsertDeleteOperator(op.getDataSource(), deepCopyExpressionRef(op.getPayloadExpression()),
-                    newKeyExpressions, op.getOperation());
-        }
-
-        @Override
-        public ILogicalOperator visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Void arg)
-                throws AlgebricksException {
-            List<Mutable<ILogicalExpression>> newPrimaryKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            deepCopyExpressionRefs(newPrimaryKeyExpressions, op.getPrimaryKeyExpressions());
-            List<Mutable<ILogicalExpression>> newSecondaryKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            deepCopyExpressionRefs(newSecondaryKeyExpressions, op.getSecondaryKeyExpressions());
-            Mutable<ILogicalExpression> newFilterExpression = new MutableObject<ILogicalExpression>(((AbstractLogicalExpression)op.getFilterExpression())
-                    .cloneExpression());
-            return new IndexInsertDeleteOperator(op.getDataSourceIndex(), newPrimaryKeyExpressions,
-                    newSecondaryKeyExpressions, newFilterExpression, op.getOperation());
-        }
-
-        @Override
-        public ILogicalOperator visitSinkOperator(SinkOperator op, Void arg) throws AlgebricksException {
-            return new SinkOperator();
-        }
-
-        private void deepCopyExpressionRefs(List<Mutable<ILogicalExpression>> newExprs,
-                List<Mutable<ILogicalExpression>> oldExprs) {
-            for (Mutable<ILogicalExpression> oldExpr : oldExprs)
-                newExprs.add(new MutableObject<ILogicalExpression>(((AbstractLogicalExpression) oldExpr.getValue())
-                        .cloneExpression()));
-        }
-
-        private Mutable<ILogicalExpression> deepCopyExpressionRef(Mutable<ILogicalExpression> oldExpr) {
-            return new MutableObject<ILogicalExpression>(
-                    ((AbstractLogicalExpression) oldExpr.getValue()).cloneExpression());
-        }
-
-        private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderAndExpression(
-                List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
-            List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();
-            for (Pair<IOrder, Mutable<ILogicalExpression>> pair : ordersAndExprs)
-                newOrdersAndExprs.add(new Pair<IOrder, Mutable<ILogicalExpression>>(pair.first,
-                        deepCopyExpressionRef(pair.second)));
-            return newOrdersAndExprs;
-        }
-
-        @Override
-        public ILogicalOperator visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
-            return new ExtensionOperator(op.getNewInstanceOfDelegateOperator());
-        }
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
deleted file mode 100644
index 545d039..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
-
-import java.util.Map;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.PlanCompiler;
-import edu.uci.ics.hyracks.algebricks.runtime.base.AlgebricksPipeline;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public abstract class AbstractPhysicalOperator implements IPhysicalOperator {
-
-    protected IPhysicalPropertiesVector deliveredProperties;
-    private boolean disableJobGenBelow = false;
-    private Object hostQueryContext;
-
-    @Override
-    public final IPhysicalPropertiesVector getDeliveredProperties() {
-        return deliveredProperties;
-    }
-
-    @Override
-    public String toString() {
-        return getOperatorTag().toString();
-    }
-
-    public void setHostQueryContext(Object context) {
-        this.hostQueryContext = context;
-    }
-
-    public Object getHostQueryContext() {
-        return hostQueryContext;
-    }
-
-    protected PhysicalRequirements emptyUnaryRequirements() {
-        StructuralPropertiesVector[] req = new StructuralPropertiesVector[] { StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR };
-        return new PhysicalRequirements(req, IPartitioningRequirementsCoordinator.NO_COORDINATION);
-    }
-
-    @Override
-    public void disableJobGenBelowMe() {
-        this.disableJobGenBelow = true;
-    }
-
-    @Override
-    public boolean isJobGenDisabledBelowMe() {
-        return disableJobGenBelow;
-    }
-
-    protected void contributeOpDesc(IHyracksJobBuilder builder, AbstractLogicalOperator op, IOperatorDescriptor opDesc) {
-        if (op.getExecutionMode() == ExecutionMode.UNPARTITIONED) {
-            AlgebricksPartitionConstraint apc = new AlgebricksCountPartitionConstraint(1);
-            builder.contributeAlgebricksPartitionConstraint(opDesc, apc);
-        }
-        builder.contributeHyracksOperator(op, opDesc);
-    }
-
-    protected AlgebricksPipeline[] compileSubplans(IOperatorSchema outerPlanSchema,
-            AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, JobGenContext context)
-            throws AlgebricksException {
-        AlgebricksPipeline[] subplans = new AlgebricksPipeline[npOp.getNestedPlans().size()];
-        PlanCompiler pc = new PlanCompiler(context);
-        int i = 0;
-        for (ILogicalPlan p : npOp.getNestedPlans()) {
-            subplans[i++] = buildPipelineWithProjection(p, outerPlanSchema, npOp, opSchema, pc);
-        }
-        return subplans;
-    }
-
-    private AlgebricksPipeline buildPipelineWithProjection(ILogicalPlan p, IOperatorSchema outerPlanSchema,
-            AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, PlanCompiler pc) throws AlgebricksException {
-        if (p.getRoots().size() > 1) {
-            throw new NotImplementedException("Nested plans with several roots are not supported.");
-        }
-        JobSpecification nestedJob = pc.compilePlan(p, outerPlanSchema, null);
-        ILogicalOperator topOpInSubplan = p.getRoots().get(0).getValue();
-        JobGenContext context = pc.getContext();
-        IOperatorSchema topOpInSubplanScm = context.getSchema(topOpInSubplan);
-        opSchema.addAllVariables(topOpInSubplanScm);
-
-        Map<OperatorDescriptorId, IOperatorDescriptor> opMap = nestedJob.getOperatorMap();
-        if (opMap.size() != 1) {
-            throw new AlgebricksException(
-                    "Attempting to construct a nested plan with "
-                            + opMap.size()
-                            + " operator descriptors. Currently, nested plans can only consist in linear pipelines of Asterix micro operators.");
-        }
-
-        for (OperatorDescriptorId oid : opMap.keySet()) {
-            IOperatorDescriptor opd = opMap.get(oid);
-            if (!(opd instanceof AlgebricksMetaOperatorDescriptor)) {
-                throw new AlgebricksException(
-                        "Can only generate Hyracks jobs for pipelinable Asterix nested plans, not for "
-                                + opd.getClass().getName());
-            }
-            AlgebricksMetaOperatorDescriptor amod = (AlgebricksMetaOperatorDescriptor) opd;
-
-            return amod.getPipeline();
-            // we suppose that the top operator in the subplan already does the
-            // projection for us
-        }
-
-        throw new IllegalStateException();
-    }
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
deleted file mode 100644
index c737cc4..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator.JoinKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
-
-public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {
-
-    private final int memSizeInFrames;
-    private final int maxInputBuildSizeInFrames;
-    private final int aveRecordsPerFrame;
-    private final double fudgeFactor;
-
-    public HybridHashJoinPOperator(JoinKind kind, JoinPartitioningType partitioningType,
-            List<LogicalVariable> sideLeftOfEqualities, List<LogicalVariable> sideRightOfEqualities,
-            int memSizeInFrames, int maxInputSize0InFrames, int aveRecordsPerFrame, double fudgeFactor) {
-        super(kind, partitioningType, sideLeftOfEqualities, sideRightOfEqualities);
-        this.memSizeInFrames = memSizeInFrames;
-        this.maxInputBuildSizeInFrames = maxInputSize0InFrames;
-        this.aveRecordsPerFrame = aveRecordsPerFrame;
-        this.fudgeFactor = fudgeFactor;
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        return PhysicalOperatorTag.HYBRID_HASH_JOIN;
-    }
-
-    @Override
-    public boolean isMicroOperator() {
-        return false;
-    }
-
-    public double getFudgeFactor() {
-        return fudgeFactor;
-    }
-
-    public int getMemSizeInFrames() {
-        return memSizeInFrames;
-    }
-
-    @Override
-    public String toString() {
-        return getOperatorTag().toString() + " " + keysLeftBranch + keysRightBranch;
-    }
-
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-        int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
-        int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
-        IVariableTypeEnvironment env = context.getTypeEnvironment(op);
-        IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper.variablesToBinaryHashFunctionFactories(
-                keysLeftBranch, env, context);
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
-        int i = 0;
-        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
-        for (LogicalVariable v : keysLeftBranch) {
-            Object t = env.getVarType(v);
-            comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
-        }
-        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
-        IOperatorDescriptorRegistry spec = builder.getJobSpec();
-        IOperatorDescriptor opDesc = null;
-        try {
-            switch (kind) {
-                case INNER: {
-                    opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
-                            maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
-                            hashFunFactories, comparatorFactories, recDescriptor);
-                    break;
-                }
-                case LEFT_OUTER: {
-                    INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
-                    for (int j = 0; j < nullWriterFactories.length; j++) {
-                        nullWriterFactories[j] = context.getNullWriterFactory();
-                    }
-                    opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
-                            maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
-                            hashFunFactories, comparatorFactories, recDescriptor, true, nullWriterFactories);
-                    break;
-                }
-                default: {
-                    throw new NotImplementedException();
-                }
-            }
-        } catch (HyracksDataException e) {
-            throw new AlgebricksException(e);
-        }
-        contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
-
-        ILogicalOperator src1 = op.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(src1, 0, op, 0);
-        ILogicalOperator src2 = op.getInputs().get(1).getValue();
-        builder.contributeGraphEdge(src2, 0, op, 1);
-    }
-
-    @Override
-    protected List<ILocalStructuralProperty> deliveredLocalProperties(ILogicalOperator op, IOptimizationContext context)
-            throws AlgebricksException {
-        return new LinkedList<ILocalStructuralProperty>();
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InsertDeletePOperator.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InsertDeletePOperator.java
deleted file mode 100644
index d1cb5f0..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/InsertDeletePOperator.java
+++ /dev/null
@@ -1,98 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-@SuppressWarnings("rawtypes")
-public class InsertDeletePOperator extends AbstractPhysicalOperator {
-
-    private LogicalVariable payload;
-    private List<LogicalVariable> keys;
-    private IDataSource<?> dataSource;
-
-    public InsertDeletePOperator(LogicalVariable payload, List<LogicalVariable> keys, IDataSource dataSource) {
-        this.payload = payload;
-        this.keys = keys;
-        this.dataSource = dataSource;
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        return PhysicalOperatorTag.INSERT_DELETE;
-    }
-
-    @Override
-    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        deliveredProperties = (StructuralPropertiesVector) op2.getDeliveredPhysicalProperties().clone();
-    }
-
-    @Override
-    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
-            IPhysicalPropertiesVector reqdByParent) {
-        List<LogicalVariable> scanVariables = new ArrayList<LogicalVariable>();
-        scanVariables.addAll(keys);
-        scanVariables.add(new LogicalVariable(-1));
-        IPhysicalPropertiesVector r = dataSource.getPropertiesProvider().computePropertiesVector(scanVariables);
-        IPhysicalPropertiesVector[] requirements = new IPhysicalPropertiesVector[1];
-        requirements[0] = r;
-        return new PhysicalRequirements(requirements, IPartitioningRequirementsCoordinator.NO_COORDINATION);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-        InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op;
-        IMetadataProvider mp = context.getMetadataProvider();
-        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
-        JobSpecification spec = builder.getJobSpec();
-        RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0],
-                context);
-
-        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = null;
-        if (insertDeleteOp.getOperation() == Kind.INSERT)
-            runtimeAndConstraints = mp.getInsertRuntime(dataSource, propagatedSchema, typeEnv, keys, payload,
-                    inputDesc, context, spec);
-        else
-            runtimeAndConstraints = mp.getDeleteRuntime(dataSource, propagatedSchema, typeEnv, keys, payload,
-                    inputDesc, context, spec);
-
-        builder.contributeHyracksOperator(insertDeleteOp, runtimeAndConstraints.first);
-        builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
-        ILogicalOperator src = insertDeleteOp.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(src, 0, insertDeleteOp, 0);
-    }
-
-    @Override
-    public boolean isMicroOperator() {
-        return false;
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java
deleted file mode 100644
index 8cbd2d8..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java
+++ /dev/null
@@ -1,277 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator.JoinKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspector;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluator;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.data.std.api.IPointable;
-import edu.uci.ics.hyracks.data.std.primitive.VoidPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
-
-/**
- * Left input is broadcast and preserves its local properties.
- * Right input can be partitioned in any way.
- */
-public class NLJoinPOperator extends AbstractJoinPOperator {
-
-    private final int memSize;
-
-    public NLJoinPOperator(JoinKind kind, JoinPartitioningType partitioningType, int memSize) {
-        super(kind, partitioningType);
-        this.memSize = memSize;
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        return PhysicalOperatorTag.NESTED_LOOP;
-    }
-
-    @Override
-    public boolean isMicroOperator() {
-        return false;
-    }
-
-    @Override
-    public void computeDeliveredProperties(ILogicalOperator iop, IOptimizationContext context) {
-        if (partitioningType != JoinPartitioningType.BROADCAST) {
-            throw new NotImplementedException(partitioningType + " nested loop joins are not implemented.");
-        }
-
-        IPartitioningProperty pp;
-
-        AbstractLogicalOperator op = (AbstractLogicalOperator) iop;
-
-        if (op.getExecutionMode() == AbstractLogicalOperator.ExecutionMode.PARTITIONED) {
-            AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(1).getValue();
-            IPhysicalPropertiesVector pv1 = op2.getPhysicalOperator().getDeliveredProperties();
-            if (pv1 == null) {
-                pp = null;
-            } else {
-                pp = pv1.getPartitioningProperty();
-            }
-        } else {
-        	pp = IPartitioningProperty.UNPARTITIONED;
-        }
-
-        List<ILocalStructuralProperty> localProps = new LinkedList<ILocalStructuralProperty>();
-        this.deliveredProperties = new StructuralPropertiesVector(pp, localProps);
-    }
-
-    @Override
-    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
-            IPhysicalPropertiesVector reqdByParent) {
-        if (partitioningType != JoinPartitioningType.BROADCAST) {
-            throw new NotImplementedException(partitioningType + " nested loop joins are not implemented.");
-        }
-
-        StructuralPropertiesVector[] pv = new StructuralPropertiesVector[2];
-        pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
-        pv[1] = new StructuralPropertiesVector(null, null);
-        return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
-    }
-
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op;
-        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
-        IOperatorSchema[] conditionInputSchemas = new IOperatorSchema[1];
-        conditionInputSchemas[0] = propagatedSchema;
-        IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
-        IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory(join.getCondition().getValue(),
-                context.getTypeEnvironment(op), conditionInputSchemas, context);
-        ITuplePairComparatorFactory comparatorFactory = new TuplePairEvaluatorFactory(cond,
-                context.getBinaryBooleanInspectorFactory());
-        IOperatorDescriptorRegistry spec = builder.getJobSpec();
-        IOperatorDescriptor opDesc = null;
-
-        switch (kind) {
-            case INNER: {
-                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize);
-                break;
-            }
-            case LEFT_OUTER:
-            default: {
-                throw new NotImplementedException();
-            }
-        }
-        contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
-
-        ILogicalOperator src1 = op.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(src1, 0, op, 0);
-        ILogicalOperator src2 = op.getInputs().get(1).getValue();
-        builder.contributeGraphEdge(src2, 0, op, 1);
-    }
-
-    public static class TuplePairEvaluatorFactory implements ITuplePairComparatorFactory {
-
-        private static final long serialVersionUID = 1L;
-        private final IScalarEvaluatorFactory cond;
-        private final IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory;
-
-        public TuplePairEvaluatorFactory(IScalarEvaluatorFactory cond,
-                IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory) {
-            this.cond = cond;
-            this.binaryBooleanInspectorFactory = binaryBooleanInspectorFactory;
-        }
-
-        @Override
-        public synchronized ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
-            return new TuplePairEvaluator(ctx, cond, binaryBooleanInspectorFactory.createBinaryBooleanInspector(ctx));
-        }
-    }
-
-    public static class TuplePairEvaluator implements ITuplePairComparator {
-        private final IHyracksTaskContext ctx;
-        private IScalarEvaluator condEvaluator;
-        private final IScalarEvaluatorFactory condFactory;
-        private final IPointable p;
-        private final CompositeFrameTupleReference compositeTupleRef;
-        private final FrameTupleReference leftRef;
-        private final FrameTupleReference rightRef;
-        private final IBinaryBooleanInspector binaryBooleanInspector;
-
-        public TuplePairEvaluator(IHyracksTaskContext ctx, IScalarEvaluatorFactory condFactory,
-                IBinaryBooleanInspector binaryBooleanInspector) {
-            this.ctx = ctx;
-            this.condFactory = condFactory;
-            this.binaryBooleanInspector = binaryBooleanInspector;
-            this.leftRef = new FrameTupleReference();
-            this.p = VoidPointable.FACTORY.createPointable();
-            this.rightRef = new FrameTupleReference();
-            this.compositeTupleRef = new CompositeFrameTupleReference(leftRef, rightRef);
-        }
-
-        @Override
-        public int compare(IFrameTupleAccessor outerAccessor, int outerIndex, IFrameTupleAccessor innerAccessor,
-                int innerIndex) throws HyracksDataException {
-            if (condEvaluator == null) {
-                try {
-                    this.condEvaluator = condFactory.createScalarEvaluator(ctx);
-                } catch (AlgebricksException ae) {
-                    throw new HyracksDataException(ae);
-                }
-            }
-            compositeTupleRef.reset(outerAccessor, outerIndex, innerAccessor, innerIndex);
-            try {
-                condEvaluator.evaluate(compositeTupleRef, p);
-            } catch (AlgebricksException ae) {
-                throw new HyracksDataException(ae);
-            }
-            boolean result = binaryBooleanInspector
-                    .getBooleanValue(p.getByteArray(), p.getStartOffset(), p.getLength());
-            if (result)
-                return 0;
-            else
-                return 1;
-        }
-    }
-
-    public static class CompositeFrameTupleReference implements IFrameTupleReference {
-
-        private final FrameTupleReference refLeft;
-        private final FrameTupleReference refRight;
-
-        public CompositeFrameTupleReference(FrameTupleReference refLeft, FrameTupleReference refRight) {
-            this.refLeft = refLeft;
-            this.refRight = refRight;
-        }
-
-        public void reset(IFrameTupleAccessor outerAccessor, int outerIndex, IFrameTupleAccessor innerAccessor,
-                int innerIndex) {
-            refLeft.reset(outerAccessor, outerIndex);
-            refRight.reset(innerAccessor, innerIndex);
-        }
-
-        @Override
-        public int getFieldCount() {
-            return refLeft.getFieldCount() + refRight.getFieldCount();
-        }
-
-        @Override
-        public byte[] getFieldData(int fIdx) {
-            int leftFieldCount = refLeft.getFieldCount();
-            if (fIdx < leftFieldCount)
-                return refLeft.getFieldData(fIdx);
-            else
-                return refRight.getFieldData(fIdx - leftFieldCount);
-        }
-
-        @Override
-        public int getFieldStart(int fIdx) {
-            int leftFieldCount = refLeft.getFieldCount();
-            if (fIdx < leftFieldCount)
-                return refLeft.getFieldStart(fIdx);
-            else
-                return refRight.getFieldStart(fIdx - leftFieldCount);
-        }
-
-        @Override
-        public int getFieldLength(int fIdx) {
-            int leftFieldCount = refLeft.getFieldCount();
-            if (fIdx < leftFieldCount)
-                return refLeft.getFieldLength(fIdx);
-            else
-                return refRight.getFieldLength(fIdx - leftFieldCount);
-        }
-
-        @Override
-        public IFrameTupleAccessor getFrameTupleAccessor() {
-            throw new NotImplementedException();
-        }
-
-        @Override
-        public int getTupleIndex() {
-            throw new NotImplementedException();
-        }
-
-    }
-}
\ No newline at end of file
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
deleted file mode 100644
index fc71c69..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
+++ /dev/null
@@ -1,356 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
-
-public class LogicalOperatorPrettyPrintVisitor implements ILogicalOperatorVisitor<String, Integer> {
-
-    public LogicalOperatorPrettyPrintVisitor() {
-    }
-
-    @Override
-    public String visitAggregateOperator(AggregateOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("aggregate ").append(op.getVariables()).append(" <- ");
-        pprintExprList(op.getExpressions(), buffer);
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitRunningAggregateOperator(RunningAggregateOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("running-aggregate ").append(op.getVariables()).append(" <- ");
-        pprintExprList(op.getExpressions(), buffer);
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("empty-tuple-source");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitGroupByOperator(GroupByOperator op, Integer indent) throws AlgebricksException {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("group by (").append(op.gByListToString()).append(") decor (")
-                .append(op.decorListToString()).append(") {");
-        printNestedPlans(op, indent, buffer);
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitDistinctOperator(DistinctOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("distinct " + "(");
-        pprintExprList(op.getExpressions(), buffer);
-        buffer.append(")");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitInnerJoinOperator(InnerJoinOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("join (").append(op.getCondition().getValue()).append(")");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("left outer join (").append(op.getCondition().getValue()).append(")");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("nested tuple source");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitOrderOperator(OrderOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("order ");
-        for (Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>> p : op.getOrderExpressions()) {
-            String fst;
-            switch (p.first.getKind()) {
-                case ASC: {
-                    fst = "ASC";
-                    break;
-                }
-                case DESC: {
-                    fst = "DESC";
-                    break;
-                }
-                default: {
-                    fst = p.first.getExpressionRef().toString();
-                }
-            }
-            buffer.append("(" + fst + ", " + p.second.getValue() + ") ");
-        }
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitAssignOperator(AssignOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("assign ").append(op.getVariables()).append(" <- ");
-        pprintExprList(op.getExpressions(), buffer);
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitWriteOperator(WriteOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("write ").append(op.getExpressions());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitWriteResultOperator(WriteResultOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("load ").append(op.getDataSource()).append(" from ")
-                .append(op.getPayloadExpression()).append(" partitioned by ").append(op.getKeyExpressions().toString());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitSelectOperator(SelectOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("select " + "(" + op.getCondition().getValue() + ")");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitProjectOperator(ProjectOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("project " + "(" + op.getVariables() + ")");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitPartitioningSplitOperator(PartitioningSplitOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("partitioning-split (" + op.getExpressions() + ")");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitSubplanOperator(SubplanOperator op, Integer indent) throws AlgebricksException {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("subplan {");
-        printNestedPlans(op, indent, buffer);
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitUnionOperator(UnionAllOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("union");
-        for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> v : op.getVariableMappings()) {
-            buffer.append(" (" + v.first + ", " + v.second + ", " + v.third + ")");
-        }
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitUnnestOperator(UnnestOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("unnest " + op.getVariable());
-        if (op.getPositionalVariable() != null) {
-            buffer.append(" at " + op.getPositionalVariable());
-        }
-        buffer.append(" <- " + op.getExpressionRef().getValue());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitUnnestMapOperator(UnnestMapOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("unnest-map " + op.getVariables() + " <- " + op.getExpressionRef().getValue());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitDataScanOperator(DataSourceScanOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append(
-                "data-scan " + op.getProjectVariables() + "<-" + op.getVariables() + " <- " + op.getDataSource());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitLimitOperator(LimitOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("limit " + op.getMaxObjects().getValue());
-        ILogicalExpression offset = op.getOffset().getValue();
-        if (offset != null) {
-            buffer.append(", " + offset);
-        }
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitDieOperator(DieOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("die after " + op.getAfterObjects().getValue());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitExchangeOperator(ExchangeOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("exchange ");
-        return buffer.toString();
-    }
-
-    protected static final StringBuilder addIndent(StringBuilder buffer, int level) {
-        for (int i = 0; i < level; ++i) {
-            buffer.append(' ');
-        }
-        return buffer;
-    }
-
-    private void printNestedPlans(AbstractOperatorWithNestedPlans op, Integer indent, StringBuilder buffer)
-            throws AlgebricksException {
-        boolean first = true;
-        if (op.getNestedPlans().isEmpty()) {
-            buffer.append("}");
-        } else {
-            for (ILogicalPlan p : op.getNestedPlans()) {
-                // PrettyPrintUtil.indent(buffer, level + 10).append("var " +
-                // p.first + ":\n");
-                buffer.append("\n");
-                if (first) {
-                    first = false;
-                } else {
-                    addIndent(buffer, indent).append("       {\n");
-                }
-                PlanPrettyPrinter.printPlan(p, buffer, this, indent + 10);
-                addIndent(buffer, indent).append("       }");
-            }
-        }
-    }
-
-    @Override
-    public String visitScriptOperator(ScriptOperator op, Integer indent) {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append(
-                "script (in: " + op.getInputVariables() + ") (out: " + op.getOutputVariables() + ")");
-        return buffer.toString();
-    }
-
-    private void pprintExprList(List<Mutable<ILogicalExpression>> expressions, StringBuilder buffer) {
-        buffer.append("[");
-        boolean first = true;
-        for (Mutable<ILogicalExpression> exprRef : expressions) {
-            if (first) {
-                first = false;
-            } else {
-                buffer.append(", ");
-            }
-            buffer.append(exprRef.getValue());
-        }
-        buffer.append("]");
-    }
-
-    @Override
-    public String visitReplicateOperator(ReplicateOperator op, Integer indent) throws AlgebricksException {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("replicate ");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitInsertDeleteOperator(InsertDeleteOperator op, Integer indent) throws AlgebricksException {
-        StringBuilder buffer = new StringBuilder();
-        String header = op.getOperation() == Kind.INSERT ? "insert into " : "delete from ";
-        addIndent(buffer, indent).append(header).append(op.getDataSource()).append(" from ")
-                .append(op.getPayloadExpression()).append(" partitioned by ")
-                .append(op.getPrimaryKeyExpressions().toString());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Integer indent)
-            throws AlgebricksException {
-        StringBuilder buffer = new StringBuilder();
-        String header = op.getOperation() == Kind.INSERT ? "insert into " : "delete from ";
-        addIndent(buffer, indent).append(header).append(op.getDataSourceIndex()).append(" from ")
-                .append(op.getSecondaryKeyExpressions().toString()).append(" ")
-                .append(op.getPrimaryKeyExpressions().toString());
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitSinkOperator(SinkOperator op, Integer indent) throws AlgebricksException {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append("sink");
-        return buffer.toString();
-    }
-
-    @Override
-    public String visitExtensionOperator(ExtensionOperator op, Integer indent) throws AlgebricksException {
-        StringBuilder buffer = new StringBuilder();
-        addIndent(buffer, indent).append(op.toString());
-        return buffer.toString();
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java
deleted file mode 100644
index 22a1a81..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.jobgen.impl;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IPartialAggregationTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
-import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
-
-public class JobGenContext {
-    private final IOperatorSchema outerFlowSchema;
-    private final Map<ILogicalOperator, IOperatorSchema> schemaMap = new HashMap<ILogicalOperator, IOperatorSchema>();
-    private final ISerializerDeserializerProvider serializerDeserializerProvider;
-    private final IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider;
-    private final IBinaryComparatorFactoryProvider comparatorFactoryProvider;
-    private final IPrinterFactoryProvider printerFactoryProvider;
-    private final ITypeTraitProvider typeTraitProvider;
-    private final IMetadataProvider<?, ?> metadataProvider;
-    private final INullWriterFactory nullWriterFactory;
-    private final INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider;
-    private final Object appContext;
-    private final IBinaryBooleanInspectorFactory booleanInspectorFactory;
-    private final IBinaryIntegerInspectorFactory integerInspectorFactory;
-    private final IExpressionRuntimeProvider expressionRuntimeProvider;
-    private final IExpressionTypeComputer expressionTypeComputer;
-    private final IExpressionEvalSizeComputer expressionEvalSizeComputer;
-    private final IPartialAggregationTypeComputer partialAggregationTypeComputer;
-    private final int frameSize;
-    private AlgebricksPartitionConstraint clusterLocations;
-    private int varCounter;
-    private final ITypingContext typingContext;
-
-    public JobGenContext(IOperatorSchema outerFlowSchema, IMetadataProvider<?, ?> metadataProvider, Object appContext,
-            ISerializerDeserializerProvider serializerDeserializerProvider,
-            IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider,
-            IBinaryComparatorFactoryProvider comparatorFactoryProvider, ITypeTraitProvider typeTraitProvider,
-            IBinaryBooleanInspectorFactory booleanInspectorFactory,
-            IBinaryIntegerInspectorFactory integerInspectorFactory, IPrinterFactoryProvider printerFactoryProvider,
-            INullWriterFactory nullWriterFactory,
-            INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider,
-            IExpressionRuntimeProvider expressionRuntimeProvider, IExpressionTypeComputer expressionTypeComputer,
-            INullableTypeComputer nullableTypeComputer, ITypingContext typingContext,
-            IExpressionEvalSizeComputer expressionEvalSizeComputer,
-            IPartialAggregationTypeComputer partialAggregationTypeComputer, int frameSize,
-            AlgebricksPartitionConstraint clusterLocations) {
-        this.outerFlowSchema = outerFlowSchema;
-        this.metadataProvider = metadataProvider;
-        this.appContext = appContext;
-        this.serializerDeserializerProvider = serializerDeserializerProvider;
-        this.hashFunctionFactoryProvider = hashFunctionFactoryProvider;
-        this.comparatorFactoryProvider = comparatorFactoryProvider;
-        this.typeTraitProvider = typeTraitProvider;
-        this.booleanInspectorFactory = booleanInspectorFactory;
-        this.integerInspectorFactory = integerInspectorFactory;
-        this.printerFactoryProvider = printerFactoryProvider;
-        this.clusterLocations = clusterLocations;
-        this.normalizedKeyComputerFactoryProvider = normalizedKeyComputerFactoryProvider;
-        this.nullWriterFactory = nullWriterFactory;
-        this.expressionRuntimeProvider = expressionRuntimeProvider;
-        this.expressionTypeComputer = expressionTypeComputer;
-        this.typingContext = typingContext;
-        this.expressionEvalSizeComputer = expressionEvalSizeComputer;
-        this.partialAggregationTypeComputer = partialAggregationTypeComputer;
-        this.frameSize = frameSize;
-        this.varCounter = 0;
-    }
-
-    public IOperatorSchema getOuterFlowSchema() {
-        return outerFlowSchema;
-    }
-
-    public AlgebricksPartitionConstraint getClusterLocations() {
-        return clusterLocations;
-    }
-
-    public IMetadataProvider<?, ?> getMetadataProvider() {
-        return metadataProvider;
-    }
-
-    public Object getAppContext() {
-        return appContext;
-    }
-
-    public ISerializerDeserializerProvider getSerializerDeserializerProvider() {
-        return serializerDeserializerProvider;
-    }
-
-    public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
-        return hashFunctionFactoryProvider;
-    }
-
-    public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
-        return comparatorFactoryProvider;
-    }
-
-    public ITypeTraitProvider getTypeTraitProvider() {
-        return typeTraitProvider;
-    }
-
-    public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
-        return booleanInspectorFactory;
-    }
-
-    public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
-        return integerInspectorFactory;
-    }
-
-    public IPrinterFactoryProvider getPrinterFactoryProvider() {
-        return printerFactoryProvider;
-    }
-
-    public IExpressionRuntimeProvider getExpressionRuntimeProvider() {
-        return expressionRuntimeProvider;
-    }
-
-    public IOperatorSchema getSchema(ILogicalOperator op) {
-        return schemaMap.get(op);
-    }
-
-    public void putSchema(ILogicalOperator op, IOperatorSchema schema) {
-        schemaMap.put(op, schema);
-    }
-
-    public LogicalVariable createNewVar() {
-        varCounter++;
-        LogicalVariable var = new LogicalVariable(-varCounter);
-        return var;
-    }
-
-    public Object getType(ILogicalExpression expr, IVariableTypeEnvironment env) throws AlgebricksException {
-        return expressionTypeComputer.getType(expr, typingContext.getMetadataProvider(), env);
-    }
-
-    public INullWriterFactory getNullWriterFactory() {
-        return nullWriterFactory;
-    }
-
-    public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
-        return normalizedKeyComputerFactoryProvider;
-    }
-
-    public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
-        return expressionEvalSizeComputer;
-    }
-
-    public int getFrameSize() {
-        return frameSize;
-    }
-
-    public IPartialAggregationTypeComputer getPartialAggregationTypeComputer() {
-        return partialAggregationTypeComputer;
-    }
-
-    public IVariableTypeEnvironment getTypeEnvironment(ILogicalOperator op) {
-        return typingContext.getOutputTypeEnvironment(op);
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
deleted file mode 100644
index 790fb93..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.jobgen.impl;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-
-public final class JobGenHelper {
-
-    private static final Logger LOGGER = Logger.getLogger(JobGenHelper.class.getName());
-
-    @SuppressWarnings("rawtypes")
-    public static RecordDescriptor mkRecordDescriptor(IVariableTypeEnvironment env, IOperatorSchema opSchema,
-            JobGenContext context) throws AlgebricksException {        
-		ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema.getSize()];
-        ITypeTraits[] typeTraits = new ITypeTraits[opSchema.getSize()];
-        ISerializerDeserializerProvider sdp = context.getSerializerDeserializerProvider();
-        ITypeTraitProvider ttp = context.getTypeTraitProvider();
-        int i = 0;
-        for (LogicalVariable var : opSchema) {
-            Object t = env.getVarType(var);
-            if (t == null) {
-                LOGGER.warning("No type for variable " + var);
-            }
-            fields[i] = sdp.getSerializerDeserializer(t);
-            typeTraits[i] = ttp.getTypeTrait(t);
-            i++;
-        }
-        return new RecordDescriptor(fields, typeTraits);
-    }
-    
-    public static IPrinterFactory[] mkPrinterFactories(IOperatorSchema opSchema, IVariableTypeEnvironment env,
-            JobGenContext context, int[] printColumns) throws AlgebricksException {
-        IPrinterFactory[] pf = new IPrinterFactory[printColumns.length];
-        IPrinterFactoryProvider pff = context.getPrinterFactoryProvider();
-        for (int i = 0; i < pf.length; i++) {
-            LogicalVariable v = opSchema.getVariable(printColumns[i]);
-            Object t = env.getVarType(v);
-            pf[i] = pff.getPrinterFactory(t);
-        }
-        return pf;
-    }
-
-    public static int[] variablesToFieldIndexes(Collection<LogicalVariable> varLogical, IOperatorSchema opSchema) {
-        int[] tuplePos = new int[varLogical.size()];
-        int i = 0;
-        for (LogicalVariable var : varLogical) {
-            tuplePos[i] = opSchema.findVariable(var);
-            i++;
-        }
-        return tuplePos;
-    }
-
-    public static IBinaryHashFunctionFactory[] variablesToBinaryHashFunctionFactories(
-            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
-            throws AlgebricksException {
-        IBinaryHashFunctionFactory[] funFactories = new IBinaryHashFunctionFactory[varLogical.size()];
-        int i = 0;
-        IBinaryHashFunctionFactoryProvider bhffProvider = context.getBinaryHashFunctionFactoryProvider();
-        for (LogicalVariable var : varLogical) {
-            Object type = env.getVarType(var);
-            funFactories[i++] = bhffProvider.getBinaryHashFunctionFactory(type);
-        }
-        return funFactories;
-    }
-    
-    public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(
-            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
-            throws AlgebricksException {
-        IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[varLogical.size()];
-        IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
-        int i = 0;
-        for (LogicalVariable v : varLogical) {
-            Object type = env.getVarType(v);
-            compFactories[i++] = bcfProvider.getBinaryComparatorFactory(type, true);
-        }
-        return compFactories;
-    }
-    
-    public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(
-            List<LogicalVariable> varLogical, int start, int size, IVariableTypeEnvironment env, JobGenContext context)
-            throws AlgebricksException {
-        IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[size];
-        IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
-        for (int i = 0; i < size; i++) {
-                Object type = env.getVarType(varLogical.get(start + i));
-                compFactories[i] = bcfProvider.getBinaryComparatorFactory(type, true);
-        }
-        return compFactories;
-    }
-
-    public static INormalizedKeyComputerFactory variablesToAscNormalizedKeyComputerFactory(
-            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
-            throws AlgebricksException {
-        INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
-        if (nkcfProvider == null)
-            return null;
-        for (LogicalVariable v : varLogical) {
-            Object type = env.getVarType(v);
-            return nkcfProvider.getNormalizedKeyComputerFactory(type, true);
-        }
-        return null;
-    }
-
-    public static ITypeTraits[] variablesToTypeTraits(Collection<LogicalVariable> varLogical,
-            IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
-        ITypeTraits[] typeTraits = new ITypeTraits[varLogical.size()];
-        ITypeTraitProvider typeTraitProvider = context.getTypeTraitProvider();
-        int i = 0;
-        for (LogicalVariable v : varLogical) {
-            Object type = env.getVarType(v);
-            typeTraits[i++] = typeTraitProvider.getTypeTrait(type);
-        }
-        return typeTraits;
-    }
-    
-    public static ITypeTraits[] variablesToTypeTraits(
-            List<LogicalVariable> varLogical, int start, int size, IVariableTypeEnvironment env, JobGenContext context)
-            throws AlgebricksException {
-        ITypeTraits[] typeTraits = new ITypeTraits[size];
-        ITypeTraitProvider typeTraitProvider = context.getTypeTraitProvider();
-        for (int i = 0; i < size; i++) {
-                Object type = env.getVarType(varLogical.get(start + i));
-                typeTraits[i] = typeTraitProvider.getTypeTrait(type);
-        }
-        return typeTraits;
-    }
-
-    public static int[] projectAllVariables(IOperatorSchema opSchema) {
-        int[] projectionList = new int[opSchema.getSize()];
-        int k = 0;
-        for (LogicalVariable v : opSchema) {
-            projectionList[k++] = opSchema.findVariable(v);
-        }
-        return projectionList;
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
deleted file mode 100644
index 7f04bb0..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.jobgen.impl;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.api.job.IJobletEventListenerFactory;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class PlanCompiler {
-    private JobGenContext context;
-    private Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> operatorVisitedToParents = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();
-
-    public PlanCompiler(JobGenContext context) {
-        this.context = context;
-    }
-
-    public JobGenContext getContext() {
-        return context;
-    }
-
-    public JobSpecification compilePlan(ILogicalPlan plan, IOperatorSchema outerPlanSchema, IJobletEventListenerFactory jobEventListenerFactory) throws AlgebricksException {
-        JobSpecification spec = new JobSpecification();
-        if (jobEventListenerFactory != null) {
-            spec.setJobletEventListenerFactory(jobEventListenerFactory);
-        }
-        List<ILogicalOperator> rootOps = new ArrayList<ILogicalOperator>();
-        IHyracksJobBuilder builder = new JobBuilder(spec, context.getClusterLocations());
-        for (Mutable<ILogicalOperator> opRef : plan.getRoots()) {
-            compileOpRef(opRef, spec, builder, outerPlanSchema);
-            rootOps.add(opRef.getValue());
-        }
-        reviseEdges(builder);
-        operatorVisitedToParents.clear();
-        builder.buildSpec(rootOps);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    private void compileOpRef(Mutable<ILogicalOperator> opRef, IOperatorDescriptorRegistry spec, IHyracksJobBuilder builder,
-            IOperatorSchema outerPlanSchema) throws AlgebricksException {
-        ILogicalOperator op = opRef.getValue();
-        int n = op.getInputs().size();
-        IOperatorSchema[] schemas = new IOperatorSchema[n];
-        int i = 0;
-        for (Mutable<ILogicalOperator> opRef2 : op.getInputs()) {
-            List<Mutable<ILogicalOperator>> parents = operatorVisitedToParents.get(opRef2);
-            if (parents == null) {
-                parents = new ArrayList<Mutable<ILogicalOperator>>();
-                operatorVisitedToParents.put(opRef2, parents);
-                parents.add(opRef);
-                compileOpRef(opRef2, spec, builder, outerPlanSchema);
-                schemas[i++] = context.getSchema(opRef2.getValue());
-            } else {
-                if (!parents.contains(opRef))
-                    parents.add(opRef);
-                schemas[i++] = context.getSchema(opRef2.getValue());
-                continue;
-            }
-        }
-
-        IOperatorSchema opSchema = new OperatorSchemaImpl();
-        context.putSchema(op, opSchema);
-        op.getVariablePropagationPolicy().propagateVariables(opSchema, schemas);
-        op.contributeRuntimeOperator(builder, context, opSchema, schemas, outerPlanSchema);
-    }
-
-    private void reviseEdges(IHyracksJobBuilder builder) {
-        /**
-         * revise the edges for the case of replicate operator
-         */
-        for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : operatorVisitedToParents
-                .entrySet()) {
-            Mutable<ILogicalOperator> child = entry.getKey();
-            List<Mutable<ILogicalOperator>> parents = entry.getValue();
-            if (parents.size() > 1) {
-                int i = 0;
-                for (Mutable<ILogicalOperator> parent : parents) {
-                    builder.contributeGraphEdge(child.getValue(), i, parent.getValue(), 0);
-                    i++;
-                }
-            }
-        }
-    }
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java b/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
deleted file mode 100644
index 9ce910b..0000000
--- a/hyracks-algebricks/hyracks-algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
+++ /dev/null
@@ -1,91 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.core.rewriter.base;
-
-import java.util.Properties;
-
-public class PhysicalOptimizationConfig {
-    private static final int MB = 1048576;
-    private static final String FRAMESIZE = "FRAMESIZE";
-    private static final String MAX_FRAMES_EXTERNAL_SORT = "MAX_FRAMES_EXTERNAL_SORT";
-    private static final String MAX_FRAMES_EXTERNAL_GROUP_BY = "MAX_FRAMES_EXTERNAL_GROUP_BY";
-
-    private static final String DEFAULT_HASH_GROUP_TABLE_SIZE = "DEFAULT_HASH_GROUP_TABLE_SIZE";
-    private static final String DEFAULT_EXTERNAL_GROUP_TABLE_SIZE = "DEFAULT_EXTERNAL_GROUP_TABLE_SIZE";
-    private static final String DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE = "DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE";
-
-    private Properties properties = new Properties();
-
-    public PhysicalOptimizationConfig() {
-        int frameSize = 32768;
-        setInt(FRAMESIZE, frameSize);
-        setInt(MAX_FRAMES_EXTERNAL_SORT, (int) (((long) 512 * MB) / frameSize));
-        setInt(MAX_FRAMES_EXTERNAL_GROUP_BY, (int) (((long) 256 * MB) / frameSize));
-
-        // use http://www.rsok.com/~jrm/printprimes.html to find prime numbers
-        setInt(DEFAULT_HASH_GROUP_TABLE_SIZE, 10485767);
-        setInt(DEFAULT_EXTERNAL_GROUP_TABLE_SIZE, 10485767);
-        setInt(DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE, 10485767);
-    }
-
-    public int getFrameSize() {
-        return getInt(FRAMESIZE, 32768);
-    }
-
-    public void setFrameSize(int frameSize) {
-        setInt(FRAMESIZE, frameSize);
-    }
-
-    public int getMaxFramesExternalSort() {
-        int frameSize = getFrameSize();
-        return getInt(MAX_FRAMES_EXTERNAL_SORT, (int) (((long) 512 * MB) / frameSize));
-    }
-
-    public void setMaxFramesExternalSort(int frameLimit) {
-        setInt(MAX_FRAMES_EXTERNAL_SORT, frameLimit);
-    }
-
-    public int getMaxFramesExternalGroupBy() {
-        int frameSize = getFrameSize();
-        return getInt(MAX_FRAMES_EXTERNAL_GROUP_BY, (int) (((long) 256 * MB) / frameSize));
-    }
-
-    public void setMaxFramesExternalGroupBy(int frameLimit) {
-        setInt(MAX_FRAMES_EXTERNAL_GROUP_BY, frameLimit);
-    }
-
-    public int getHashGroupByTableSize() {
-        return getInt(DEFAULT_HASH_GROUP_TABLE_SIZE, 10485767);
-    }
-
-    public void setHashGroupByTableSize(int tableSize) {
-        setInt(DEFAULT_HASH_GROUP_TABLE_SIZE, tableSize);
-    }
-
-    public int getExternalGroupByTableSize() {
-        return getInt(DEFAULT_EXTERNAL_GROUP_TABLE_SIZE, 10485767);
-    }
-
-    public void setExternalGroupByTableSize(int tableSize) {
-        setInt(DEFAULT_EXTERNAL_GROUP_TABLE_SIZE, tableSize);
-    }
-
-    public int getInMemHashJoinTableSize() {
-        return getInt(DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE, 10485767);
-    }
-
-    public void setInMemHashJoinTableSize(int tableSize) {
-        setInt(DEFAULT_IN_MEM_HASH_JOIN_TABLE_SIZE, tableSize);
-    }
-
-    private void setInt(String property, int value) {
-        properties.setProperty(property, Integer.toString(value));
-    }
-
-    private int getInt(String property, int defaultValue) {
-        String value = properties.getProperty(property);
-        if (value == null)
-            return defaultValue;
-        else
-            return Integer.parseInt(value);
-    }
-
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-data/pom.xml b/hyracks-algebricks/hyracks-algebricks-data/pom.xml
deleted file mode 100644
index 965867d..0000000
--- a/hyracks-algebricks/hyracks-algebricks-data/pom.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-data</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-common</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-data-std</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/pom.xml b/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/pom.xml
deleted file mode 100644
index 9e95333..0000000
--- a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/pom.xml
+++ /dev/null
@@ -1,65 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>piglet-example</artifactId>
-
-	<parent>
-		<groupId>edu.uci.ics.hyracks</groupId>
-		<artifactId>hyracks-algebricks-examples</artifactId>
-		<version>0.2.2-SNAPSHOT</version>
-	</parent>
-
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<version>2.0.2</version>
-				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
-				</configuration>
-			</plugin>
-			<plugin>
-				<groupId>org.codehaus.mojo</groupId>
-				<artifactId>javacc-maven-plugin</artifactId>
-				<version>2.6</version>
-				<executions>
-					<execution>
-						<id>javacc</id>
-						<goals>
-							<goal>javacc</goal>
-						</goals>
-						<configuration>
-							<isStatic>false</isStatic>
-						</configuration>
-					</execution>
-				</executions>
-			</plugin>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-surefire-plugin</artifactId>
-				<version>2.7.2</version>
-				<configuration>
-					<includes>
-						<include>**/*TestSuite.java</include>
-						<include>**/*Test.java</include>
-					</includes>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-	<dependencies>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-algebricks-compiler</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-		</dependency>
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<version>4.8.2</version>
-			<type>jar</type>
-			<scope>test</scope>
-		</dependency>
-	</dependencies>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java b/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java
deleted file mode 100644
index 740450e..0000000
--- a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java
+++ /dev/null
@@ -1,362 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.examples.piglet.compiler;
-
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
-import edu.uci.ics.hyracks.algebricks.compiler.api.ICompiler;
-import edu.uci.ics.hyracks.algebricks.compiler.api.ICompilerFactory;
-import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialFixpointRuleController;
-import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialOnceRuleController;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
-import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.ASTNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.AssignmentNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.DumpNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.ExpressionNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.FieldAccessExpressionNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.FilterNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.FunctionTag;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.LiteralExpressionNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.LoadNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.RelationNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.ast.ScalarFunctionExpressionNode;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.exceptions.PigletException;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.metadata.PigletFileDataSink;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.metadata.PigletFileDataSource;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.metadata.PigletMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.parser.ParseException;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.parser.PigletParser;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.rewriter.PigletRewriteRuleset;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.runtime.PigletExpressionJobGen;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.types.Schema;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.types.Type;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class PigletCompiler {
-    private static final Logger LOGGER = Logger.getLogger(PigletCompiler.class.getName());
-
-    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
-        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultLogicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
-        SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
-        SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
-        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                PigletRewriteRuleset.buildTypeInferenceRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
-                PigletRewriteRuleset.buildNormalizationRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                PigletRewriteRuleset.buildCondPushDownRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                PigletRewriteRuleset.buildJoinInferenceRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                PigletRewriteRuleset.buildOpPushDownRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                PigletRewriteRuleset.buildDataExchangeRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                PigletRewriteRuleset.buildConsolidationRuleCollection()));
-        return defaultLogicalRewrites;
-    }
-
-    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultPhysicalRewrites() {
-        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultPhysicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
-        SequentialOnceRuleController seqOnceCtrlAllLevels = new SequentialOnceRuleController(true);
-        SequentialOnceRuleController seqOnceCtrlTopLevel = new SequentialOnceRuleController(false);
-        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrlAllLevels,
-                PigletRewriteRuleset.buildPhysicalRewritesAllLevelsRuleCollection()));
-        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrlTopLevel,
-                PigletRewriteRuleset.buildPhysicalRewritesTopLevelRuleCollection()));
-        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrlAllLevels,
-                PigletRewriteRuleset.prepareForJobGenRuleCollection()));
-        return defaultPhysicalRewrites;
-    }
-
-    private final ICompilerFactory cFactory;
-
-    private final PigletMetadataProvider metadataProvider;
-
-    private int varCounter;
-
-    private ILogicalOperator previousOp;
-
-    public PigletCompiler() {
-        HeuristicCompilerFactoryBuilder builder = new HeuristicCompilerFactoryBuilder();
-        builder.setLogicalRewrites(buildDefaultLogicalRewrites());
-        builder.setPhysicalRewrites(buildDefaultPhysicalRewrites());
-        builder.setSerializerDeserializerProvider(new ISerializerDeserializerProvider() {
-            @SuppressWarnings("unchecked")
-            @Override
-            public ISerializerDeserializer getSerializerDeserializer(Object type) throws AlgebricksException {
-                return null;
-            }
-        });
-        builder.setTypeTraitProvider(new ITypeTraitProvider() {
-			public ITypeTraits getTypeTrait(Object type) {
-				return null;
-			}
-        });
-        builder.setPrinterProvider(PigletPrinterFactoryProvider.INSTANCE);
-        builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
-                new PigletExpressionJobGen()));
-        builder.setExpressionTypeComputer(new IExpressionTypeComputer() {
-            @Override
-            public Object getType(ILogicalExpression expr, IMetadataProvider<?, ?> metadataProvider,
-                    IVariableTypeEnvironment env) throws AlgebricksException {
-                return null;
-            }
-        });
-        cFactory = builder.create();
-        metadataProvider = new PigletMetadataProvider();
-    }
-
-    public List<ASTNode> parse(Reader in) throws ParseException {
-        PigletParser parser = new PigletParser(in);
-        List<ASTNode> statements = parser.Statements();
-        return statements;
-    }
-
-    public JobSpecification compile(List<ASTNode> ast) throws AlgebricksException, PigletException {
-        ILogicalPlan plan = translate(ast);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Translated Plan:");
-            LOGGER.info(getPrettyPrintedPlan(plan));
-        }
-        ICompiler compiler = cFactory.createCompiler(plan, metadataProvider, varCounter);
-        compiler.optimize();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Optimized Plan:");
-            LOGGER.info(getPrettyPrintedPlan(plan));
-        }
-        return compiler.createJob(null, null);
-    }
-
-    private ILogicalPlan translate(List<ASTNode> ast) throws PigletException {
-        Map<String, Relation> symMap = new HashMap<String, Relation>();
-        List<Mutable<ILogicalOperator>> roots = new ArrayList<Mutable<ILogicalOperator>>();
-        previousOp = null;
-        for (ASTNode an : ast) {
-            switch (an.getTag()) {
-                case DUMP: {
-                    DumpNode dn = (DumpNode) an;
-                    Relation input = symMap.get(dn.getAlias());
-                    List<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();
-                    for (LogicalVariable v : input.schema.values()) {
-                        expressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
-                    }
-                    PigletFileDataSink dataSink = new PigletFileDataSink(dn.getFile());
-                    ILogicalOperator op = new WriteOperator(expressions, dataSink);
-                    op.getInputs().add(new MutableObject<ILogicalOperator>(input.op));
-                    roots.add(new MutableObject<ILogicalOperator>(op));
-                }
-                    break;
-
-                case ASSIGNMENT: {
-                    AssignmentNode asn = (AssignmentNode) an;
-                    String alias = asn.getAlias();
-                    RelationNode rn = asn.getRelation();
-                    Relation rel = translate(rn, symMap);
-                    previousOp = rel.op;
-                    rel.alias = alias;
-                    symMap.put(alias, rel);
-                }
-                    break;
-            }
-        }
-        return new ALogicalPlanImpl(roots);
-    }
-
-    private Relation translate(RelationNode rn, Map<String, Relation> symMap) throws PigletException {
-        switch (rn.getTag()) {
-            case LOAD: {
-                LoadNode ln = (LoadNode) rn;
-                String file = ln.getDataFile();
-                Schema schema = ln.getSchema();
-                List<Pair<String, Type>> fieldsSchema = schema.getSchema();
-                List<LogicalVariable> variables = new ArrayList<LogicalVariable>();
-                List<Object> types = new ArrayList<Object>();
-                Relation rel = new Relation();
-                for (Pair<String, Type> p : fieldsSchema) {
-                    LogicalVariable v = newVariable();
-                    rel.schema.put(p.first, v);
-                    variables.add(v);
-                    types.add(p.second);
-                }
-                PigletFileDataSource ds = new PigletFileDataSource(file, types.toArray());
-                rel.op = new DataSourceScanOperator(variables, ds);
-                rel.op.getInputs().add(
-                        new MutableObject<ILogicalOperator>(previousOp == null ? new EmptyTupleSourceOperator()
-                                : previousOp));
-                return rel;
-            }
-
-            case FILTER: {
-                FilterNode fn = (FilterNode) rn;
-                String alias = fn.getAlias();
-                ExpressionNode conditionNode = fn.getExpression();
-                Relation inputRel = findInputRelation(alias, symMap);
-                Pair<Relation, LogicalVariable> tempInput = translateScalarExpression(inputRel, conditionNode);
-                Relation rel = new Relation();
-                rel.op = new SelectOperator(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                        tempInput.second)));
-                rel.op.getInputs().add(new MutableObject<ILogicalOperator>(tempInput.first.op));
-                rel.schema.putAll(tempInput.first.schema);
-                return rel;
-            }
-        }
-        throw new IllegalArgumentException("Unknown node: " + rn.getTag() + " encountered");
-    }
-
-    private Pair<Relation, LogicalVariable> translateScalarExpression(Relation inputRel, ExpressionNode expressionNode)
-            throws PigletException {
-        switch (expressionNode.getTag()) {
-            case FIELD_ACCESS: {
-                FieldAccessExpressionNode faen = (FieldAccessExpressionNode) expressionNode;
-                String fieldName = faen.getFieldName();
-                LogicalVariable lVar = findField(fieldName, inputRel.schema);
-                return new Pair<Relation, LogicalVariable>(inputRel, lVar);
-            }
-
-            case LITERAL: {
-                LiteralExpressionNode len = (LiteralExpressionNode) expressionNode;
-                String image = len.getImage();
-                Type type = len.getType();
-                ConstantExpression ce = new ConstantExpression(new ConstantValue(type, image));
-                Relation rel = new Relation();
-                LogicalVariable var = newVariable();
-                List<LogicalVariable> vars = new ArrayList<LogicalVariable>();
-                vars.add(var);
-
-                List<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
-                exprs.add(new MutableObject<ILogicalExpression>(ce));
-
-                rel.op = new AssignOperator(vars, exprs);
-                rel.op.getInputs().add(new MutableObject<ILogicalOperator>(inputRel.op));
-                rel.schema.putAll(inputRel.schema);
-
-                return new Pair<Relation, LogicalVariable>(rel, var);
-            }
-
-            case SCALAR_FUNCTION: {
-                ScalarFunctionExpressionNode sfen = (ScalarFunctionExpressionNode) expressionNode;
-                List<Mutable<ILogicalExpression>> argExprs = new ArrayList<Mutable<ILogicalExpression>>();
-                List<ASTNode> arguments = sfen.getArguments();
-                Relation rel = inputRel;
-                for (ASTNode a : arguments) {
-                    Pair<Relation, LogicalVariable> argPair = translateScalarExpression(rel, (ExpressionNode) a);
-                    rel = argPair.first;
-                    argExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(argPair.second)));
-                }
-                Relation outRel = new Relation();
-                outRel.schema.putAll(rel.schema);
-                LogicalVariable var = newVariable();
-                List<LogicalVariable> vars = new ArrayList<LogicalVariable>();
-                vars.add(var);
-
-                IFunctionInfo fInfo = lookupFunction(sfen.getFunctionTag(), sfen.getFunctionName());
-
-                List<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
-                exprs.add(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(fInfo, argExprs)));
-                outRel.op = new AssignOperator(vars, exprs);
-                outRel.op.getInputs().add(new MutableObject<ILogicalOperator>(rel.op));
-                return new Pair<Relation, LogicalVariable>(outRel, var);
-            }
-        }
-        return null;
-    }
-
-    private IFunctionInfo lookupFunction(FunctionTag functionTag, String functionName) throws PigletException {
-        switch (functionTag) {
-            case EQ:
-                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.EQ);
-
-            case NEQ:
-                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.NEQ);
-
-            case LT:
-                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.LT);
-
-            case LTE:
-                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.LE);
-
-            case GT:
-                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.GT);
-
-            case GTE:
-                return metadataProvider.lookupFunction(AlgebricksBuiltinFunctions.GE);
-        }
-        throw new PigletException("Unsupported function: " + functionTag);
-    }
-
-    private LogicalVariable newVariable() {
-        return new LogicalVariable(varCounter++);
-    }
-
-    private LogicalVariable findField(String fieldName, Map<String, LogicalVariable> schema) throws PigletException {
-        LogicalVariable var = schema.get(fieldName);
-        if (var == null) {
-            throw new PigletException("Unable to find field named: " + fieldName);
-        }
-        return var;
-    }
-
-    private Relation findInputRelation(String alias, Map<String, Relation> symMap) throws PigletException {
-        Relation rel = symMap.get(alias);
-        if (rel == null) {
-            throw new PigletException("Unknown alias " + alias + "referenced");
-        }
-        return rel;
-    }
-
-    private static class Relation {
-        String alias;
-        ILogicalOperator op;
-        final Map<String, LogicalVariable> schema;
-
-        public Relation() {
-            schema = new LinkedHashMap<String, LogicalVariable>();
-        }
-    }
-
-    private String getPrettyPrintedPlan(ILogicalPlan plan) throws AlgebricksException {
-        LogicalOperatorPrettyPrintVisitor v = new LogicalOperatorPrettyPrintVisitor();
-        StringBuilder buffer = new StringBuilder();
-        PlanPrettyPrinter.printPlan(plan, buffer, v, 0);
-        return buffer.toString();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java b/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
deleted file mode 100644
index f0e487b..0000000
--- a/hyracks-algebricks/hyracks-algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
+++ /dev/null
@@ -1,195 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.examples.piglet.metadata;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-import edu.uci.ics.hyracks.algebricks.examples.piglet.types.Type;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.FloatParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
-
-public class PigletMetadataProvider implements IMetadataProvider<String, String> {
-    private static final Map<FunctionIdentifier, PigletFunction> FN_MAP;
-
-    static {
-        Map<FunctionIdentifier, PigletFunction> map = new HashMap<FunctionIdentifier, PigletFunction>();
-
-        map.put(AlgebricksBuiltinFunctions.EQ, new PigletFunction(AlgebricksBuiltinFunctions.EQ));
-
-        FN_MAP = Collections.unmodifiableMap(map);
-    }
-
-    @Override
-    public IDataSource<String> findDataSource(String id) throws AlgebricksException {
-        return null;
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-	public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(
-			IDataSource<String> dataSource,
-			List<LogicalVariable> scanVariables,
-			List<LogicalVariable> projectVariables, boolean projectPushed,
-			IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
-			JobGenContext context, JobSpecification jobSpec)
-			throws AlgebricksException {
-        PigletFileDataSource ds = (PigletFileDataSource) dataSource;
-
-        FileSplit[] fileSplits = ds.getFileSplits();
-        String[] locations = new String[fileSplits.length];
-        for (int i = 0; i < fileSplits.length; ++i) {
-            locations[i] = fileSplits[i].getNodeName();
-        }
-        IFileSplitProvider fsp = new ConstantFileSplitProvider(fileSplits);
-
-        Object[] colTypes = ds.getSchemaTypes();
-        IValueParserFactory[] vpfs = new IValueParserFactory[colTypes.length];
-        ISerializerDeserializer[] serDesers = new ISerializerDeserializer[colTypes.length];
-
-        for (int i = 0; i < colTypes.length; ++i) {
-            Type colType = (Type) colTypes[i];
-            IValueParserFactory vpf;
-            ISerializerDeserializer serDeser;
-            switch (colType.getTag()) {
-                case INTEGER:
-                    vpf = IntegerParserFactory.INSTANCE;
-                    serDeser = IntegerSerializerDeserializer.INSTANCE;
-                    break;
-
-                case CHAR_ARRAY:
-                    vpf = UTF8StringParserFactory.INSTANCE;
-                    serDeser = UTF8StringSerializerDeserializer.INSTANCE;
-                    break;
-
-                case FLOAT:
-                    vpf = FloatParserFactory.INSTANCE;
-                    serDeser = FloatSerializerDeserializer.INSTANCE;
-                    break;
-
-                default:
-                    throw new UnsupportedOperationException();
-            }
-            vpfs[i] = vpf;
-            serDesers[i] = serDeser;
-        }
-
-        ITupleParserFactory tpf = new DelimitedDataTupleParserFactory(vpfs, ',');
-        RecordDescriptor rDesc = new RecordDescriptor(serDesers);
-
-        IOperatorDescriptor scanner = new FileScanOperatorDescriptor(jobSpec, fsp, tpf, rDesc);
-        AlgebricksAbsolutePartitionConstraint constraint = new AlgebricksAbsolutePartitionConstraint(locations);
-        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(scanner, constraint);
-    }
-
-    @Override
-    public boolean scannerOperatorIsLeaf(IDataSource<String> dataSource) {
-        return true;
-    }
-
-    @Override
-    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,
-            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc)
-            throws AlgebricksException {
-        PigletFileDataSink ds = (PigletFileDataSink) sink;
-        FileSplit[] fileSplits = ds.getFileSplits();
-        String[] locations = new String[fileSplits.length];
-        for (int i = 0; i < fileSplits.length; ++i) {
-            locations[i] = fileSplits[i].getNodeName();
-        }
-        IPushRuntimeFactory prf = new SinkWriterRuntimeFactory(printColumns, printerFactories, fileSplits[0]
-                .getLocalFile().getFile(), PrinterBasedWriterFactory.INSTANCE, inputDesc);
-        AlgebricksAbsolutePartitionConstraint constraint = new AlgebricksAbsolutePartitionConstraint(locations);
-        return new Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint>(prf, constraint);
-    }
-
-    @Override
-    public IDataSourceIndex<String, String> findDataSourceIndex(String indexId, String dataSourceId)
-            throws AlgebricksException {
-        return null;
-    }
-
-    @Override
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(
-            IDataSource<String> dataSource, IOperatorSchema propagatedSchema, List<LogicalVariable> keys,
-            LogicalVariable payLoadVar, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(IDataSource<String> dataSource,
-            IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv, List<LogicalVariable> keys,
-            LogicalVariable payLoadVar, RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(IDataSource<String> dataSource,
-            IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv, List<LogicalVariable> keys,
-            LogicalVariable payLoadVar, RecordDescriptor recordDesc, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(
-            IDataSourceIndex<String, String> dataSource, IOperatorSchema propagatedSchema,
-            IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
-            List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr, RecordDescriptor recordDesc,
-            JobGenContext context, JobSpecification spec) throws AlgebricksException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(
-            IDataSourceIndex<String, String> dataSource, IOperatorSchema propagatedSchema,
-            IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
-            List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr, RecordDescriptor recordDesc,
-            JobGenContext context, JobSpecification spec) throws AlgebricksException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-    
-    @Override
-    public IFunctionInfo lookupFunction(FunctionIdentifier fid) {
-        return FN_MAP.get(fid);
-    }
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-examples/pom.xml b/hyracks-algebricks/hyracks-algebricks-examples/pom.xml
deleted file mode 100644
index 4d42dfc..0000000
--- a/hyracks-algebricks/hyracks-algebricks-examples/pom.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-examples</artifactId>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>piglet-example</module>
-  </modules>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/pom.xml b/hyracks-algebricks/hyracks-algebricks-rewriter/pom.xml
deleted file mode 100644
index 60c9db6..0000000
--- a/hyracks-algebricks/hyracks-algebricks-rewriter/pom.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-rewriter</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-algebricks-core</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java b/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
deleted file mode 100644
index cbe2b4a..0000000
--- a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
+++ /dev/null
@@ -1,421 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.rewriter.rules;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Factors out common sub-expressions by assigning them to a variables, and replacing the common sub-expressions with references to those variables.
- *
- * Preconditions/Assumptions:
- * Assumes no projects are in the plan. This rule ignores variable reference expressions and constants (other rules deal with those separately).
- * 
- * Postconditions/Examples:
- * Plan with extracted sub-expressions. Generates one assign operator per extracted expression.
- * 
- * Example 1 - Simple Arithmetic Example (simplified)
- * 
- * Before plan:
- * assign [$$1] <- [5 + 6 - 10]
- *   assign [$$0] <- [5 + 6 + 30]
- * 
- * After plan:
- * assign [$$1] <- [$$5 - 10]
- *   assign [$$0] <- [$$5 + 30]
- *     assign [$$5] <- [5 + 6]
- * 
- * Example 2 - Cleaning up 'Distinct By' (simplified)
- * 
- * Before plan: (notice how $$0 is not live after the distinct)
- * assign [$$3] <- [field-access($$0, 1)]
- *   distinct ([%0->$$5])
- *     assign [$$5] <- [field-access($$0, 1)]
- *       unnest $$0 <- [scan-dataset]
- * 
- * After plan: (notice how the issue of $$0 is fixed)
- * assign [$$3] <- [$$5]
- *   distinct ([$$5])
- *     assign [$$5] <- [field-access($$0, 1)]
- *       unnest $$0 <- [scan-dataset]
- * 
- * Example 3 - Pulling Common Expressions Above Joins (simplified)
- * 
- * Before plan:
- * assign [$$9] <- funcZ(funcY($$8))
- *   join (funcX(funcY($$8)))
- * 
- * After plan:
- * assign [$$9] <- funcZ($$10))
- *   select (funcX($$10))
- *     assign [$$10] <- [funcY($$8)]
- *       join (TRUE)
- */
-public class ExtractCommonExpressionsRule implements IAlgebraicRewriteRule {
-
-    private final CommonExpressionSubstitutionVisitor substVisitor = new CommonExpressionSubstitutionVisitor();
-    private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap = new HashMap<ILogicalExpression, ExprEquivalenceClass>();
-    
-    // Set of operators for which common subexpression elimination should not be performed.
-    private static final Set<LogicalOperatorTag> ignoreOps = new HashSet<LogicalOperatorTag>();
-    static {
-        ignoreOps.add(LogicalOperatorTag.UNNEST);
-        ignoreOps.add(LogicalOperatorTag.UNNEST_MAP);
-        ignoreOps.add(LogicalOperatorTag.ORDER);
-        ignoreOps.add(LogicalOperatorTag.PROJECT);
-        ignoreOps.add(LogicalOperatorTag.AGGREGATE);
-        ignoreOps.add(LogicalOperatorTag.RUNNINGAGGREGATE);
-    }
-    
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        exprEqClassMap.clear();
-        substVisitor.setContext(context);
-        boolean modified = removeCommonExpressions(opRef, context);
-        if (modified) {
-            context.computeAndSetTypeEnvironmentForOperator(opRef.getValue());
-        }
-        return modified;
-    }
-
-    private void updateEquivalenceClassMap(LogicalVariable lhs, Mutable<ILogicalExpression> rhsExprRef, ILogicalOperator op) {
-        ExprEquivalenceClass exprEqClass = exprEqClassMap.get(rhsExprRef.getValue());
-        if (exprEqClass == null) {
-            exprEqClass = new ExprEquivalenceClass(op, rhsExprRef);
-            exprEqClassMap.put(rhsExprRef.getValue(), exprEqClass);
-        }
-        exprEqClass.setVariable(lhs);
-    }
-
-    private boolean removeCommonExpressions(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, opRef.getValue())) {
-            return false;
-        }
-        
-        boolean modified = false;
-        // Recurse into children.
-        for (Mutable<ILogicalOperator> inputOpRef : op.getInputs()) {
-            if (removeCommonExpressions(inputOpRef, context)) {
-                modified = true;
-            }
-        }
-        
-        // TODO: Deal with replicate properly. Currently, we just clear the expr equivalence map, since we want to avoid incorrect expression replacement
-        // (the resulting new variables should be assigned live below a replicate).
-        if (op.getOperatorTag() == LogicalOperatorTag.REPLICATE) {
-            exprEqClassMap.clear();
-            return modified;
-        }
-        // Exclude these operators.
-        if (ignoreOps.contains(op.getOperatorTag())) {
-            return modified;
-        }
-        
-        // Perform common subexpression elimination.
-        substVisitor.setOperator(op);
-        if (op.acceptExpressionTransform(substVisitor)) {
-            modified = true;
-        }
-        
-        // Update equivalence class map.
-        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            AssignOperator assignOp = (AssignOperator) op;
-            int numVars = assignOp.getVariables().size();
-            for (int i = 0; i < numVars; i++) {
-                Mutable<ILogicalExpression> exprRef = assignOp.getExpressions().get(i);
-                ILogicalExpression expr = exprRef.getValue();
-                if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE
-                        || expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-                    continue;
-                }
-                // Update equivalence class map.
-                LogicalVariable lhs = assignOp.getVariables().get(i);
-                updateEquivalenceClassMap(lhs, exprRef, op);
-            }
-        }
-
-        // TODO: For now do not perform replacement in nested plans
-        // due to the complication of figuring out whether the firstOp in an equivalence class is within a subplan, 
-        // and the resulting variable will not be visible to the outside.
-        // Since subplans should be eliminated in most cases, this behavior is acceptable for now.
-        /*
-        if (op.hasNestedPlans()) {
-            AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) op;
-            for (ILogicalPlan nestedPlan : opWithNestedPlan.getNestedPlans()) {
-                for (Mutable<ILogicalOperator> rootRef : nestedPlan.getRoots()) {
-                    if (removeCommonExpressions(rootRef, context)) {
-                        modified = true;
-                    }
-                }
-            }
-        }
-        */
-
-        if (modified) {
-            context.computeAndSetTypeEnvironmentForOperator(op);
-            context.addToDontApplySet(this, op);
-        }
-        return modified;
-    }
-
-    private class CommonExpressionSubstitutionVisitor implements ILogicalExpressionReferenceTransform {
-                
-        private final Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
-        private final List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
-        private IOptimizationContext context;
-        private ILogicalOperator op;        
-        
-        public void setContext(IOptimizationContext context) {
-            this.context = context;
-        }
-        
-        public void setOperator(ILogicalOperator op) throws AlgebricksException {
-            this.op = op;
-            liveVars.clear();
-            usedVars.clear();
-        }
-        
-        @Override
-        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
-            if (liveVars.isEmpty() && usedVars.isEmpty()) {
-                VariableUtilities.getLiveVariables(op, liveVars);
-                VariableUtilities.getUsedVariables(op, usedVars);
-            }
-            
-            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
-            boolean modified = false;
-            ExprEquivalenceClass exprEqClass = exprEqClassMap.get(expr);
-            if (exprEqClass != null) {
-                // Replace common subexpression with existing variable. 
-                if (exprEqClass.variableIsSet()) {
-                    // Check if the replacing variable is live at this op.
-                    // However, if the op is already using variables that are not live, then a replacement may enable fixing the plan.
-                    // This behavior is necessary to, e.g., properly deal with distinct by.
-                    // Also just replace the expr if we are replacing common exprs from within the same operator.
-                    if (liveVars.contains(exprEqClass.getVariable()) || !liveVars.containsAll(usedVars)
-                            || op == exprEqClass.getFirstOperator()) {
-                        exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
-                        // Do not descend into children since this expr has been completely replaced.
-                        return true;
-                    }
-                } else {
-                    if (assignCommonExpression(exprEqClass, expr)) {
-                        exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
-                        // Do not descend into children since this expr has been completely replaced.
-                        return true;
-                    }
-                }
-            } else {
-                if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE
-                        && expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                    exprEqClass = new ExprEquivalenceClass(op, exprRef);
-                    exprEqClassMap.put(expr, exprEqClass);
-                }
-            }
-            
-            // Descend into function arguments.
-            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-                for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
-                    if (transform(arg)) {
-                        modified = true;
-                    }
-                }
-            }
-            return modified;
-        }
-        
-        private boolean assignCommonExpression(ExprEquivalenceClass exprEqClass, ILogicalExpression expr) throws AlgebricksException {
-            AbstractLogicalOperator firstOp = (AbstractLogicalOperator) exprEqClass.getFirstOperator();
-            Mutable<ILogicalExpression> firstExprRef = exprEqClass.getFirstExpression();
-            if (firstOp.getOperatorTag() == LogicalOperatorTag.INNERJOIN || firstOp.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
-                // Do not extract common expressions from within the same join operator.
-                if (firstOp == op) {
-                    return false;
-                }
-                AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) firstOp;
-                Mutable<ILogicalExpression> joinCond = joinOp.getCondition();                
-                ILogicalExpression enclosingExpr = getEnclosingExpression(joinCond, firstExprRef.getValue());
-                if (enclosingExpr == null) {
-                    // No viable enclosing expression that we can pull out from the join.
-                    return false;
-                }
-                // Place a Select operator beneath op that contains the enclosing expression.
-                SelectOperator selectOp = new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr));
-                selectOp.getInputs().add(new MutableObject<ILogicalOperator>(op.getInputs().get(0).getValue()));
-                op.getInputs().get(0).setValue(selectOp);
-                // Set firstOp to be the select below op, since we want to assign the common subexpr there.
-                firstOp = (AbstractLogicalOperator) selectOp;
-            } else if (firstOp.getInputs().size() > 1) { 
-                // Bail for any non-join operator with multiple inputs.
-                return false;
-            }                        
-            LogicalVariable newVar = context.newVar();
-            AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(firstExprRef.getValue().cloneExpression()));            
-            // Place assign below firstOp.
-            newAssign.getInputs().add(new MutableObject<ILogicalOperator>(firstOp.getInputs().get(0).getValue()));
-            newAssign.setExecutionMode(firstOp.getExecutionMode());
-            firstOp.getInputs().get(0).setValue(newAssign);
-            // Replace original expr with variable reference, and set var in expression equivalence class.
-            firstExprRef.setValue(new VariableReferenceExpression(newVar));
-            exprEqClass.setVariable(newVar);
-            context.computeAndSetTypeEnvironmentForOperator(newAssign);
-            context.computeAndSetTypeEnvironmentForOperator(firstOp);
-            return true;
-        }
-
-        private ILogicalExpression getEnclosingExpression(Mutable<ILogicalExpression> conditionExprRef, ILogicalExpression commonSubExpr) {
-            ILogicalExpression conditionExpr = conditionExprRef.getValue();
-            if (conditionExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                return null;
-            }
-            if (isEqJoinCondition(commonSubExpr)) {
-                // Do not eliminate the common expression if we could use it for an equi-join.
-                return null;
-            }
-            AbstractFunctionCallExpression conditionFuncExpr = (AbstractFunctionCallExpression) conditionExpr;
-            // Boolean expression that encloses the common subexpression.
-            ILogicalExpression enclosingBoolExpr = null;
-            // We are not dealing with arbitrarily nested and/or expressions here.
-            FunctionIdentifier funcIdent = conditionFuncExpr.getFunctionIdentifier();
-            if (funcIdent.equals(AlgebricksBuiltinFunctions.AND) || funcIdent.equals(AlgebricksBuiltinFunctions.OR)) {
-                Iterator<Mutable<ILogicalExpression>> argIter = conditionFuncExpr.getArguments().iterator();
-                while (argIter.hasNext()) {
-                    Mutable<ILogicalExpression> argRef = argIter.next();
-                    if (containsExpr(argRef.getValue(), commonSubExpr)) {
-                        enclosingBoolExpr = argRef.getValue();
-                        // Remove the enclosing expression from the argument list.
-                        // We are going to pull it out into a new select operator.
-                        argIter.remove();
-                        break;
-                    }
-                }
-                // If and/or only has a single argument left, pull it out and remove the and/or function.
-                if (conditionFuncExpr.getArguments().size() == 1) {
-                    conditionExprRef.setValue(conditionFuncExpr.getArguments().get(0).getValue());
-                }
-            } else {
-                if (!containsExpr(conditionExprRef.getValue(), commonSubExpr)) {
-                    return null;
-                }
-                enclosingBoolExpr = conditionFuncExpr;
-                // Replace the enclosing expression with TRUE.
-                conditionExprRef.setValue(ConstantExpression.TRUE);
-            }
-            return enclosingBoolExpr;
-        }
-    }
-    
-    private boolean containsExpr(ILogicalExpression expr, ILogicalExpression searchExpr) {
-        if (expr == searchExpr) {
-            return true;
-        }
-        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        for (Mutable<ILogicalExpression> argRef : funcExpr.getArguments()) {
-            if (containsExpr(argRef.getValue(), searchExpr)) {
-                return true;
-            }
-        }
-        return false;
-    }
-    
-    private boolean isEqJoinCondition(ILogicalExpression expr) {
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        if (funcExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ)) {
-            ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
-            ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
-            if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
-                    && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                return true;
-            }
-        }
-        return false;
-    }
-    
-    private final class ExprEquivalenceClass {
-        // First operator in which expression is used.
-        private final ILogicalOperator firstOp;
-        
-        // Reference to expression in first op.
-        private final Mutable<ILogicalExpression> firstExprRef;
-        
-        // Variable that this expression has been assigned to.
-        private LogicalVariable var;
-        
-        public ExprEquivalenceClass(ILogicalOperator firstOp, Mutable<ILogicalExpression> firstExprRef) {
-            this.firstOp = firstOp;
-            this.firstExprRef = firstExprRef;
-        }
-        
-        public ILogicalOperator getFirstOperator() {
-            return firstOp;
-        }
-        
-        public Mutable<ILogicalExpression> getFirstExpression() {
-            return firstExprRef;
-        }
-        
-        public void setVariable(LogicalVariable var) {
-            this.var = var;
-        }
-        
-        public LogicalVariable getVariable() {
-            return var;
-        }
-        
-        public boolean variableIsSet() {
-            return var != null;
-        }
-    }
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java b/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java
deleted file mode 100644
index 5c5fdb1..0000000
--- a/hyracks-algebricks/hyracks-algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java
+++ /dev/null
@@ -1,218 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.rewriter.rules;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-
-public class IntroduceGroupByCombinerRule extends AbstractIntroduceCombinerRule {
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        context.addToDontApplySet(this, op);
-        if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
-            return false;
-        }
-        GroupByOperator gbyOp = (GroupByOperator) op;
-        if (gbyOp.getExecutionMode() != ExecutionMode.PARTITIONED) {
-            return false;
-        }
-
-        BookkeepingInfo bi = new BookkeepingInfo();
-        GroupByOperator newGbyOp = opToPush(gbyOp, bi, context);
-        if (newGbyOp == null) {
-            return false;
-        }
-
-        replaceOriginalAggFuncs(bi.toReplaceMap);
-
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyOp.getDecorList()) {
-            LogicalVariable newDecorVar = context.newVar();
-            newGbyOp.addDecorExpression(newDecorVar, p.second.getValue());
-            p.second.setValue(new VariableReferenceExpression(newDecorVar));
-        }
-        newGbyOp.setExecutionMode(ExecutionMode.LOCAL);
-        Object v = gbyOp.getAnnotations().get(OperatorAnnotations.USE_HASH_GROUP_BY);
-        newGbyOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, v);
-
-        Object v2 = gbyOp.getAnnotations().get(OperatorAnnotations.USE_EXTERNAL_GROUP_BY);
-        newGbyOp.getAnnotations().put(OperatorAnnotations.USE_EXTERNAL_GROUP_BY, v2);
-
-        List<LogicalVariable> propagatedVars = new LinkedList<LogicalVariable>();
-        VariableUtilities.getProducedVariables(newGbyOp, propagatedVars);
-
-        Set<LogicalVariable> freeVars = new HashSet<LogicalVariable>();
-        OperatorPropertiesUtil.getFreeVariablesInSubplans(gbyOp, freeVars);
-
-        for (LogicalVariable var : freeVars) {
-            if (!propagatedVars.contains(var)) {
-                LogicalVariable newDecorVar = context.newVar();
-                newGbyOp.addDecorExpression(newDecorVar, new VariableReferenceExpression(var));
-                VariableUtilities.substituteVariables(gbyOp.getNestedPlans().get(0).getRoots().get(0).getValue(), var,
-                        newDecorVar, context);
-            }
-        }
-
-        Mutable<ILogicalOperator> opRef3 = gbyOp.getInputs().get(0);
-        opRef3.setValue(newGbyOp);
-        typeGby(newGbyOp, context);
-        typeGby(gbyOp, context);
-        return true;
-    }
-
-    private void typeGby(AbstractOperatorWithNestedPlans op, IOptimizationContext context) throws AlgebricksException {
-        for (ILogicalPlan p : op.getNestedPlans()) {
-            OperatorPropertiesUtil.typePlan(p, context);
-        }
-        context.computeAndSetTypeEnvironmentForOperator(op);
-    }
-
-    private GroupByOperator opToPush(GroupByOperator gbyOp, BookkeepingInfo bi, IOptimizationContext context)
-            throws AlgebricksException {
-        // Hook up input to new group-by.
-        Mutable<ILogicalOperator> opRef3 = gbyOp.getInputs().get(0);
-        ILogicalOperator op3 = opRef3.getValue();
-        GroupByOperator newGbyOp = new GroupByOperator();
-        newGbyOp.getInputs().add(new MutableObject<ILogicalOperator>(op3));
-        // Copy annotations.        
-        Map<String, Object> annotations = newGbyOp.getAnnotations();
-        annotations.putAll(gbyOp.getAnnotations());
-
-        List<LogicalVariable> gbyVars = gbyOp.getGbyVarList();
-        for (ILogicalPlan p : gbyOp.getNestedPlans()) {
-            Pair<Boolean, ILogicalPlan> bip = tryToPushSubplan(p, gbyOp, newGbyOp, bi, gbyVars, context);
-            if (!bip.first) {
-                // For now, if we cannot push everything, give up.
-                return null;
-            }
-            ILogicalPlan pushedSubplan = bip.second;
-            if (pushedSubplan != null) {
-                newGbyOp.getNestedPlans().add(pushedSubplan);
-            }
-        }
-
-        ArrayList<LogicalVariable> newOpGbyList = new ArrayList<LogicalVariable>();
-        ArrayList<LogicalVariable> replGbyList = new ArrayList<LogicalVariable>();
-        // Find maximal sequence of variable.
-        for (Map.Entry<GroupByOperator, List<LogicalVariable>> e : bi.modifyGbyMap.entrySet()) {
-            List<LogicalVariable> varList = e.getValue();
-            boolean see1 = true;
-            int sz1 = newOpGbyList.size();
-            int i = 0;
-            for (LogicalVariable v : varList) {
-                if (see1) {
-                    if (i < sz1) {
-                        LogicalVariable v2 = newOpGbyList.get(i);
-                        if (v != v2) {
-                            // cannot linearize
-                            return null;
-                        }
-                    } else {
-                        see1 = false;
-                        newOpGbyList.add(v);
-                        replGbyList.add(context.newVar());
-                    }
-                    i++;
-                } else {
-                    newOpGbyList.add(v);
-                    replGbyList.add(context.newVar());
-                }
-            }
-        }
-        // set the vars in the new op
-        int n = newOpGbyList.size();
-        for (int i = 0; i < n; i++) {
-            newGbyOp.addGbyExpression(replGbyList.get(i), new VariableReferenceExpression(newOpGbyList.get(i)));
-            VariableUtilities.substituteVariables(gbyOp, newOpGbyList.get(i), replGbyList.get(i), false, context);
-        }
-        return newGbyOp;
-    }
-
-    private Pair<Boolean, ILogicalPlan> tryToPushSubplan(ILogicalPlan nestedPlan, GroupByOperator oldGbyOp,
-            GroupByOperator newGbyOp, BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context)
-            throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> pushedRoots = new ArrayList<Mutable<ILogicalOperator>>();
-        for (Mutable<ILogicalOperator> r : nestedPlan.getRoots()) {
-            if (!tryToPushRoot(r, oldGbyOp, newGbyOp, bi, gbyVars, context, pushedRoots)) {
-                // For now, if we cannot push everything, give up.
-                return new Pair<Boolean, ILogicalPlan>(false, null);
-            }
-        }
-        if (pushedRoots.isEmpty()) {
-            return new Pair<Boolean, ILogicalPlan>(true, null);
-        } else {
-            return new Pair<Boolean, ILogicalPlan>(true, new ALogicalPlanImpl(pushedRoots));
-        }
-    }
-
-    private boolean tryToPushRoot(Mutable<ILogicalOperator> root, GroupByOperator oldGbyOp, GroupByOperator newGbyOp,
-            BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context,
-            List<Mutable<ILogicalOperator>> toPushAccumulate) throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) root.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-        if (op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
-            AggregateOperator initAgg = (AggregateOperator) op1;
-            Pair<Boolean, Mutable<ILogicalOperator>> pOpRef = tryToPushAgg(initAgg, newGbyOp, bi.toReplaceMap, context);
-            if (!pOpRef.first) {
-                return false;
-            }
-            Mutable<ILogicalOperator> opRef = pOpRef.second;
-            if (opRef != null) {
-                toPushAccumulate.add(opRef);
-            }
-            bi.modifyGbyMap.put(oldGbyOp, gbyVars);
-            return true;
-        } else {
-            while (op2.getOperatorTag() != LogicalOperatorTag.GROUP && op2.getInputs().size() == 1) {
-                op2 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
-            }
-            if (op2.getOperatorTag() != LogicalOperatorTag.GROUP) {
-                return false;
-            }
-            GroupByOperator nestedGby = (GroupByOperator) op2;
-            List<LogicalVariable> gbyVars2 = nestedGby.getGbyVarList();
-            List<LogicalVariable> concatGbyVars = new ArrayList<LogicalVariable>(gbyVars);
-            concatGbyVars.addAll(gbyVars2);
-            for (ILogicalPlan p : nestedGby.getNestedPlans()) {
-                for (Mutable<ILogicalOperator> r2 : p.getRoots()) {
-                    if (!tryToPushRoot(r2, nestedGby, newGbyOp, bi, concatGbyVars, context, toPushAccumulate)) {
-                        return false;
-                    }
-                }
-            }
-            return true;
-        }
-    }
-}
diff --git a/hyracks-algebricks/hyracks-algebricks-runtime/pom.xml b/hyracks-algebricks/hyracks-algebricks-runtime/pom.xml
deleted file mode 100644
index 7617f98..0000000
--- a/hyracks-algebricks/hyracks-algebricks-runtime/pom.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-runtime</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-storage-am-btree</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-storage-am-rtree</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-dataflow-std</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-common</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-data</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/pom.xml b/hyracks-algebricks/hyracks-algebricks-tests/pom.xml
deleted file mode 100644
index 257a0fc..0000000
--- a/hyracks-algebricks/hyracks-algebricks-tests/pom.xml
+++ /dev/null
@@ -1,109 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks-tests</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-algebricks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>generate-sources</phase>
-            <configuration>
-              <tasks>
-                <ant antfile="build-script.xml" target="build">
-                  <property name="main.class" value="edu.uci.ics.hyracks.algebricks.tests.script.IdentityStreamingScript" />
-                  <property name="script.classpath" refid="maven.compile.classpath" />
-                  <property name="jvm.params" value="" />
-                  <property name="program.params" value="" />
-                  <property name="source" value="${basedir}/src/main/scripts/run" />
-                  <property name="target.dir" value="${basedir}/target/testscripts" />
-                  <property name="target" value="idscript" />
-                </ant>
-              </tasks>
-            </configuration>
-            <goals>
-              <goal>run</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-    <pluginManagement>
-    	<plugins>
-    		<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
-    		<plugin>
-    			<groupId>org.eclipse.m2e</groupId>
-    			<artifactId>lifecycle-mapping</artifactId>
-    			<version>1.0.0</version>
-    			<configuration>
-    				<lifecycleMappingMetadata>
-    					<pluginExecutions>
-    						<pluginExecution>
-    							<pluginExecutionFilter>
-    								<groupId>
-    									org.apache.maven.plugins
-    								</groupId>
-    								<artifactId>
-    									maven-antrun-plugin
-    								</artifactId>
-    								<versionRange>[1.3,)</versionRange>
-    								<goals>
-    									<goal>run</goal>
-    								</goals>
-    							</pluginExecutionFilter>
-    							<action>
-    								<ignore />
-    							</action>
-    						</pluginExecution>
-    					</pluginExecutions>
-    				</lifecycleMappingMetadata>
-    			</configuration>
-    		</plugin>
-    	</plugins>
-    </pluginManagement>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-algebricks-compiler</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>junit</groupId>
-  	<artifactId>junit</artifactId>
-  	<version>4.8.1</version>
-  	<scope>test</scope>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-control-cc</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-control-nc</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-data-std</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-algebricks/pom.xml b/hyracks-algebricks/pom.xml
deleted file mode 100644
index e898780..0000000
--- a/hyracks-algebricks/pom.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-algebricks</artifactId>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>hyracks-algebricks-compiler</module>
-    <module>hyracks-algebricks-common</module>
-    <module>hyracks-algebricks-data</module>
-    <module>hyracks-algebricks-core</module>
-    <module>hyracks-algebricks-runtime</module>
-    <module>hyracks-algebricks-rewriter</module>
-    <module>hyracks-algebricks-tests</module>
-    <module>hyracks-algebricks-examples</module>
-  </modules>
-</project>
diff --git a/hyracks-api/pom.xml b/hyracks-api/pom.xml
deleted file mode 100644
index 6b6a553..0000000
--- a/hyracks-api/pom.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-api</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>org.json</groupId>
-  		<artifactId>json</artifactId>
-  		<version>20090211</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>org.apache.httpcomponents</groupId>
-  		<artifactId>httpclient</artifactId>
-  		<version>4.1-alpha2</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>args4j</groupId>
-  		<artifactId>args4j</artifactId>
-  		<version>2.0.12</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-ipc</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  	</dependency>
-  	<dependency>
-  		<groupId>org.apache.commons</groupId>
-  		<artifactId>commons-lang3</artifactId>
-  		<version>3.1</version>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java
deleted file mode 100644
index 791f312..0000000
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java
+++ /dev/null
@@ -1,88 +0,0 @@
-package edu.uci.ics.hyracks.api.client.impl;
-
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
-import edu.uci.ics.hyracks.api.constraints.Constraint;
-import edu.uci.ics.hyracks.api.constraints.IConstraintAcceptor;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
-import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGenerator;
-import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
-import edu.uci.ics.hyracks.api.job.JobActivityGraph;
-import edu.uci.ics.hyracks.api.job.JobFlag;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class JobSpecificationActivityClusterGraphGeneratorFactory implements IActivityClusterGraphGeneratorFactory {
-    private static final long serialVersionUID = 1L;
-
-    private final JobSpecification spec;
-
-    public JobSpecificationActivityClusterGraphGeneratorFactory(JobSpecification jobSpec) {
-        this.spec = jobSpec;
-    }
-
-    @Override
-    public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(String appName, JobId jobId,
-            final ICCApplicationContext ccAppCtx, EnumSet<JobFlag> jobFlags) throws HyracksException {
-        final JobActivityGraphBuilder builder = new JobActivityGraphBuilder(spec, jobFlags);
-        PlanUtils.visit(spec, new IConnectorDescriptorVisitor() {
-            @Override
-            public void visit(IConnectorDescriptor conn) throws HyracksException {
-                builder.addConnector(conn);
-            }
-        });
-        PlanUtils.visit(spec, new IOperatorDescriptorVisitor() {
-            @Override
-            public void visit(IOperatorDescriptor op) {
-                op.contributeActivities(builder);
-            }
-        });
-        builder.finish();
-        final JobActivityGraph jag = builder.getActivityGraph();
-        ActivityClusterGraphBuilder acgb = new ActivityClusterGraphBuilder();
-
-        final ActivityClusterGraph acg = acgb.inferActivityClusters(jobId, jag);
-        acg.setFrameSize(spec.getFrameSize());
-        acg.setMaxReattempts(spec.getMaxReattempts());
-        acg.setJobletEventListenerFactory(spec.getJobletEventListenerFactory());
-        acg.setGlobalJobDataFactory(spec.getGlobalJobDataFactory());
-        final Set<Constraint> constraints = new HashSet<Constraint>();
-        final IConstraintAcceptor acceptor = new IConstraintAcceptor() {
-            @Override
-            public void addConstraint(Constraint constraint) {
-                constraints.add(constraint);
-            }
-        };
-        PlanUtils.visit(spec, new IOperatorDescriptorVisitor() {
-            @Override
-            public void visit(IOperatorDescriptor op) {
-                op.contributeSchedulingConstraints(acceptor, ccAppCtx);
-            }
-        });
-        PlanUtils.visit(spec, new IConnectorDescriptorVisitor() {
-            @Override
-            public void visit(IConnectorDescriptor conn) {
-                conn.contributeSchedulingConstraints(acceptor, acg.getConnectorMap().get(conn.getConnectorId()),
-                        ccAppCtx);
-            }
-        });
-        constraints.addAll(spec.getUserConstraints());
-        return new IActivityClusterGraphGenerator() {
-            @Override
-            public ActivityClusterGraph initialize() {
-                return acg;
-            }
-
-            @Override
-            public Set<Constraint> getConstraints() {
-                return constraints;
-            }
-        };
-    }
-}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksRootContext.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksRootContext.java
deleted file mode 100644
index a94c6de..0000000
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksRootContext.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.api.context;
-
-import edu.uci.ics.hyracks.api.io.IIOManager;
-
-public interface IHyracksRootContext {
-    public IIOManager getIOManager();
-}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ILinearizeComparator.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ILinearizeComparator.java
deleted file mode 100644
index 51a8cfe..0000000
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ILinearizeComparator.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.api.dataflow.value;
-
-public interface ILinearizeComparator extends IBinaryComparator {
-    public int getDimensions();
-
-}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ILinearizeComparatorFactory.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ILinearizeComparatorFactory.java
deleted file mode 100644
index 79619c4..0000000
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ILinearizeComparatorFactory.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.api.dataflow.value;
-
-
-public interface ILinearizeComparatorFactory extends IBinaryComparatorFactory {
-    public ILinearizeComparator createBinaryComparator();
-}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/FileReference.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/FileReference.java
deleted file mode 100644
index ffe41d0..0000000
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/FileReference.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.api.io;
-
-import java.io.File;
-import java.io.Serializable;
-
-public final class FileReference implements Serializable {
-    private static final long serialVersionUID = 1L;
-
-    private final File file;
-    private final IODeviceHandle dev;
-
-    public FileReference(IODeviceHandle dev, String devRelPath) {
-        file = new File(dev.getPath(), devRelPath);
-        this.dev = dev;
-    }
-
-    public FileReference(File file) {
-        this.file = file;
-        this.dev = null;
-    }
-
-    public File getFile() {
-    	return file;
-    }
-
-    public IODeviceHandle getDeviceHandle() {
-    	return dev;
-    }
-    
-    @Override
-    public String toString() {
-        return file.getAbsolutePath();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (!(o instanceof FileReference)) {
-            return false;
-        }
-        return file.equals(((FileReference) o).file);
-    }
-
-    @Override
-    public int hashCode() {
-        return file.hashCode();
-    }
-
-    public void delete() {
-        file.delete();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterGraph.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterGraph.java
deleted file mode 100644
index dd68747..0000000
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterGraph.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
-2 * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.api.job;
-
-import java.io.Serializable;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.hyracks.api.dataflow.ActivityId;
-import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
-
-public class ActivityClusterGraph implements Serializable {
-    private static final long serialVersionUID = 1L;
-
-    private int version;
-
-    private final Map<ActivityClusterId, ActivityCluster> activityClusterMap;
-
-    private final Map<ActivityId, ActivityCluster> activityMap;
-
-    private final Map<ConnectorDescriptorId, ActivityCluster> connectorMap;
-
-    private int frameSize;
-
-    private int maxReattempts;
-
-    private IJobletEventListenerFactory jobletEventListenerFactory;
-
-    private IGlobalJobDataFactory globalJobDataFactory;
-
-    public ActivityClusterGraph() {
-        version = 0;
-        activityClusterMap = new HashMap<ActivityClusterId, ActivityCluster>();
-        activityMap = new HashMap<ActivityId, ActivityCluster>();
-        connectorMap = new HashMap<ConnectorDescriptorId, ActivityCluster>();
-        frameSize = 32768;
-    }
-
-    public Map<ActivityId, ActivityCluster> getActivityMap() {
-        return activityMap;
-    }
-
-    public Map<ConnectorDescriptorId, ActivityCluster> getConnectorMap() {
-        return connectorMap;
-    }
-
-    public Map<ActivityClusterId, ActivityCluster> getActivityClusterMap() {
-        return activityClusterMap;
-    }
-
-    public void addActivityClusters(Collection<ActivityCluster> newActivityClusters) {
-        for (ActivityCluster ac : newActivityClusters) {
-            activityClusterMap.put(ac.getId(), ac);
-            for (ActivityId aid : ac.getActivityMap().keySet()) {
-                activityMap.put(aid, ac);
-            }
-            for (ConnectorDescriptorId cid : ac.getConnectorMap().keySet()) {
-                connectorMap.put(cid, ac);
-            }
-        }
-        ++version;
-    }
-
-    public int getVersion() {
-        return version;
-    }
-
-    public void setFrameSize(int frameSize) {
-        this.frameSize = frameSize;
-    }
-
-    public int getFrameSize() {
-        return frameSize;
-    }
-
-    public void setMaxReattempts(int maxReattempts) {
-        this.maxReattempts = maxReattempts;
-    }
-
-    public int getMaxReattempts() {
-        return maxReattempts;
-    }
-
-    public IJobletEventListenerFactory getJobletEventListenerFactory() {
-        return jobletEventListenerFactory;
-    }
-
-    public void setJobletEventListenerFactory(IJobletEventListenerFactory jobletEventListenerFactory) {
-        this.jobletEventListenerFactory = jobletEventListenerFactory;
-    }
-
-    public IGlobalJobDataFactory getGlobalJobDataFactory() {
-        return globalJobDataFactory;
-    }
-
-    public void setGlobalJobDataFactory(IGlobalJobDataFactory globalJobDataFactory) {
-        this.globalJobDataFactory = globalJobDataFactory;
-    }
-
-    public JSONObject toJSON() throws JSONException {
-        JSONObject acgj = new JSONObject();
-
-        JSONArray acl = new JSONArray();
-        for (ActivityCluster ac : activityClusterMap.values()) {
-            acl.put(ac.toJSON());
-        }
-        acgj.put("version", version);
-        acgj.put("activity-clusters", acl);
-        return acgj;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java
deleted file mode 100644
index cf2eec2..0000000
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java
+++ /dev/null
@@ -1,321 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.api.job;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.lang3.tuple.Pair;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.hyracks.api.constraints.Constraint;
-import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
-import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-
-public class JobSpecification implements Serializable, IOperatorDescriptorRegistry, IConnectorDescriptorRegistry {
-    private static final long serialVersionUID = 1L;
-
-    private final List<OperatorDescriptorId> roots;
-
-    private final Map<OperatorDescriptorId, IOperatorDescriptor> opMap;
-
-    private final Map<ConnectorDescriptorId, IConnectorDescriptor> connMap;
-
-    private final Map<OperatorDescriptorId, List<IConnectorDescriptor>> opInputMap;
-
-    private final Map<OperatorDescriptorId, List<IConnectorDescriptor>> opOutputMap;
-
-    private final Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> connectorOpMap;
-
-    private final Map<String, Serializable> properties;
-
-    private final Set<Constraint> userConstraints;
-
-    private IConnectorPolicyAssignmentPolicy connectorPolicyAssignmentPolicy;
-
-    private int frameSize;
-
-    private int maxReattempts;
-
-    private IJobletEventListenerFactory jobletEventListenerFactory;
-
-    private IGlobalJobDataFactory globalJobDataFactory;
-
-    private transient int operatorIdCounter;
-
-    private transient int connectorIdCounter;
-
-    public JobSpecification() {
-        roots = new ArrayList<OperatorDescriptorId>();
-        opMap = new HashMap<OperatorDescriptorId, IOperatorDescriptor>();
-        connMap = new HashMap<ConnectorDescriptorId, IConnectorDescriptor>();
-        opInputMap = new HashMap<OperatorDescriptorId, List<IConnectorDescriptor>>();
-        opOutputMap = new HashMap<OperatorDescriptorId, List<IConnectorDescriptor>>();
-        connectorOpMap = new HashMap<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>>();
-        properties = new HashMap<String, Serializable>();
-        userConstraints = new HashSet<Constraint>();
-        operatorIdCounter = 0;
-        connectorIdCounter = 0;
-        frameSize = 32768;
-        maxReattempts = 2;
-    }
-
-    @Override
-    public OperatorDescriptorId createOperatorDescriptorId(IOperatorDescriptor op) {
-        OperatorDescriptorId odId = new OperatorDescriptorId(operatorIdCounter++);
-        opMap.put(odId, op);
-        return odId;
-    }
-
-    @Override
-    public ConnectorDescriptorId createConnectorDescriptor(IConnectorDescriptor conn) {
-        ConnectorDescriptorId cdId = new ConnectorDescriptorId(connectorIdCounter++);
-        connMap.put(cdId, conn);
-        return cdId;
-    }
-
-    public void addRoot(IOperatorDescriptor op) {
-        roots.add(op.getOperatorId());
-    }
-
-    public void connect(IConnectorDescriptor conn, IOperatorDescriptor producerOp, int producerPort,
-            IOperatorDescriptor consumerOp, int consumerPort) {
-        insertIntoIndexedMap(opInputMap, consumerOp.getOperatorId(), consumerPort, conn);
-        insertIntoIndexedMap(opOutputMap, producerOp.getOperatorId(), producerPort, conn);
-        connectorOpMap.put(
-                conn.getConnectorId(),
-                Pair.<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> of(
-                        Pair.<IOperatorDescriptor, Integer> of(producerOp, producerPort),
-                        Pair.<IOperatorDescriptor, Integer> of(consumerOp, consumerPort)));
-    }
-
-    public void setProperty(String name, Serializable value) {
-        properties.put(name, value);
-    }
-
-    public Serializable getProperty(String name) {
-        return properties.get(name);
-    }
-
-    private <T> void extend(List<T> list, int index) {
-        int n = list.size();
-        for (int i = n; i <= index; ++i) {
-            list.add(null);
-        }
-    }
-
-    public Map<ConnectorDescriptorId, IConnectorDescriptor> getConnectorMap() {
-        return connMap;
-    }
-
-    public Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> getConnectorOperatorMap() {
-        return connectorOpMap;
-    }
-
-    public RecordDescriptor getConnectorRecordDescriptor(IConnectorDescriptor conn) {
-        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
-                .getConnectorId());
-        return connInfo.getLeft().getLeft().getOutputRecordDescriptors()[connInfo.getLeft().getRight()];
-    }
-
-    public IOperatorDescriptor getConsumer(IConnectorDescriptor conn) {
-        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
-                .getConnectorId());
-        return connInfo.getRight().getLeft();
-    }
-
-    public int getConsumerInputIndex(IConnectorDescriptor conn) {
-        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
-                .getConnectorId());
-        return connInfo.getRight().getRight();
-    }
-
-    public IConnectorDescriptor getInputConnectorDescriptor(IOperatorDescriptor op, int inputIndex) {
-        return getInputConnectorDescriptor(op.getOperatorId(), inputIndex);
-    }
-
-    public IConnectorDescriptor getInputConnectorDescriptor(OperatorDescriptorId odId, int inputIndex) {
-        return opInputMap.get(odId).get(inputIndex);
-    }
-
-    public Map<OperatorDescriptorId, List<IConnectorDescriptor>> getOperatorInputMap() {
-        return opInputMap;
-    }
-
-    public RecordDescriptor getOperatorInputRecordDescriptor(OperatorDescriptorId odId, int inputIndex) {
-        return getConnectorRecordDescriptor(getInputConnectorDescriptor(odId, inputIndex));
-    }
-
-    public Map<OperatorDescriptorId, IOperatorDescriptor> getOperatorMap() {
-        return opMap;
-    }
-
-    public Map<OperatorDescriptorId, List<IConnectorDescriptor>> getOperatorOutputMap() {
-        return opOutputMap;
-    }
-
-    public RecordDescriptor getOperatorOutputRecordDescriptor(OperatorDescriptorId odId, int outputIndex) {
-        return getConnectorRecordDescriptor(getOutputConnectorDescriptor(odId, outputIndex));
-    }
-
-    public IConnectorDescriptor getOutputConnectorDescriptor(IOperatorDescriptor op, int outputIndex) {
-        return getOutputConnectorDescriptor(op.getOperatorId(), outputIndex);
-    }
-
-    public IConnectorDescriptor getOutputConnectorDescriptor(OperatorDescriptorId odId, int outputIndex) {
-        return opOutputMap.get(odId).get(outputIndex);
-    }
-
-    public IOperatorDescriptor getProducer(IConnectorDescriptor conn) {
-        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
-                .getConnectorId());
-        return connInfo.getLeft().getLeft();
-    }
-
-    public int getProducerOutputIndex(IConnectorDescriptor conn) {
-        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
-                .getConnectorId());
-        return connInfo.getLeft().getRight();
-    }
-
-    public List<OperatorDescriptorId> getRoots() {
-        return roots;
-    }
-
-    public IConnectorPolicyAssignmentPolicy getConnectorPolicyAssignmentPolicy() {
-        return connectorPolicyAssignmentPolicy;
-    }
-
-    public void setConnectorPolicyAssignmentPolicy(IConnectorPolicyAssignmentPolicy connectorPolicyAssignmentPolicy) {
-        this.connectorPolicyAssignmentPolicy = connectorPolicyAssignmentPolicy;
-    }
-
-    public void setFrameSize(int frameSize) {
-        this.frameSize = frameSize;
-    }
-
-    public int getFrameSize() {
-        return frameSize;
-    }
-
-    public void setMaxReattempts(int maxReattempts) {
-        this.maxReattempts = maxReattempts;
-    }
-
-    public int getMaxReattempts() {
-        return maxReattempts;
-    }
-
-    public void addUserConstraint(Constraint constraint) {
-        userConstraints.add(constraint);
-    }
-
-    public Set<Constraint> getUserConstraints() {
-        return userConstraints;
-    }
-
-    public IJobletEventListenerFactory getJobletEventListenerFactory() {
-        return jobletEventListenerFactory;
-    }
-
-    public void setJobletEventListenerFactory(IJobletEventListenerFactory jobletEventListenerFactory) {
-        this.jobletEventListenerFactory = jobletEventListenerFactory;
-    }
-
-    public IGlobalJobDataFactory getGlobalJobDataFactory() {
-        return globalJobDataFactory;
-    }
-
-    public void setGlobalJobDataFactory(IGlobalJobDataFactory globalJobDataFactory) {
-        this.globalJobDataFactory = globalJobDataFactory;
-    }
-
-    private <K, V> void insertIntoIndexedMap(Map<K, List<V>> map, K key, int index, V value) {
-        List<V> vList = map.get(key);
-        if (vList == null) {
-            vList = new ArrayList<V>();
-            map.put(key, vList);
-        }
-        extend(vList, index);
-        vList.set(index, value);
-    }
-
-    public String toString() {
-        StringBuilder buffer = new StringBuilder();
-
-        for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
-            buffer.append(e.getKey().getId()).append(" : ").append(e.getValue().toString()).append("\n");
-            List<IConnectorDescriptor> inputs = opInputMap.get(e.getKey());
-            if (inputs != null && !inputs.isEmpty()) {
-                buffer.append("   Inputs:\n");
-                for (IConnectorDescriptor c : inputs) {
-                    buffer.append("      ").append(c.getConnectorId().getId()).append(" : ").append(c.toString())
-                            .append("\n");
-                }
-            }
-            List<IConnectorDescriptor> outputs = opOutputMap.get(e.getKey());
-            if (outputs != null && !outputs.isEmpty()) {
-                buffer.append("   Outputs:\n");
-                for (IConnectorDescriptor c : outputs) {
-                    buffer.append("      ").append(c.getConnectorId().getId()).append(" : ").append(c.toString())
-                            .append("\n");
-                }
-            }
-        }
-
-        buffer.append("\n").append("Constraints:\n").append(userConstraints);
-
-        return buffer.toString();
-    }
-
-    public JSONObject toJSON() throws JSONException {
-        JSONObject jjob = new JSONObject();
-
-        JSONArray jopArray = new JSONArray();
-        for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
-            jopArray.put(e.getValue().toJSON());
-        }
-        jjob.put("operators", jopArray);
-
-        JSONArray jcArray = new JSONArray();
-        for (Map.Entry<ConnectorDescriptorId, IConnectorDescriptor> e : connMap.entrySet()) {
-            JSONObject conn = new JSONObject();
-            Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connection = connectorOpMap
-                    .get(e.getKey());
-            if (connection != null) {
-                conn.put("in-operator-id", connection.getLeft().getLeft().getOperatorId().toString());
-                conn.put("in-operator-port", connection.getLeft().getRight().intValue());
-                conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString());
-                conn.put("out-operator-port", connection.getRight().getRight().intValue());
-            }
-            conn.put("connector", e.getValue().toJSON());
-            jcArray.put(conn);
-        }
-        jjob.put("connectors", jcArray);
-
-        return jjob;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-cli/pom.xml b/hyracks-cli/pom.xml
deleted file mode 100644
index dba38d8..0000000
--- a/hyracks-cli/pom.xml
+++ /dev/null
@@ -1,96 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-cli</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>javacc-maven-plugin</artifactId>
-        <version>2.6</version>
-        <executions>
-          <execution>
-            <id>javacc</id>
-            <goals>
-              <goal>javacc</goal>
-            </goals>
-            <configuration>
-              <isStatic>false</isStatic>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.cli.Main</mainClass>
-                  <name>hyrackscli</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>jline</groupId>
-  		<artifactId>jline</artifactId>
-  		<version>0.9.94</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-control/hyracks-control-cc/pom.xml b/hyracks-control/hyracks-control-cc/pom.xml
deleted file mode 100644
index 39ea5da..0000000
--- a/hyracks-control/hyracks-control-cc/pom.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-control-cc</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-control</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-control-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>org.eclipse.jetty</groupId>
-  		<artifactId>jetty-server</artifactId>
-  		<version>8.0.0.RC0</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>org.eclipse.jetty</groupId>
-  		<artifactId>jetty-webapp</artifactId>
-  		<version>8.0.0.RC0</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>org.apache.wicket</groupId>
-  		<artifactId>wicket-core</artifactId>
-  		<version>1.5.2</version>
-  	</dependency>
-  	<dependency>
-  		<groupId>org.slf4j</groupId>
-  		<artifactId>slf4j-jcl</artifactId>
-  		<version>1.6.3</version>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java b/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
deleted file mode 100644
index 32b031d..0000000
--- a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
+++ /dev/null
@@ -1,432 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.cc;
-
-import java.io.File;
-import java.io.FileReader;
-import java.net.InetSocketAddress;
-import java.util.HashMap;
-import java.util.Hashtable;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.concurrent.Executor;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.xml.sax.InputSource;
-
-import edu.uci.ics.hyracks.api.client.ClusterControllerInfo;
-import edu.uci.ics.hyracks.api.client.HyracksClientInterfaceFunctions;
-import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
-import edu.uci.ics.hyracks.api.context.ICCContext;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobStatus;
-import edu.uci.ics.hyracks.api.topology.ClusterTopology;
-import edu.uci.ics.hyracks.api.topology.TopologyDefinitionParser;
-import edu.uci.ics.hyracks.control.cc.application.CCApplicationContext;
-import edu.uci.ics.hyracks.control.cc.job.JobRun;
-import edu.uci.ics.hyracks.control.cc.web.WebServer;
-import edu.uci.ics.hyracks.control.cc.work.ApplicationCreateWork;
-import edu.uci.ics.hyracks.control.cc.work.ApplicationDestroyWork;
-import edu.uci.ics.hyracks.control.cc.work.ApplicationMessageWork;
-import edu.uci.ics.hyracks.control.cc.work.ApplicationStartWork;
-import edu.uci.ics.hyracks.control.cc.work.ApplicationStateChangeWork;
-import edu.uci.ics.hyracks.control.cc.work.GetIpAddressNodeNameMapWork;
-import edu.uci.ics.hyracks.control.cc.work.GetJobStatusWork;
-import edu.uci.ics.hyracks.control.cc.work.GetNodeControllersInfoWork;
-import edu.uci.ics.hyracks.control.cc.work.JobStartWork;
-import edu.uci.ics.hyracks.control.cc.work.JobletCleanupNotificationWork;
-import edu.uci.ics.hyracks.control.cc.work.NodeHeartbeatWork;
-import edu.uci.ics.hyracks.control.cc.work.RegisterNodeWork;
-import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionAvailibilityWork;
-import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionRequestWork;
-import edu.uci.ics.hyracks.control.cc.work.RemoveDeadNodesWork;
-import edu.uci.ics.hyracks.control.cc.work.ReportProfilesWork;
-import edu.uci.ics.hyracks.control.cc.work.TaskCompleteWork;
-import edu.uci.ics.hyracks.control.cc.work.TaskFailureWork;
-import edu.uci.ics.hyracks.control.cc.work.UnregisterNodeWork;
-import edu.uci.ics.hyracks.control.cc.work.WaitForJobCompletionWork;
-import edu.uci.ics.hyracks.control.common.AbstractRemoteService;
-import edu.uci.ics.hyracks.control.common.context.ServerContext;
-import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
-import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions;
-import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions.Function;
-import edu.uci.ics.hyracks.control.common.logs.LogFile;
-import edu.uci.ics.hyracks.control.common.work.IPCResponder;
-import edu.uci.ics.hyracks.control.common.work.WorkQueue;
-import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
-import edu.uci.ics.hyracks.ipc.api.IIPCI;
-import edu.uci.ics.hyracks.ipc.exceptions.IPCException;
-import edu.uci.ics.hyracks.ipc.impl.IPCSystem;
-import edu.uci.ics.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
-
-public class ClusterControllerService extends AbstractRemoteService {
-    private static Logger LOGGER = Logger.getLogger(ClusterControllerService.class.getName());
-
-    private final CCConfig ccConfig;
-
-    private IPCSystem clusterIPC;
-
-    private IPCSystem clientIPC;
-
-    private final LogFile jobLog;
-
-    private final Map<String, NodeControllerState> nodeRegistry;
-
-    private final Map<String, Set<String>> ipAddressNodeNameMap;
-
-    private final Map<String, CCApplicationContext> applications;
-
-    private final ServerContext serverCtx;
-
-    private final WebServer webServer;
-
-    private ClusterControllerInfo info;
-
-    private final Map<JobId, JobRun> activeRunMap;
-
-    private final Map<JobId, JobRun> runMapArchive;
-
-    private final WorkQueue workQueue;
-
-    private final ExecutorService executor;
-
-    private final Timer timer;
-
-    private final ICCContext ccContext;
-
-    private final DeadNodeSweeper sweeper;
-
-    private long jobCounter;
-
-    public ClusterControllerService(final CCConfig ccConfig) throws Exception {
-        this.ccConfig = ccConfig;
-        File jobLogFolder = new File(ccConfig.ccRoot, "logs/jobs");
-        jobLog = new LogFile(jobLogFolder);
-        nodeRegistry = new LinkedHashMap<String, NodeControllerState>();
-        ipAddressNodeNameMap = new HashMap<String, Set<String>>();
-        applications = new Hashtable<String, CCApplicationContext>();
-        serverCtx = new ServerContext(ServerContext.ServerType.CLUSTER_CONTROLLER, new File(ccConfig.ccRoot));
-        executor = Executors.newCachedThreadPool();
-        IIPCI ccIPCI = new ClusterControllerIPCI();
-        clusterIPC = new IPCSystem(new InetSocketAddress(ccConfig.clusterNetPort), ccIPCI,
-                new CCNCFunctions.SerializerDeserializer());
-        IIPCI ciIPCI = new HyracksClientInterfaceIPCI();
-        clientIPC = new IPCSystem(new InetSocketAddress(ccConfig.clientNetIpAddress, ccConfig.clientNetPort), ciIPCI,
-                new JavaSerializationBasedPayloadSerializerDeserializer());
-        webServer = new WebServer(this);
-        activeRunMap = new HashMap<JobId, JobRun>();
-        runMapArchive = new LinkedHashMap<JobId, JobRun>() {
-            private static final long serialVersionUID = 1L;
-
-            protected boolean removeEldestEntry(Map.Entry<JobId, JobRun> eldest) {
-                return size() > ccConfig.jobHistorySize;
-            }
-        };
-        workQueue = new WorkQueue();
-        this.timer = new Timer(true);
-        final ClusterTopology topology = computeClusterTopology(ccConfig);
-        ccContext = new ICCContext() {
-            @Override
-            public void getIPAddressNodeMap(Map<String, Set<String>> map) throws Exception {
-                GetIpAddressNodeNameMapWork ginmw = new GetIpAddressNodeNameMapWork(ClusterControllerService.this, map);
-                workQueue.scheduleAndSync(ginmw);
-            }
-
-            @Override
-            public ClusterControllerInfo getClusterControllerInfo() {
-                return info;
-            }
-
-            @Override
-            public ClusterTopology getClusterTopology() {
-                return topology;
-            }
-        };
-        sweeper = new DeadNodeSweeper();
-        jobCounter = 0;
-    }
-
-    private static ClusterTopology computeClusterTopology(CCConfig ccConfig) throws Exception {
-        if (ccConfig.clusterTopologyDefinition == null) {
-            return null;
-        }
-        FileReader fr = new FileReader(ccConfig.clusterTopologyDefinition);
-        InputSource in = new InputSource(fr);
-        try {
-            return TopologyDefinitionParser.parse(in);
-        } finally {
-            fr.close();
-        }
-    }
-
-    @Override
-    public void start() throws Exception {
-        LOGGER.log(Level.INFO, "Starting ClusterControllerService: " + this);
-        clusterIPC.start();
-        clientIPC.start();
-        webServer.setPort(ccConfig.httpPort);
-        webServer.start();
-        workQueue.start();
-        info = new ClusterControllerInfo(ccConfig.clientNetIpAddress, ccConfig.clientNetPort,
-                webServer.getListeningPort());
-        timer.schedule(sweeper, 0, ccConfig.heartbeatPeriod);
-        jobLog.open();
-        LOGGER.log(Level.INFO, "Started ClusterControllerService");
-    }
-
-    @Override
-    public void stop() throws Exception {
-        LOGGER.log(Level.INFO, "Stopping ClusterControllerService");
-        executor.shutdownNow();
-        webServer.stop();
-        sweeper.cancel();
-        workQueue.stop();
-        jobLog.close();
-        LOGGER.log(Level.INFO, "Stopped ClusterControllerService");
-    }
-
-    public ServerContext getServerContext() {
-        return serverCtx;
-    }
-
-    public ICCContext getCCContext() {
-        return ccContext;
-    }
-
-    public Map<String, CCApplicationContext> getApplicationMap() {
-        return applications;
-    }
-
-    public Map<JobId, JobRun> getActiveRunMap() {
-        return activeRunMap;
-    }
-
-    public Map<JobId, JobRun> getRunMapArchive() {
-        return runMapArchive;
-    }
-
-    public Map<String, Set<String>> getIpAddressNodeNameMap() {
-        return ipAddressNodeNameMap;
-    }
-
-    public LogFile getJobLogFile() {
-        return jobLog;
-    }
-
-    public WorkQueue getWorkQueue() {
-        return workQueue;
-    }
-
-    public Executor getExecutor() {
-        return executor;
-    }
-
-    public Map<String, NodeControllerState> getNodeMap() {
-        return nodeRegistry;
-    }
-
-    public CCConfig getConfig() {
-        return ccConfig;
-    }
-
-    private JobId createJobId() {
-        return new JobId(jobCounter++);
-    }
-
-    public ClusterControllerInfo getClusterControllerInfo() {
-        return info;
-    }
-
-    public CCConfig getCCConfig() {
-        return ccConfig;
-    }
-
-    public IPCSystem getClusterIPC() {
-        return clusterIPC;
-    }
-
-    private class DeadNodeSweeper extends TimerTask {
-        @Override
-        public void run() {
-            workQueue.schedule(new RemoveDeadNodesWork(ClusterControllerService.this));
-        }
-    }
-
-    private class HyracksClientInterfaceIPCI implements IIPCI {
-        @Override
-        public void deliverIncomingMessage(IIPCHandle handle, long mid, long rmid, Object payload, Exception exception) {
-            HyracksClientInterfaceFunctions.Function fn = (HyracksClientInterfaceFunctions.Function) payload;
-            switch (fn.getFunctionId()) {
-                case GET_CLUSTER_CONTROLLER_INFO: {
-                    try {
-                        handle.send(mid, info, null);
-                    } catch (IPCException e) {
-                        e.printStackTrace();
-                    }
-                    return;
-                }
-
-                case CREATE_APPLICATION: {
-                    HyracksClientInterfaceFunctions.CreateApplicationFunction caf = (HyracksClientInterfaceFunctions.CreateApplicationFunction) fn;
-                    workQueue.schedule(new ApplicationCreateWork(ClusterControllerService.this, caf.getAppName(),
-                            new IPCResponder<Object>(handle, mid)));
-                    return;
-                }
-
-                case START_APPLICATION: {
-                    HyracksClientInterfaceFunctions.StartApplicationFunction saf = (HyracksClientInterfaceFunctions.StartApplicationFunction) fn;
-                    workQueue.schedule(new ApplicationStartWork(ClusterControllerService.this, saf.getAppName(),
-                            new IPCResponder<Object>(handle, mid)));
-                    return;
-                }
-
-                case DESTROY_APPLICATION: {
-                    HyracksClientInterfaceFunctions.DestroyApplicationFunction daf = (HyracksClientInterfaceFunctions.DestroyApplicationFunction) fn;
-                    workQueue.schedule(new ApplicationDestroyWork(ClusterControllerService.this, daf.getAppName(),
-                            new IPCResponder<Object>(handle, mid)));
-                    return;
-                }
-
-                case GET_JOB_STATUS: {
-                    HyracksClientInterfaceFunctions.GetJobStatusFunction gjsf = (HyracksClientInterfaceFunctions.GetJobStatusFunction) fn;
-                    workQueue.schedule(new GetJobStatusWork(ClusterControllerService.this, gjsf.getJobId(),
-                            new IPCResponder<JobStatus>(handle, mid)));
-                    return;
-                }
-
-                case START_JOB: {
-                    HyracksClientInterfaceFunctions.StartJobFunction sjf = (HyracksClientInterfaceFunctions.StartJobFunction) fn;
-                    JobId jobId = createJobId();
-                    workQueue.schedule(new JobStartWork(ClusterControllerService.this, sjf.getAppName(), sjf
-                            .getACGGFBytes(), sjf.getJobFlags(), jobId, new IPCResponder<JobId>(handle, mid)));
-                    return;
-                }
-
-                case WAIT_FOR_COMPLETION: {
-                    HyracksClientInterfaceFunctions.WaitForCompletionFunction wfcf = (HyracksClientInterfaceFunctions.WaitForCompletionFunction) fn;
-                    workQueue.schedule(new WaitForJobCompletionWork(ClusterControllerService.this, wfcf.getJobId(),
-                            new IPCResponder<Object>(handle, mid)));
-                    return;
-                }
-
-                case GET_NODE_CONTROLLERS_INFO: {
-                    workQueue.schedule(new GetNodeControllersInfoWork(ClusterControllerService.this,
-                            new IPCResponder<Map<String, NodeControllerInfo>>(handle, mid)));
-                    return;
-                }
-
-                case GET_CLUSTER_TOPOLOGY: {
-                    try {
-                        handle.send(mid, ccContext.getClusterTopology(), null);
-                    } catch (IPCException e) {
-                        e.printStackTrace();
-                    }
-                    return;
-                }
-            }
-            try {
-                handle.send(mid, null, new IllegalArgumentException("Unknown function " + fn.getFunctionId()));
-            } catch (IPCException e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-    private class ClusterControllerIPCI implements IIPCI {
-        @Override
-        public void deliverIncomingMessage(IIPCHandle handle, long mid, long rmid, Object payload, Exception exception) {
-            CCNCFunctions.Function fn = (Function) payload;
-            switch (fn.getFunctionId()) {
-                case REGISTER_NODE: {
-                    CCNCFunctions.RegisterNodeFunction rnf = (CCNCFunctions.RegisterNodeFunction) fn;
-                    workQueue.schedule(new RegisterNodeWork(ClusterControllerService.this, rnf.getNodeRegistration()));
-                    return;
-                }
-
-                case UNREGISTER_NODE: {
-                    CCNCFunctions.UnregisterNodeFunction unf = (CCNCFunctions.UnregisterNodeFunction) fn;
-                    workQueue.schedule(new UnregisterNodeWork(ClusterControllerService.this, unf.getNodeId()));
-                    return;
-                }
-
-                case NODE_HEARTBEAT: {
-                    CCNCFunctions.NodeHeartbeatFunction nhf = (CCNCFunctions.NodeHeartbeatFunction) fn;
-                    workQueue.schedule(new NodeHeartbeatWork(ClusterControllerService.this, nhf.getNodeId(), nhf
-                            .getHeartbeatData()));
-                    return;
-                }
-
-                case NOTIFY_JOBLET_CLEANUP: {
-                    CCNCFunctions.NotifyJobletCleanupFunction njcf = (CCNCFunctions.NotifyJobletCleanupFunction) fn;
-                    workQueue.schedule(new JobletCleanupNotificationWork(ClusterControllerService.this,
-                            njcf.getJobId(), njcf.getNodeId()));
-                    return;
-                }
-
-                case REPORT_PROFILE: {
-                    CCNCFunctions.ReportProfileFunction rpf = (CCNCFunctions.ReportProfileFunction) fn;
-                    workQueue.schedule(new ReportProfilesWork(ClusterControllerService.this, rpf.getProfiles()));
-                    return;
-                }
-
-                case NOTIFY_TASK_COMPLETE: {
-                    CCNCFunctions.NotifyTaskCompleteFunction ntcf = (CCNCFunctions.NotifyTaskCompleteFunction) fn;
-                    workQueue.schedule(new TaskCompleteWork(ClusterControllerService.this, ntcf.getJobId(), ntcf
-                            .getTaskId(), ntcf.getNodeId(), ntcf.getStatistics()));
-                    return;
-                }
-                case NOTIFY_TASK_FAILURE: {
-                    CCNCFunctions.NotifyTaskFailureFunction ntff = (CCNCFunctions.NotifyTaskFailureFunction) fn;
-                    workQueue.schedule(new TaskFailureWork(ClusterControllerService.this, ntff.getJobId(), ntff
-                            .getTaskId(), ntff.getDetails(), ntff.getDetails()));
-                    return;
-                }
-
-                case REGISTER_PARTITION_PROVIDER: {
-                    CCNCFunctions.RegisterPartitionProviderFunction rppf = (CCNCFunctions.RegisterPartitionProviderFunction) fn;
-                    workQueue.schedule(new RegisterPartitionAvailibilityWork(ClusterControllerService.this, rppf
-                            .getPartitionDescriptor()));
-                    return;
-                }
-
-                case REGISTER_PARTITION_REQUEST: {
-                    CCNCFunctions.RegisterPartitionRequestFunction rprf = (CCNCFunctions.RegisterPartitionRequestFunction) fn;
-                    workQueue.schedule(new RegisterPartitionRequestWork(ClusterControllerService.this, rprf
-                            .getPartitionRequest()));
-                    return;
-                }
-
-                case APPLICATION_STATE_CHANGE_RESPONSE: {
-                    CCNCFunctions.ApplicationStateChangeResponseFunction astrf = (CCNCFunctions.ApplicationStateChangeResponseFunction) fn;
-                    workQueue.schedule(new ApplicationStateChangeWork(ClusterControllerService.this, astrf));
-                    return;
-                }
-                case SEND_APPLICATION_MESSAGE: {
-                    CCNCFunctions.SendApplicationMessageFunction rsf = (CCNCFunctions.SendApplicationMessageFunction) fn;
-                    workQueue.schedule(new ApplicationMessageWork(ClusterControllerService.this, rsf.getMessage(), rsf
-                            .getAppName(), rsf.getNodeId()));
-                    return;
-                }
-            }
-            LOGGER.warning("Unknown function: " + fn.getFunctionId());
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityClusterPlanner.java b/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityClusterPlanner.java
deleted file mode 100644
index 6c52aac..0000000
--- a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityClusterPlanner.java
+++ /dev/null
@@ -1,425 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.cc.scheduler;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.tuple.Pair;
-
-import edu.uci.ics.hyracks.api.constraints.expressions.LValueConstraintExpression;
-import edu.uci.ics.hyracks.api.constraints.expressions.PartitionCountExpression;
-import edu.uci.ics.hyracks.api.dataflow.ActivityId;
-import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
-import edu.uci.ics.hyracks.api.dataflow.TaskId;
-import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
-import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
-import edu.uci.ics.hyracks.api.dataflow.connectors.PipeliningConnectorPolicy;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.job.ActivityCluster;
-import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
-import edu.uci.ics.hyracks.api.partitions.PartitionId;
-import edu.uci.ics.hyracks.control.cc.job.ActivityClusterPlan;
-import edu.uci.ics.hyracks.control.cc.job.ActivityPlan;
-import edu.uci.ics.hyracks.control.cc.job.JobRun;
-import edu.uci.ics.hyracks.control.cc.job.Task;
-import edu.uci.ics.hyracks.control.cc.job.TaskCluster;
-import edu.uci.ics.hyracks.control.cc.job.TaskClusterId;
-
-public class ActivityClusterPlanner {
-    private static final boolean USE_CONNECTOR_POLICY_IN_TASK_CLUSTER_CONSTRUCTION = true;
-
-    private static final Logger LOGGER = Logger.getLogger(ActivityClusterPlanner.class.getName());
-
-    private final JobScheduler scheduler;
-
-    private final Map<PartitionId, TaskCluster> partitionProducingTaskClusterMap;
-
-    public ActivityClusterPlanner(JobScheduler newJobScheduler) {
-        this.scheduler = newJobScheduler;
-        partitionProducingTaskClusterMap = new HashMap<PartitionId, TaskCluster>();
-    }
-
-    public ActivityClusterPlan planActivityCluster(ActivityCluster ac) throws HyracksException {
-        JobRun jobRun = scheduler.getJobRun();
-        Map<ActivityId, ActivityPartitionDetails> pcMap = computePartitionCounts(ac);
-
-        Map<ActivityId, ActivityPlan> activityPlanMap = buildActivityPlanMap(ac, jobRun, pcMap);
-
-        assignConnectorPolicy(ac, activityPlanMap);
-
-        TaskCluster[] taskClusters = computeTaskClusters(ac, jobRun, activityPlanMap);
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Plan for " + ac);
-            LOGGER.info("Built " + taskClusters.length + " Task Clusters");
-            for (TaskCluster tc : taskClusters) {
-                LOGGER.info("Tasks: " + Arrays.toString(tc.getTasks()));
-            }
-        }
-
-        return new ActivityClusterPlan(taskClusters, activityPlanMap);
-    }
-
-    private Map<ActivityId, ActivityPlan> buildActivityPlanMap(ActivityCluster ac, JobRun jobRun,
-            Map<ActivityId, ActivityPartitionDetails> pcMap) {
-        Map<ActivityId, ActivityPlan> activityPlanMap = new HashMap<ActivityId, ActivityPlan>();
-        Set<ActivityId> depAnIds = new HashSet<ActivityId>();
-        for (ActivityId anId : ac.getActivityMap().keySet()) {
-            depAnIds.clear();
-            getDependencyActivityIds(depAnIds, anId, ac);
-            ActivityPartitionDetails apd = pcMap.get(anId);
-            Task[] tasks = new Task[apd.getPartitionCount()];
-            ActivityPlan activityPlan = new ActivityPlan(apd);
-            for (int i = 0; i < tasks.length; ++i) {
-                TaskId tid = new TaskId(anId, i);
-                tasks[i] = new Task(tid, activityPlan);
-                for (ActivityId danId : depAnIds) {
-                    ActivityCluster dAC = ac.getActivityClusterGraph().getActivityMap().get(danId);
-                    ActivityClusterPlan dACP = jobRun.getActivityClusterPlanMap().get(dAC.getId());
-                    assert dACP != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for dependency AC: Encountered no plan for ActivityID "
-                            + danId;
-                    Task[] dATasks = dACP.getActivityPlanMap().get(danId).getTasks();
-                    assert dATasks != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for dependency AC: Encountered no plan for ActivityID "
-                            + danId;
-                    assert dATasks.length == tasks.length : "Dependency activity partitioned differently from dependent: "
-                            + dATasks.length + " != " + tasks.length;
-                    Task dTask = dATasks[i];
-                    TaskId dTaskId = dTask.getTaskId();
-                    tasks[i].getDependencies().add(dTaskId);
-                    dTask.getDependents().add(tid);
-                }
-            }
-            activityPlan.setTasks(tasks);
-            activityPlanMap.put(anId, activityPlan);
-        }
-        return activityPlanMap;
-    }
-
-    private TaskCluster[] computeTaskClusters(ActivityCluster ac, JobRun jobRun,
-            Map<ActivityId, ActivityPlan> activityPlanMap) {
-        Set<ActivityId> activities = ac.getActivityMap().keySet();
-        Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = computeTaskConnectivity(jobRun,
-                activityPlanMap, activities);
-
-        TaskCluster[] taskClusters = USE_CONNECTOR_POLICY_IN_TASK_CLUSTER_CONSTRUCTION ? buildConnectorPolicyAwareTaskClusters(
-                ac, activityPlanMap, taskConnectivity) : buildConnectorPolicyUnawareTaskClusters(ac, activityPlanMap);
-
-        for (TaskCluster tc : taskClusters) {
-            Set<TaskCluster> tcDependencyTaskClusters = tc.getDependencyTaskClusters();
-            for (Task ts : tc.getTasks()) {
-                TaskId tid = ts.getTaskId();
-                List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(tid);
-                if (cInfoList != null) {
-                    for (Pair<TaskId, ConnectorDescriptorId> p : cInfoList) {
-                        Task targetTS = activityPlanMap.get(p.getLeft().getActivityId()).getTasks()[p.getLeft()
-                                .getPartition()];
-                        TaskCluster targetTC = targetTS.getTaskCluster();
-                        if (targetTC != tc) {
-                            ConnectorDescriptorId cdId = p.getRight();
-                            PartitionId pid = new PartitionId(jobRun.getJobId(), cdId, tid.getPartition(), p.getLeft()
-                                    .getPartition());
-                            tc.getProducedPartitions().add(pid);
-                            targetTC.getRequiredPartitions().add(pid);
-                            partitionProducingTaskClusterMap.put(pid, tc);
-                        }
-                    }
-                }
-
-                for (TaskId dTid : ts.getDependencies()) {
-                    TaskCluster dTC = getTaskCluster(dTid);
-                    dTC.getDependentTaskClusters().add(tc);
-                    tcDependencyTaskClusters.add(dTC);
-                }
-            }
-        }
-        return taskClusters;
-    }
-
-    private TaskCluster[] buildConnectorPolicyUnawareTaskClusters(ActivityCluster ac,
-            Map<ActivityId, ActivityPlan> activityPlanMap) {
-        List<Task> taskStates = new ArrayList<Task>();
-        for (ActivityId anId : ac.getActivityMap().keySet()) {
-            ActivityPlan ap = activityPlanMap.get(anId);
-            Task[] tasks = ap.getTasks();
-            for (Task t : tasks) {
-                taskStates.add(t);
-            }
-        }
-        TaskCluster tc = new TaskCluster(new TaskClusterId(ac.getId(), 0), ac, taskStates.toArray(new Task[taskStates
-                .size()]));
-        for (Task t : tc.getTasks()) {
-            t.setTaskCluster(tc);
-        }
-        return new TaskCluster[] { tc };
-    }
-
-    private Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> computeTaskConnectivity(JobRun jobRun,
-            Map<ActivityId, ActivityPlan> activityPlanMap, Set<ActivityId> activities) {
-        Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = new HashMap<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>>();
-        ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
-        BitSet targetBitmap = new BitSet();
-        for (ActivityId ac1 : activities) {
-            ActivityCluster ac = acg.getActivityMap().get(ac1);
-            Task[] ac1TaskStates = activityPlanMap.get(ac1).getTasks();
-            int nProducers = ac1TaskStates.length;
-            List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(ac1);
-            if (outputConns != null) {
-                for (IConnectorDescriptor c : outputConns) {
-                    ConnectorDescriptorId cdId = c.getConnectorId();
-                    ActivityId ac2 = ac.getConsumerActivity(cdId);
-                    Task[] ac2TaskStates = activityPlanMap.get(ac2).getTasks();
-                    int nConsumers = ac2TaskStates.length;
-                    for (int i = 0; i < nProducers; ++i) {
-                        c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
-                        List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(ac1TaskStates[i]
-                                .getTaskId());
-                        if (cInfoList == null) {
-                            cInfoList = new ArrayList<Pair<TaskId, ConnectorDescriptorId>>();
-                            taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
-                        }
-                        for (int j = targetBitmap.nextSetBit(0); j >= 0; j = targetBitmap.nextSetBit(j + 1)) {
-                            TaskId targetTID = ac2TaskStates[j].getTaskId();
-                            cInfoList.add(Pair.<TaskId, ConnectorDescriptorId> of(targetTID, cdId));
-                        }
-                    }
-                }
-            }
-        }
-        return taskConnectivity;
-    }
-
-    private TaskCluster[] buildConnectorPolicyAwareTaskClusters(ActivityCluster ac,
-            Map<ActivityId, ActivityPlan> activityPlanMap,
-            Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity) {
-        Map<TaskId, Set<TaskId>> taskClusterMap = new HashMap<TaskId, Set<TaskId>>();
-        for (ActivityId anId : ac.getActivityMap().keySet()) {
-            ActivityPlan ap = activityPlanMap.get(anId);
-            Task[] tasks = ap.getTasks();
-            for (Task t : tasks) {
-                Set<TaskId> cluster = new HashSet<TaskId>();
-                TaskId tid = t.getTaskId();
-                cluster.add(tid);
-                taskClusterMap.put(tid, cluster);
-            }
-        }
-
-        JobRun jobRun = scheduler.getJobRun();
-        Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = jobRun.getConnectorPolicyMap();
-        for (Map.Entry<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> e : taskConnectivity.entrySet()) {
-            Set<TaskId> cluster = taskClusterMap.get(e.getKey());
-            for (Pair<TaskId, ConnectorDescriptorId> p : e.getValue()) {
-                IConnectorPolicy cPolicy = connectorPolicies.get(p.getRight());
-                if (cPolicy.requiresProducerConsumerCoscheduling()) {
-                    cluster.add(p.getLeft());
-                }
-            }
-        }
-
-        /*
-         * taskClusterMap contains for every TID x, x -> { coscheduled consumer TIDs U x }
-         * We compute the transitive closure of this relation to find the largest set of
-         * tasks that need to be co-scheduled
-         */
-        int counter = 0;
-        TaskId[] ordinalList = new TaskId[taskClusterMap.size()];
-        Map<TaskId, Integer> ordinalMap = new HashMap<TaskId, Integer>();
-        for (TaskId tid : taskClusterMap.keySet()) {
-            ordinalList[counter] = tid;
-            ordinalMap.put(tid, counter);
-            ++counter;
-        }
-
-        int n = ordinalList.length;
-        BitSet[] paths = new BitSet[n];
-        for (Map.Entry<TaskId, Set<TaskId>> e : taskClusterMap.entrySet()) {
-            int i = ordinalMap.get(e.getKey());
-            BitSet bsi = paths[i];
-            if (bsi == null) {
-                bsi = new BitSet(n);
-                paths[i] = bsi;
-            }
-            for (TaskId ttid : e.getValue()) {
-                int j = ordinalMap.get(ttid);
-                paths[i].set(j);
-                BitSet bsj = paths[j];
-                if (bsj == null) {
-                    bsj = new BitSet(n);
-                    paths[j] = bsj;
-                }
-                bsj.set(i);
-            }
-        }
-        for (int k = 0; k < n; ++k) {
-            for (int i = paths[k].nextSetBit(0); i >= 0; i = paths[k].nextSetBit(i + 1)) {
-                for (int j = paths[i].nextClearBit(0); j < n && j >= 0; j = paths[i].nextClearBit(j + 1)) {
-                    paths[i].set(j, paths[k].get(j));
-                    paths[j].set(i, paths[i].get(j));
-                }
-            }
-        }
-        BitSet pending = new BitSet(n);
-        pending.set(0, n);
-        List<List<TaskId>> clusters = new ArrayList<List<TaskId>>();
-        for (int i = pending.nextSetBit(0); i >= 0; i = pending.nextSetBit(i)) {
-            List<TaskId> cluster = new ArrayList<TaskId>();
-            for (int j = paths[i].nextSetBit(0); j >= 0; j = paths[i].nextSetBit(j + 1)) {
-                cluster.add(ordinalList[j]);
-                pending.clear(j);
-            }
-            clusters.add(cluster);
-        }
-
-        List<TaskCluster> tcSet = new ArrayList<TaskCluster>();
-        counter = 0;
-        for (List<TaskId> cluster : clusters) {
-            List<Task> taskStates = new ArrayList<Task>();
-            for (TaskId tid : cluster) {
-                taskStates.add(activityPlanMap.get(tid.getActivityId()).getTasks()[tid.getPartition()]);
-            }
-            TaskCluster tc = new TaskCluster(new TaskClusterId(ac.getId(), counter++), ac,
-                    taskStates.toArray(new Task[taskStates.size()]));
-            tcSet.add(tc);
-            for (TaskId tid : cluster) {
-                activityPlanMap.get(tid.getActivityId()).getTasks()[tid.getPartition()].setTaskCluster(tc);
-            }
-        }
-        TaskCluster[] taskClusters = tcSet.toArray(new TaskCluster[tcSet.size()]);
-        return taskClusters;
-    }
-
-    private TaskCluster getTaskCluster(TaskId tid) {
-        JobRun run = scheduler.getJobRun();
-        ActivityCluster ac = run.getActivityClusterGraph().getActivityMap().get(tid.getActivityId());
-        ActivityClusterPlan acp = run.getActivityClusterPlanMap().get(ac.getId());
-        Task[] tasks = acp.getActivityPlanMap().get(tid.getActivityId()).getTasks();
-        Task task = tasks[tid.getPartition()];
-        assert task.getTaskId().equals(tid);
-        return task.getTaskCluster();
-    }
-
-    private void getDependencyActivityIds(Set<ActivityId> depAnIds, ActivityId anId, ActivityCluster ac) {
-        Set<ActivityId> blockers = ac.getBlocked2BlockerMap().get(anId);
-        if (blockers != null) {
-            depAnIds.addAll(blockers);
-        }
-    }
-
-    private void assignConnectorPolicy(ActivityCluster ac, Map<ActivityId, ActivityPlan> taskMap) {
-        Map<ConnectorDescriptorId, IConnectorPolicy> cPolicyMap = new HashMap<ConnectorDescriptorId, IConnectorPolicy>();
-        Set<ActivityId> activities = ac.getActivityMap().keySet();
-        BitSet targetBitmap = new BitSet();
-        for (ActivityId a1 : activities) {
-            Task[] ac1TaskStates = taskMap.get(a1).getTasks();
-            int nProducers = ac1TaskStates.length;
-            List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(a1);
-            if (outputConns != null) {
-                for (IConnectorDescriptor c : outputConns) {
-                    ConnectorDescriptorId cdId = c.getConnectorId();
-                    ActivityId a2 = ac.getConsumerActivity(cdId);
-                    Task[] ac2TaskStates = taskMap.get(a2).getTasks();
-                    int nConsumers = ac2TaskStates.length;
-
-                    int[] fanouts = new int[nProducers];
-                    for (int i = 0; i < nProducers; ++i) {
-                        c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
-                        fanouts[i] = targetBitmap.cardinality();
-                    }
-                    IConnectorPolicy cp = assignConnectorPolicy(ac, c, nProducers, nConsumers, fanouts);
-                    cPolicyMap.put(cdId, cp);
-                }
-            }
-        }
-        scheduler.getJobRun().getConnectorPolicyMap().putAll(cPolicyMap);
-    }
-
-    private IConnectorPolicy assignConnectorPolicy(ActivityCluster ac, IConnectorDescriptor c, int nProducers,
-            int nConsumers, int[] fanouts) {
-        IConnectorPolicyAssignmentPolicy cpap = ac.getConnectorPolicyAssignmentPolicy();
-        if (cpap != null) {
-            return cpap.getConnectorPolicyAssignment(c, nProducers, nConsumers, fanouts);
-        }
-        return new PipeliningConnectorPolicy();
-    }
-
-    private Map<ActivityId, ActivityPartitionDetails> computePartitionCounts(ActivityCluster ac)
-            throws HyracksException {
-        PartitionConstraintSolver solver = scheduler.getSolver();
-        Set<LValueConstraintExpression> lValues = new HashSet<LValueConstraintExpression>();
-        for (ActivityId anId : ac.getActivityMap().keySet()) {
-            lValues.add(new PartitionCountExpression(anId.getOperatorDescriptorId()));
-        }
-        solver.solve(lValues);
-        Map<OperatorDescriptorId, Integer> nPartMap = new HashMap<OperatorDescriptorId, Integer>();
-        for (LValueConstraintExpression lv : lValues) {
-            Object value = solver.getValue(lv);
-            if (value == null) {
-                throw new HyracksException("No value found for " + lv);
-            }
-            if (!(value instanceof Number)) {
-                throw new HyracksException("Unexpected type of value bound to " + lv + ": " + value.getClass() + "("
-                        + value + ")");
-            }
-            int nParts = ((Number) value).intValue();
-            if (nParts <= 0) {
-                throw new HyracksException("Unsatisfiable number of partitions for " + lv + ": " + nParts);
-            }
-            nPartMap.put(((PartitionCountExpression) lv).getOperatorDescriptorId(), Integer.valueOf(nParts));
-        }
-        Map<ActivityId, ActivityPartitionDetails> activityPartsMap = new HashMap<ActivityId, ActivityPartitionDetails>();
-        for (ActivityId anId : ac.getActivityMap().keySet()) {
-            int nParts = nPartMap.get(anId.getOperatorDescriptorId());
-            int[] nInputPartitions = null;
-            List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(anId);
-            if (inputs != null) {
-                nInputPartitions = new int[inputs.size()];
-                for (int i = 0; i < nInputPartitions.length; ++i) {
-                    ConnectorDescriptorId cdId = inputs.get(i).getConnectorId();
-                    ActivityId aid = ac.getProducerActivity(cdId);
-                    Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
-                    nInputPartitions[i] = nPartInt;
-                }
-            }
-            int[] nOutputPartitions = null;
-            List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(anId);
-            if (outputs != null) {
-                nOutputPartitions = new int[outputs.size()];
-                for (int i = 0; i < nOutputPartitions.length; ++i) {
-                    ConnectorDescriptorId cdId = outputs.get(i).getConnectorId();
-                    ActivityId aid = ac.getConsumerActivity(cdId);
-                    Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
-                    nOutputPartitions[i] = nPartInt;
-                }
-            }
-            ActivityPartitionDetails apd = new ActivityPartitionDetails(nParts, nInputPartitions, nOutputPartitions);
-            activityPartsMap.put(anId, apd);
-        }
-        return activityPartsMap;
-    }
-
-    public Map<? extends PartitionId, ? extends TaskCluster> getPartitionProducingTaskClusterMap() {
-        return partitionProducingTaskClusterMap;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-common/pom.xml b/hyracks-control/hyracks-control-common/pom.xml
deleted file mode 100644
index 8e20020..0000000
--- a/hyracks-control/hyracks-control-common/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-control-common</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-control</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>commons-io</groupId>
-  		<artifactId>commons-io</artifactId>
-  		<version>1.4</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java b/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
deleted file mode 100644
index a25250a..0000000
--- a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.common.base;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.messages.IMessage;
-import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
-import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
-import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
-import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
-import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
-import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
-import edu.uci.ics.hyracks.control.common.job.profiling.om.TaskProfile;
-
-public interface IClusterController {
-    public void registerNode(NodeRegistration reg) throws Exception;
-
-    public void unregisterNode(String nodeId) throws Exception;
-
-    public void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
-            throws Exception;
-
-    public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, String details) throws Exception;
-
-    public void notifyJobletCleanup(JobId jobId, String nodeId) throws Exception;
-
-    public void nodeHeartbeat(String id, HeartbeatData hbData) throws Exception;
-
-    public void reportProfile(String id, List<JobProfile> profiles) throws Exception;
-
-    public void registerPartitionProvider(PartitionDescriptor partitionDescriptor) throws Exception;
-
-    public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception;
-
-    public void notifyApplicationStateChange(String nodeId, String appName, ApplicationStatus status) throws Exception;
-
-    public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception;
-}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java b/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
deleted file mode 100644
index 8f0056f..0000000
--- a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
+++ /dev/null
@@ -1,845 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.common.ipc;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.comm.NetworkAddress;
-import edu.uci.ics.hyracks.api.dataflow.ActivityId;
-import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
-import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
-import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
-import edu.uci.ics.hyracks.api.dataflow.TaskId;
-import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
-import edu.uci.ics.hyracks.api.job.JobFlag;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobStatus;
-import edu.uci.ics.hyracks.api.partitions.PartitionId;
-import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
-import edu.uci.ics.hyracks.control.common.controllers.NodeParameters;
-import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
-import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
-import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
-import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
-import edu.uci.ics.hyracks.control.common.job.PartitionState;
-import edu.uci.ics.hyracks.control.common.job.TaskAttemptDescriptor;
-import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
-import edu.uci.ics.hyracks.control.common.job.profiling.om.TaskProfile;
-import edu.uci.ics.hyracks.ipc.api.IPayloadSerializerDeserializer;
-import edu.uci.ics.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
-
-public class CCNCFunctions {
-    private static final Logger LOGGER = Logger.getLogger(CCNCFunctions.class.getName());
-
-    private static final int FID_CODE_SIZE = 1;
-
-    public enum FunctionId {
-        REGISTER_NODE,
-        UNREGISTER_NODE,
-        NOTIFY_JOBLET_CLEANUP,
-        NOTIFY_TASK_COMPLETE,
-        NOTIFY_TASK_FAILURE,
-        NODE_HEARTBEAT,
-        REPORT_PROFILE,
-        REGISTER_PARTITION_PROVIDER,
-        REGISTER_PARTITION_REQUEST,
-        APPLICATION_STATE_CHANGE_RESPONSE,
-
-        NODE_REGISTRATION_RESULT,
-        START_TASKS,
-        ABORT_TASKS,
-        CLEANUP_JOBLET,
-        CREATE_APPLICATION,
-        DESTROY_APPLICATION,
-        REPORT_PARTITION_AVAILABILITY,
-        SEND_APPLICATION_MESSAGE,
-
-        OTHER
-    }
-
-    public static class SendApplicationMessageFunction extends Function {
-        private static final long serialVersionUID = 1L;
-        private byte[] serializedMessage;
-        private String nodeId;
-        private String appName;
-
-        public String getNodeId() {
-            return nodeId;
-        }
-
-        public void setNodeId(String nodeId) {
-            this.nodeId = nodeId;
-        }
-
-        public byte[] getMessage() {
-            return serializedMessage;
-        }
-
-        public SendApplicationMessageFunction(byte[] data, String appName, String nodeId) {
-            super();
-            this.serializedMessage = data;
-            this.nodeId = nodeId;
-            this.appName = appName;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.SEND_APPLICATION_MESSAGE;
-        }
-
-        public String getAppName() {
-            return appName;
-        }
-
-    }
-
-    public static abstract class Function implements Serializable {
-        private static final long serialVersionUID = 1L;
-
-        public abstract FunctionId getFunctionId();
-    }
-
-    public static class RegisterNodeFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final NodeRegistration reg;
-
-        public RegisterNodeFunction(NodeRegistration reg) {
-            this.reg = reg;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.REGISTER_NODE;
-        }
-
-        public NodeRegistration getNodeRegistration() {
-            return reg;
-        }
-    }
-
-    public static class UnregisterNodeFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final String nodeId;
-
-        public UnregisterNodeFunction(String nodeId) {
-            this.nodeId = nodeId;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.UNREGISTER_NODE;
-        }
-
-        public String getNodeId() {
-            return nodeId;
-        }
-    }
-
-    public static class NotifyTaskCompleteFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final JobId jobId;
-        private final TaskAttemptId taskId;
-        private final String nodeId;
-        private final TaskProfile statistics;
-
-        public NotifyTaskCompleteFunction(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics) {
-            this.jobId = jobId;
-            this.taskId = taskId;
-            this.nodeId = nodeId;
-            this.statistics = statistics;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.NOTIFY_TASK_COMPLETE;
-        }
-
-        public JobId getJobId() {
-            return jobId;
-        }
-
-        public TaskAttemptId getTaskId() {
-            return taskId;
-        }
-
-        public String getNodeId() {
-            return nodeId;
-        }
-
-        public TaskProfile getStatistics() {
-            return statistics;
-        }
-    }
-
-    public static class NotifyTaskFailureFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final JobId jobId;
-        private final TaskAttemptId taskId;
-        private final String nodeId;
-        private final String details;
-
-        public NotifyTaskFailureFunction(JobId jobId, TaskAttemptId taskId, String nodeId, String details) {
-            this.jobId = jobId;
-            this.taskId = taskId;
-            this.nodeId = nodeId;
-            this.details = details;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.NOTIFY_TASK_FAILURE;
-        }
-
-        public JobId getJobId() {
-            return jobId;
-        }
-
-        public TaskAttemptId getTaskId() {
-            return taskId;
-        }
-
-        public String getNodeId() {
-            return nodeId;
-        }
-
-        public String getDetails() {
-            return details;
-        }
-    }
-
-    public static class NotifyJobletCleanupFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final JobId jobId;
-        private final String nodeId;
-
-        public NotifyJobletCleanupFunction(JobId jobId, String nodeId) {
-            this.jobId = jobId;
-            this.nodeId = nodeId;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.NOTIFY_JOBLET_CLEANUP;
-        }
-
-        public JobId getJobId() {
-            return jobId;
-        }
-
-        public String getNodeId() {
-            return nodeId;
-        }
-    }
-
-    public static class NodeHeartbeatFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final String nodeId;
-        private final HeartbeatData hbData;
-
-        public NodeHeartbeatFunction(String nodeId, HeartbeatData hbData) {
-            this.nodeId = nodeId;
-            this.hbData = hbData;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.NODE_HEARTBEAT;
-        }
-
-        public String getNodeId() {
-            return nodeId;
-        }
-
-        public HeartbeatData getHeartbeatData() {
-            return hbData;
-        }
-    }
-
-    public static class ReportProfileFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final String nodeId;
-        private final List<JobProfile> profiles;
-
-        public ReportProfileFunction(String nodeId, List<JobProfile> profiles) {
-            this.nodeId = nodeId;
-            this.profiles = profiles;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.REPORT_PROFILE;
-        }
-
-        public String getNodeId() {
-            return nodeId;
-        }
-
-        public List<JobProfile> getProfiles() {
-            return profiles;
-        }
-    }
-
-    public static class RegisterPartitionProviderFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final PartitionDescriptor partitionDescriptor;
-
-        public RegisterPartitionProviderFunction(PartitionDescriptor partitionDescriptor) {
-            this.partitionDescriptor = partitionDescriptor;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.REGISTER_PARTITION_PROVIDER;
-        }
-
-        public PartitionDescriptor getPartitionDescriptor() {
-            return partitionDescriptor;
-        }
-
-        public static Object deserialize(ByteBuffer buffer, int length) throws Exception {
-            ByteArrayInputStream bais = new ByteArrayInputStream(buffer.array(), buffer.position(), length);
-            DataInputStream dis = new DataInputStream(bais);
-
-            // Read PartitionId
-            PartitionId pid = readPartitionId(dis);
-
-            // Read nodeId
-            String nodeId = dis.readUTF();
-
-            // Read TaskAttemptId
-            TaskAttemptId taId = readTaskAttemptId(dis);
-
-            // Read reusable flag
-            boolean reusable = dis.readBoolean();
-
-            // Read Partition State
-            PartitionState state = readPartitionState(dis);
-
-            PartitionDescriptor pd = new PartitionDescriptor(pid, nodeId, taId, reusable);
-            pd.setState(state);
-            return new RegisterPartitionProviderFunction(pd);
-        }
-
-        public static void serialize(OutputStream out, Object object) throws Exception {
-            RegisterPartitionProviderFunction fn = (RegisterPartitionProviderFunction) object;
-
-            DataOutputStream dos = new DataOutputStream(out);
-
-            PartitionDescriptor pd = fn.getPartitionDescriptor();
-
-            // Write PartitionId
-            writePartitionId(dos, pd.getPartitionId());
-
-            // Write nodeId
-            dos.writeUTF(pd.getNodeId());
-
-            // Write TaskAttemptId
-            writeTaskAttemptId(dos, pd.getProducingTaskAttemptId());
-
-            // Write reusable flag
-            dos.writeBoolean(pd.isReusable());
-
-            // Write Partition State
-            writePartitionState(dos, pd.getState());
-        }
-    }
-
-    public static class RegisterPartitionRequestFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final PartitionRequest partitionRequest;
-
-        public RegisterPartitionRequestFunction(PartitionRequest partitionRequest) {
-            this.partitionRequest = partitionRequest;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.REGISTER_PARTITION_REQUEST;
-        }
-
-        public PartitionRequest getPartitionRequest() {
-            return partitionRequest;
-        }
-
-        public static Object deserialize(ByteBuffer buffer, int length) throws Exception {
-            ByteArrayInputStream bais = new ByteArrayInputStream(buffer.array(), buffer.position(), length);
-            DataInputStream dis = new DataInputStream(bais);
-
-            // Read PartitionId
-            PartitionId pid = readPartitionId(dis);
-
-            // Read nodeId
-            String nodeId = dis.readUTF();
-
-            // Read TaskAttemptId
-            TaskAttemptId taId = readTaskAttemptId(dis);
-
-            // Read Partition State
-            PartitionState state = readPartitionState(dis);
-
-            PartitionRequest pr = new PartitionRequest(pid, nodeId, taId, state);
-            return new RegisterPartitionRequestFunction(pr);
-        }
-
-        public static void serialize(OutputStream out, Object object) throws Exception {
-            RegisterPartitionRequestFunction fn = (RegisterPartitionRequestFunction) object;
-
-            DataOutputStream dos = new DataOutputStream(out);
-
-            PartitionRequest pr = fn.getPartitionRequest();
-
-            // Write PartitionId
-            writePartitionId(dos, pr.getPartitionId());
-
-            // Write nodeId
-            dos.writeUTF(pr.getNodeId());
-
-            // Write TaskAttemptId
-            writeTaskAttemptId(dos, pr.getRequestingTaskAttemptId());
-
-            // Write Partition State
-            writePartitionState(dos, pr.getMinimumState());
-        }
-    }
-
-    public static class ApplicationStateChangeResponseFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final String nodeId;
-        private final String appName;
-        private final ApplicationStatus status;
-
-        public ApplicationStateChangeResponseFunction(String nodeId, String appName, ApplicationStatus status) {
-            this.nodeId = nodeId;
-            this.appName = appName;
-            this.status = status;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.APPLICATION_STATE_CHANGE_RESPONSE;
-        }
-
-        public String getNodeId() {
-            return nodeId;
-        }
-
-        public String getApplicationName() {
-            return appName;
-        }
-
-        public ApplicationStatus getStatus() {
-            return status;
-        }
-    }
-
-    public static class NodeRegistrationResult extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final NodeParameters params;
-
-        private final Exception exception;
-
-        public NodeRegistrationResult(NodeParameters params, Exception exception) {
-            this.params = params;
-            this.exception = exception;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.NODE_REGISTRATION_RESULT;
-        }
-
-        public NodeParameters getNodeParameters() {
-            return params;
-        }
-
-        public Exception getException() {
-            return exception;
-        }
-    }
-
-    public static class StartTasksFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final String appName;
-        private final JobId jobId;
-        private final byte[] planBytes;
-        private final List<TaskAttemptDescriptor> taskDescriptors;
-        private final Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies;
-        private final EnumSet<JobFlag> flags;
-
-        public StartTasksFunction(String appName, JobId jobId, byte[] planBytes,
-                List<TaskAttemptDescriptor> taskDescriptors,
-                Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies, EnumSet<JobFlag> flags) {
-            this.appName = appName;
-            this.jobId = jobId;
-            this.planBytes = planBytes;
-            this.taskDescriptors = taskDescriptors;
-            this.connectorPolicies = connectorPolicies;
-            this.flags = flags;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.START_TASKS;
-        }
-
-        public String getAppName() {
-            return appName;
-        }
-
-        public JobId getJobId() {
-            return jobId;
-        }
-
-        public byte[] getPlanBytes() {
-            return planBytes;
-        }
-
-        public List<TaskAttemptDescriptor> getTaskDescriptors() {
-            return taskDescriptors;
-        }
-
-        public Map<ConnectorDescriptorId, IConnectorPolicy> getConnectorPolicies() {
-            return connectorPolicies;
-        }
-
-        public EnumSet<JobFlag> getFlags() {
-            return flags;
-        }
-    }
-
-    public static class AbortTasksFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final JobId jobId;
-        private final List<TaskAttemptId> tasks;
-
-        public AbortTasksFunction(JobId jobId, List<TaskAttemptId> tasks) {
-            this.jobId = jobId;
-            this.tasks = tasks;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.ABORT_TASKS;
-        }
-
-        public JobId getJobId() {
-            return jobId;
-        }
-
-        public List<TaskAttemptId> getTasks() {
-            return tasks;
-        }
-    }
-
-    public static class CleanupJobletFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final JobId jobId;
-        private final JobStatus status;
-
-        public CleanupJobletFunction(JobId jobId, JobStatus status) {
-            this.jobId = jobId;
-            this.status = status;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.CLEANUP_JOBLET;
-        }
-
-        public JobId getJobId() {
-            return jobId;
-        }
-
-        public JobStatus getStatus() {
-            return status;
-        }
-    }
-
-    public static class CreateApplicationFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final String appName;
-        private final boolean deployHar;
-        private final byte[] serializedDistributedState;
-
-        public CreateApplicationFunction(String appName, boolean deployHar, byte[] serializedDistributedState) {
-            this.appName = appName;
-            this.deployHar = deployHar;
-            this.serializedDistributedState = serializedDistributedState;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.CREATE_APPLICATION;
-        }
-
-        public String getAppName() {
-            return appName;
-        }
-
-        public boolean isDeployHar() {
-            return deployHar;
-        }
-
-        public byte[] getSerializedDistributedState() {
-            return serializedDistributedState;
-        }
-    }
-
-    public static class DestroyApplicationFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final String appName;
-
-        public DestroyApplicationFunction(String appName) {
-            this.appName = appName;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.DESTROY_APPLICATION;
-        }
-
-        public String getAppName() {
-            return appName;
-        }
-    }
-
-    public static class ReportPartitionAvailabilityFunction extends Function {
-        private static final long serialVersionUID = 1L;
-
-        private final PartitionId pid;
-        private final NetworkAddress networkAddress;
-
-        public ReportPartitionAvailabilityFunction(PartitionId pid, NetworkAddress networkAddress) {
-            this.pid = pid;
-            this.networkAddress = networkAddress;
-        }
-
-        @Override
-        public FunctionId getFunctionId() {
-            return FunctionId.REPORT_PARTITION_AVAILABILITY;
-        }
-
-        public PartitionId getPartitionId() {
-            return pid;
-        }
-
-        public NetworkAddress getNetworkAddress() {
-            return networkAddress;
-        }
-
-        public static Object deserialize(ByteBuffer buffer, int length) throws Exception {
-            ByteArrayInputStream bais = new ByteArrayInputStream(buffer.array(), buffer.position(), length);
-            DataInputStream dis = new DataInputStream(bais);
-
-            // Read PartitionId
-            PartitionId pid = readPartitionId(dis);
-
-            // Read NetworkAddress
-            NetworkAddress networkAddress = readNetworkAddress(dis);
-
-            return new ReportPartitionAvailabilityFunction(pid, networkAddress);
-        }
-
-        public static void serialize(OutputStream out, Object object) throws Exception {
-            ReportPartitionAvailabilityFunction fn = (ReportPartitionAvailabilityFunction) object;
-
-            DataOutputStream dos = new DataOutputStream(out);
-
-            // Write PartitionId
-            writePartitionId(dos, fn.getPartitionId());
-
-            // Write NetworkAddress
-            writeNetworkAddress(dos, fn.getNetworkAddress());
-        }
-    }
-
-    public static class SerializerDeserializer implements IPayloadSerializerDeserializer {
-        private final JavaSerializationBasedPayloadSerializerDeserializer javaSerde;
-
-        public SerializerDeserializer() {
-            javaSerde = new JavaSerializationBasedPayloadSerializerDeserializer();
-        }
-
-        @Override
-        public Object deserializeObject(ByteBuffer buffer, int length) throws Exception {
-            if (length < FID_CODE_SIZE) {
-                throw new IllegalStateException("Message size too small: " + length);
-            }
-            byte fid = buffer.get();
-            return deserialize(fid, buffer, length - FID_CODE_SIZE);
-        }
-
-        @Override
-        public Exception deserializeException(ByteBuffer buffer, int length) throws Exception {
-            if (length < FID_CODE_SIZE) {
-                throw new IllegalStateException("Message size too small: " + length);
-            }
-            byte fid = buffer.get();
-            if (fid != FunctionId.OTHER.ordinal()) {
-                throw new IllegalStateException("Expected FID for OTHER, found: " + fid);
-            }
-            return (Exception) deserialize(fid, buffer, length - FID_CODE_SIZE);
-        }
-
-        @Override
-        public byte[] serializeObject(Object object) throws Exception {
-            if (object instanceof Function) {
-                Function fn = (Function) object;
-                return serialize(object, (byte) fn.getFunctionId().ordinal());
-            } else {
-                return serialize(object, (byte) FunctionId.OTHER.ordinal());
-            }
-        }
-
-        @Override
-        public byte[] serializeException(Exception object) throws Exception {
-            return serialize(object, (byte) FunctionId.OTHER.ordinal());
-        }
-
-        private byte[] serialize(Object object, byte fid) throws Exception {
-            ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            baos.write(fid);
-            try {
-                serialize(baos, object, fid);
-            } catch (Exception e) {
-                LOGGER.log(Level.SEVERE, "Error serializing " + object, e);
-                throw e;
-            }
-            baos.close();
-            return baos.toByteArray();
-        }
-
-        private void serialize(OutputStream out, Object object, byte fid) throws Exception {
-            switch (FunctionId.values()[fid]) {
-                case REGISTER_PARTITION_PROVIDER:
-                    RegisterPartitionProviderFunction.serialize(out, object);
-                    return;
-
-                case REGISTER_PARTITION_REQUEST:
-                    RegisterPartitionRequestFunction.serialize(out, object);
-                    return;
-
-                case REPORT_PARTITION_AVAILABILITY:
-                    ReportPartitionAvailabilityFunction.serialize(out, object);
-                    return;
-            }
-            JavaSerializationBasedPayloadSerializerDeserializer.serialize(out, object);
-        }
-
-        private Object deserialize(byte fid, ByteBuffer buffer, int length) throws Exception {
-            switch (FunctionId.values()[fid]) {
-                case REGISTER_PARTITION_PROVIDER:
-                    return RegisterPartitionProviderFunction.deserialize(buffer, length);
-
-                case REGISTER_PARTITION_REQUEST:
-                    return RegisterPartitionRequestFunction.deserialize(buffer, length);
-
-                case REPORT_PARTITION_AVAILABILITY:
-                    return ReportPartitionAvailabilityFunction.deserialize(buffer, length);
-            }
-
-            return javaSerde.deserializeObject(buffer, length);
-        }
-    }
-
-    private static PartitionId readPartitionId(DataInputStream dis) throws IOException {
-        long jobId = dis.readLong();
-        int cdid = dis.readInt();
-        int senderIndex = dis.readInt();
-        int receiverIndex = dis.readInt();
-        PartitionId pid = new PartitionId(new JobId(jobId), new ConnectorDescriptorId(cdid), senderIndex, receiverIndex);
-        return pid;
-    }
-
-    private static void writePartitionId(DataOutputStream dos, PartitionId pid) throws IOException {
-        dos.writeLong(pid.getJobId().getId());
-        dos.writeInt(pid.getConnectorDescriptorId().getId());
-        dos.writeInt(pid.getSenderIndex());
-        dos.writeInt(pid.getReceiverIndex());
-    }
-
-    private static TaskAttemptId readTaskAttemptId(DataInputStream dis) throws IOException {
-        int odid = dis.readInt();
-        int aid = dis.readInt();
-        int partition = dis.readInt();
-        int attempt = dis.readInt();
-        TaskAttemptId taId = new TaskAttemptId(new TaskId(new ActivityId(new OperatorDescriptorId(odid), aid),
-                partition), attempt);
-        return taId;
-    }
-
-    private static void writeTaskAttemptId(DataOutputStream dos, TaskAttemptId taId) throws IOException {
-        TaskId tid = taId.getTaskId();
-        ActivityId aid = tid.getActivityId();
-        OperatorDescriptorId odId = aid.getOperatorDescriptorId();
-        dos.writeInt(odId.getId());
-        dos.writeInt(aid.getLocalId());
-        dos.writeInt(tid.getPartition());
-        dos.writeInt(taId.getAttempt());
-    }
-
-    private static PartitionState readPartitionState(DataInputStream dis) throws IOException {
-        PartitionState state = PartitionState.values()[dis.readInt()];
-        return state;
-    }
-
-    private static void writePartitionState(DataOutputStream dos, PartitionState state) throws IOException {
-        dos.writeInt(state.ordinal());
-    }
-
-    private static NetworkAddress readNetworkAddress(DataInputStream dis) throws IOException {
-        int bLen = dis.readInt();
-        byte[] ipAddress = new byte[bLen];
-        dis.read(ipAddress);
-        int port = dis.readInt();
-        NetworkAddress networkAddress = new NetworkAddress(ipAddress, port);
-        return networkAddress;
-    }
-
-    private static void writeNetworkAddress(DataOutputStream dos, NetworkAddress networkAddress) throws IOException {
-        byte[] ipAddress = networkAddress.getIpAddress();
-        dos.writeInt(ipAddress.length);
-        dos.write(ipAddress);
-        dos.writeInt(networkAddress.getPort());
-    }
-}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
deleted file mode 100644
index d789768..0000000
--- a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.common.ipc;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
-import edu.uci.ics.hyracks.control.common.base.IClusterController;
-import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
-import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
-import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
-import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
-import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
-import edu.uci.ics.hyracks.control.common.job.profiling.om.TaskProfile;
-import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
-
-public class ClusterControllerRemoteProxy implements IClusterController {
-    private final IIPCHandle ipcHandle;
-
-    public ClusterControllerRemoteProxy(IIPCHandle ipcHandle) {
-        this.ipcHandle = ipcHandle;
-    }
-
-    @Override
-    public void registerNode(NodeRegistration reg) throws Exception {
-        CCNCFunctions.RegisterNodeFunction fn = new CCNCFunctions.RegisterNodeFunction(reg);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void unregisterNode(String nodeId) throws Exception {
-        CCNCFunctions.UnregisterNodeFunction fn = new CCNCFunctions.UnregisterNodeFunction(nodeId);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
-            throws Exception {
-        CCNCFunctions.NotifyTaskCompleteFunction fn = new CCNCFunctions.NotifyTaskCompleteFunction(jobId, taskId,
-                nodeId, statistics);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, String details) throws Exception {
-        CCNCFunctions.NotifyTaskFailureFunction fn = new CCNCFunctions.NotifyTaskFailureFunction(jobId, taskId, nodeId,
-                details);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void notifyJobletCleanup(JobId jobId, String nodeId) throws Exception {
-        CCNCFunctions.NotifyJobletCleanupFunction fn = new CCNCFunctions.NotifyJobletCleanupFunction(jobId, nodeId);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void nodeHeartbeat(String id, HeartbeatData hbData) throws Exception {
-        CCNCFunctions.NodeHeartbeatFunction fn = new CCNCFunctions.NodeHeartbeatFunction(id, hbData);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void reportProfile(String id, List<JobProfile> profiles) throws Exception {
-        CCNCFunctions.ReportProfileFunction fn = new CCNCFunctions.ReportProfileFunction(id, profiles);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void registerPartitionProvider(PartitionDescriptor partitionDescriptor) throws Exception {
-        CCNCFunctions.RegisterPartitionProviderFunction fn = new CCNCFunctions.RegisterPartitionProviderFunction(
-                partitionDescriptor);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception {
-        CCNCFunctions.RegisterPartitionRequestFunction fn = new CCNCFunctions.RegisterPartitionRequestFunction(
-                partitionRequest);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void notifyApplicationStateChange(String nodeId, String appName, ApplicationStatus status) throws Exception {
-        CCNCFunctions.ApplicationStateChangeResponseFunction fn = new CCNCFunctions.ApplicationStateChangeResponseFunction(
-                nodeId, appName, status);
-        ipcHandle.send(-1, fn, null);
-    }
-
-    @Override
-    public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception {
-        CCNCFunctions.SendApplicationMessageFunction fn = new CCNCFunctions.SendApplicationMessageFunction(data, appName, nodeId);
-        ipcHandle.send(-1, fn, null);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-nc/pom.xml b/hyracks-control/hyracks-control-nc/pom.xml
deleted file mode 100644
index 9c2a49d..0000000
--- a/hyracks-control/hyracks-control-nc/pom.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-control-nc</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-control</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.dcache</groupId>
-  		<artifactId>dcache-client</artifactId>
-  		<version>0.0.1</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-control-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-net</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  	</dependency>
-  </dependencies>
-  <reporting>
-    <plugins>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-        <version>2.0.1</version>
-      </plugin>
-    </plugins>
-  </reporting>
-</project>
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java b/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
deleted file mode 100644
index 58a173c..0000000
--- a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
+++ /dev/null
@@ -1,430 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.nc;
-
-import java.io.File;
-import java.lang.management.GarbageCollectorMXBean;
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryMXBean;
-import java.lang.management.MemoryUsage;
-import java.lang.management.OperatingSystemMXBean;
-import java.lang.management.RuntimeMXBean;
-import java.lang.management.ThreadMXBean;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Hashtable;
-import java.util.List;
-import java.util.Map;
-import java.util.StringTokenizer;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.concurrent.Executor;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.control.common.AbstractRemoteService;
-import edu.uci.ics.hyracks.control.common.base.IClusterController;
-import edu.uci.ics.hyracks.control.common.context.ServerContext;
-import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
-import edu.uci.ics.hyracks.control.common.controllers.NodeParameters;
-import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
-import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
-import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatSchema;
-import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions;
-import edu.uci.ics.hyracks.control.common.ipc.ClusterControllerRemoteProxy;
-import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
-import edu.uci.ics.hyracks.control.common.work.FutureValue;
-import edu.uci.ics.hyracks.control.common.work.WorkQueue;
-import edu.uci.ics.hyracks.control.nc.application.NCApplicationContext;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.control.nc.net.NetworkManager;
-import edu.uci.ics.hyracks.control.nc.partitions.PartitionManager;
-import edu.uci.ics.hyracks.control.nc.runtime.RootHyracksContext;
-import edu.uci.ics.hyracks.control.nc.work.AbortTasksWork;
-import edu.uci.ics.hyracks.control.nc.work.ApplicationMessageWork;
-import edu.uci.ics.hyracks.control.nc.work.BuildJobProfilesWork;
-import edu.uci.ics.hyracks.control.nc.work.CleanupJobletWork;
-import edu.uci.ics.hyracks.control.nc.work.CreateApplicationWork;
-import edu.uci.ics.hyracks.control.nc.work.DestroyApplicationWork;
-import edu.uci.ics.hyracks.control.nc.work.ReportPartitionAvailabilityWork;
-import edu.uci.ics.hyracks.control.nc.work.StartTasksWork;
-import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
-import edu.uci.ics.hyracks.ipc.api.IIPCI;
-import edu.uci.ics.hyracks.ipc.api.IPCPerformanceCounters;
-import edu.uci.ics.hyracks.ipc.impl.IPCSystem;
-import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
-
-public class NodeControllerService extends AbstractRemoteService {
-    private static Logger LOGGER = Logger.getLogger(NodeControllerService.class.getName());
-
-    private NCConfig ncConfig;
-
-    private final String id;
-
-    private final IHyracksRootContext ctx;
-
-    private final IPCSystem ipc;
-
-    private final PartitionManager partitionManager;
-
-    private final NetworkManager netManager;
-
-    private final WorkQueue queue;
-
-    private final Timer timer;
-
-    private boolean registrationPending;
-
-    private Exception registrationException;
-
-    private IClusterController ccs;
-
-    private final Map<JobId, Joblet> jobletMap;
-
-    private final ExecutorService executor;
-
-    private NodeParameters nodeParameters;
-
-    private HeartbeatTask heartbeatTask;
-
-    private final ServerContext serverCtx;
-
-    private final Map<String, NCApplicationContext> applications;
-
-    private final MemoryMXBean memoryMXBean;
-
-    private final List<GarbageCollectorMXBean> gcMXBeans;
-
-    private final ThreadMXBean threadMXBean;
-
-    private final RuntimeMXBean runtimeMXBean;
-
-    private final OperatingSystemMXBean osMXBean;
-
-    public NodeControllerService(NCConfig ncConfig) throws Exception {
-        this.ncConfig = ncConfig;
-        id = ncConfig.nodeId;
-        executor = Executors.newCachedThreadPool();
-        NodeControllerIPCI ipci = new NodeControllerIPCI();
-        ipc = new IPCSystem(new InetSocketAddress(ncConfig.clusterNetIPAddress, 0), ipci,
-                new CCNCFunctions.SerializerDeserializer());
-        this.ctx = new RootHyracksContext(new IOManager(getDevices(ncConfig.ioDevices), executor));
-        if (id == null) {
-            throw new Exception("id not set");
-        }
-        partitionManager = new PartitionManager(this);
-        netManager = new NetworkManager(getIpAddress(ncConfig), partitionManager, ncConfig.nNetThreads);
-
-        queue = new WorkQueue();
-        jobletMap = new Hashtable<JobId, Joblet>();
-        timer = new Timer(true);
-        serverCtx = new ServerContext(ServerContext.ServerType.NODE_CONTROLLER, new File(new File(
-                NodeControllerService.class.getName()), id));
-        applications = new Hashtable<String, NCApplicationContext>();
-        memoryMXBean = ManagementFactory.getMemoryMXBean();
-        gcMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
-        threadMXBean = ManagementFactory.getThreadMXBean();
-        runtimeMXBean = ManagementFactory.getRuntimeMXBean();
-        osMXBean = ManagementFactory.getOperatingSystemMXBean();
-        registrationPending = true;
-    }
-
-    public IHyracksRootContext getRootContext() {
-        return ctx;
-    }
-
-    private static List<IODeviceHandle> getDevices(String ioDevices) {
-        List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
-        StringTokenizer tok = new StringTokenizer(ioDevices, ",");
-        while (tok.hasMoreElements()) {
-            String devPath = tok.nextToken().trim();
-            devices.add(new IODeviceHandle(new File(devPath), "."));
-        }
-        return devices;
-    }
-
-    private synchronized void setNodeRegistrationResult(NodeParameters parameters, Exception exception) {
-        this.nodeParameters = parameters;
-        this.registrationException = exception;
-        this.registrationPending = false;
-        notifyAll();
-    }
-
-    @Override
-    public void start() throws Exception {
-        LOGGER.log(Level.INFO, "Starting NodeControllerService");
-        ipc.start();
-        netManager.start();
-        IIPCHandle ccIPCHandle = ipc.getHandle(new InetSocketAddress(ncConfig.ccHost, ncConfig.ccPort));
-        this.ccs = new ClusterControllerRemoteProxy(ccIPCHandle);
-        HeartbeatSchema.GarbageCollectorInfo[] gcInfos = new HeartbeatSchema.GarbageCollectorInfo[gcMXBeans.size()];
-        for (int i = 0; i < gcInfos.length; ++i) {
-            gcInfos[i] = new HeartbeatSchema.GarbageCollectorInfo(gcMXBeans.get(i).getName());
-        }
-        HeartbeatSchema hbSchema = new HeartbeatSchema(gcInfos);
-        ccs.registerNode(new NodeRegistration(ipc.getSocketAddress(), id, ncConfig, netManager.getNetworkAddress(),
-                osMXBean.getName(), osMXBean.getArch(), osMXBean.getVersion(), osMXBean.getAvailableProcessors(),
-                runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(), runtimeMXBean
-                        .getClassPath(), runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(),
-                runtimeMXBean.getInputArguments(), runtimeMXBean.getSystemProperties(), hbSchema));
-
-        synchronized (this) {
-            while (registrationPending) {
-                wait();
-            }
-        }
-        if (registrationException != null) {
-            throw registrationException;
-        }
-
-        queue.start();
-
-        heartbeatTask = new HeartbeatTask(ccs);
-
-        // Schedule heartbeat generator.
-        timer.schedule(heartbeatTask, 0, nodeParameters.getHeartbeatPeriod());
-
-        if (nodeParameters.getProfileDumpPeriod() > 0) {
-            // Schedule profile dump generator.
-            timer.schedule(new ProfileDumpTask(ccs), 0, nodeParameters.getProfileDumpPeriod());
-        }
-
-        LOGGER.log(Level.INFO, "Started NodeControllerService");
-    }
-
-    @Override
-    public void stop() throws Exception {
-        LOGGER.log(Level.INFO, "Stopping NodeControllerService");
-        executor.shutdownNow();
-        partitionManager.close();
-        heartbeatTask.cancel();
-        netManager.stop();
-        queue.stop();
-        LOGGER.log(Level.INFO, "Stopped NodeControllerService");
-    }
-
-    public String getId() {
-        return id;
-    }
-
-    public ServerContext getServerContext() {
-        return serverCtx;
-    }
-
-    public Map<String, NCApplicationContext> getApplications() {
-        return applications;
-    }
-
-    public Map<JobId, Joblet> getJobletMap() {
-        return jobletMap;
-    }
-
-    public NetworkManager getNetworkManager() {
-        return netManager;
-    }
-
-    public PartitionManager getPartitionManager() {
-        return partitionManager;
-    }
-
-    public IClusterController getClusterController() {
-        return ccs;
-    }
-
-    public NodeParameters getNodeParameters() {
-        return nodeParameters;
-    }
-
-    public Executor getExecutor() {
-        return executor;
-    }
-
-    public NCConfig getConfiguration() {
-        return ncConfig;
-    }
-
-    public WorkQueue getWorkQueue() {
-        return queue;
-    }
-
-    private static InetAddress getIpAddress(NCConfig ncConfig) throws Exception {
-        String ipaddrStr = ncConfig.dataIPAddress;
-        ipaddrStr = ipaddrStr.trim();
-        Pattern pattern = Pattern.compile("(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})");
-        Matcher m = pattern.matcher(ipaddrStr);
-        if (!m.matches()) {
-            throw new Exception(MessageFormat.format(
-                    "Connection Manager IP Address String %s does is not a valid IP Address.", ipaddrStr));
-        }
-        byte[] ipBytes = new byte[4];
-        ipBytes[0] = (byte) Integer.parseInt(m.group(1));
-        ipBytes[1] = (byte) Integer.parseInt(m.group(2));
-        ipBytes[2] = (byte) Integer.parseInt(m.group(3));
-        ipBytes[3] = (byte) Integer.parseInt(m.group(4));
-        return InetAddress.getByAddress(ipBytes);
-    }
-
-    private class HeartbeatTask extends TimerTask {
-        private IClusterController cc;
-
-        private final HeartbeatData hbData;
-
-        public HeartbeatTask(IClusterController cc) {
-            this.cc = cc;
-            hbData = new HeartbeatData();
-            hbData.gcCollectionCounts = new long[gcMXBeans.size()];
-            hbData.gcCollectionTimes = new long[gcMXBeans.size()];
-        }
-
-        @Override
-        public void run() {
-            MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
-            hbData.heapInitSize = heapUsage.getInit();
-            hbData.heapUsedSize = heapUsage.getUsed();
-            hbData.heapCommittedSize = heapUsage.getCommitted();
-            hbData.heapMaxSize = heapUsage.getMax();
-            MemoryUsage nonheapUsage = memoryMXBean.getNonHeapMemoryUsage();
-            hbData.nonheapInitSize = nonheapUsage.getInit();
-            hbData.nonheapUsedSize = nonheapUsage.getUsed();
-            hbData.nonheapCommittedSize = nonheapUsage.getCommitted();
-            hbData.nonheapMaxSize = nonheapUsage.getMax();
-            hbData.threadCount = threadMXBean.getThreadCount();
-            hbData.peakThreadCount = threadMXBean.getPeakThreadCount();
-            hbData.totalStartedThreadCount = threadMXBean.getTotalStartedThreadCount();
-            hbData.systemLoadAverage = osMXBean.getSystemLoadAverage();
-            int gcN = gcMXBeans.size();
-            for (int i = 0; i < gcN; ++i) {
-                GarbageCollectorMXBean gcMXBean = gcMXBeans.get(i);
-                hbData.gcCollectionCounts[i] = gcMXBean.getCollectionCount();
-                hbData.gcCollectionTimes[i] = gcMXBean.getCollectionTime();
-            }
-
-            MuxDemuxPerformanceCounters netPC = netManager.getPerformanceCounters();
-            hbData.netPayloadBytesRead = netPC.getPayloadBytesRead();
-            hbData.netPayloadBytesWritten = netPC.getPayloadBytesWritten();
-            hbData.netSignalingBytesRead = netPC.getSignalingBytesRead();
-            hbData.netSignalingBytesWritten = netPC.getSignalingBytesWritten();
-
-            IPCPerformanceCounters ipcPC = ipc.getPerformanceCounters();
-            hbData.ipcMessagesSent = ipcPC.getMessageSentCount();
-            hbData.ipcMessageBytesSent = ipcPC.getMessageBytesSent();
-            hbData.ipcMessagesReceived = ipcPC.getMessageReceivedCount();
-            hbData.ipcMessageBytesReceived = ipcPC.getMessageBytesReceived();
-
-            try {
-                cc.nodeHeartbeat(id, hbData);
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-    private class ProfileDumpTask extends TimerTask {
-        private IClusterController cc;
-
-        public ProfileDumpTask(IClusterController cc) {
-            this.cc = cc;
-        }
-
-        @Override
-        public void run() {
-            try {
-                FutureValue<List<JobProfile>> fv = new FutureValue<List<JobProfile>>();
-                BuildJobProfilesWork bjpw = new BuildJobProfilesWork(NodeControllerService.this, fv);
-                queue.scheduleAndSync(bjpw);
-                List<JobProfile> profiles = fv.get();
-                if (!profiles.isEmpty()) {
-                    cc.reportProfile(id, profiles);
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-    private final class NodeControllerIPCI implements IIPCI {
-        @Override
-        public void deliverIncomingMessage(IIPCHandle handle, long mid, long rmid, Object payload, Exception exception) {
-            CCNCFunctions.Function fn = (CCNCFunctions.Function) payload;
-            switch (fn.getFunctionId()) {
-                case SEND_APPLICATION_MESSAGE: {
-                    CCNCFunctions.SendApplicationMessageFunction amf = (CCNCFunctions.SendApplicationMessageFunction) fn;
-                    queue.schedule(new ApplicationMessageWork(NodeControllerService.this, amf.getMessage(), amf
-                            .getAppName(), amf.getNodeId()));
-                    return;
-                }
-                case START_TASKS: {
-                    CCNCFunctions.StartTasksFunction stf = (CCNCFunctions.StartTasksFunction) fn;
-                    queue.schedule(new StartTasksWork(NodeControllerService.this, stf.getAppName(), stf.getJobId(), stf
-                            .getPlanBytes(), stf.getTaskDescriptors(), stf.getConnectorPolicies(), stf.getFlags()));
-                    return;
-                }
-
-                case ABORT_TASKS: {
-                    CCNCFunctions.AbortTasksFunction atf = (CCNCFunctions.AbortTasksFunction) fn;
-                    queue.schedule(new AbortTasksWork(NodeControllerService.this, atf.getJobId(), atf.getTasks()));
-                    return;
-                }
-
-                case CLEANUP_JOBLET: {
-                    CCNCFunctions.CleanupJobletFunction cjf = (CCNCFunctions.CleanupJobletFunction) fn;
-                    queue.schedule(new CleanupJobletWork(NodeControllerService.this, cjf.getJobId(), cjf.getStatus()));
-                    return;
-                }
-
-                case CREATE_APPLICATION: {
-                    CCNCFunctions.CreateApplicationFunction caf = (CCNCFunctions.CreateApplicationFunction) fn;
-                    queue.schedule(new CreateApplicationWork(NodeControllerService.this, caf.getAppName(), caf
-                            .isDeployHar(), caf.getSerializedDistributedState()));
-                    return;
-                }
-
-                case DESTROY_APPLICATION: {
-                    CCNCFunctions.DestroyApplicationFunction daf = (CCNCFunctions.DestroyApplicationFunction) fn;
-                    queue.schedule(new DestroyApplicationWork(NodeControllerService.this, daf.getAppName()));
-                    return;
-                }
-
-                case REPORT_PARTITION_AVAILABILITY: {
-                    CCNCFunctions.ReportPartitionAvailabilityFunction rpaf = (CCNCFunctions.ReportPartitionAvailabilityFunction) fn;
-                    queue.schedule(new ReportPartitionAvailabilityWork(NodeControllerService.this, rpaf
-                            .getPartitionId(), rpaf.getNetworkAddress()));
-                    return;
-                }
-
-                case NODE_REGISTRATION_RESULT: {
-                    CCNCFunctions.NodeRegistrationResult nrrf = (CCNCFunctions.NodeRegistrationResult) fn;
-                    setNodeRegistrationResult(nrrf.getNodeParameters(), nrrf.getException());
-                    return;
-                }
-            }
-            throw new IllegalArgumentException("Unknown function: " + fn.getFunctionId());
-
-        }
-    }
-
-    public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception {
-        ccs.sendApplicationMessageToCC(data, appName, nodeId);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/runtime/RootHyracksContext.java b/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/runtime/RootHyracksContext.java
deleted file mode 100644
index 9e42277..0000000
--- a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/runtime/RootHyracksContext.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.nc.runtime;
-
-import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-
-public class RootHyracksContext implements IHyracksRootContext {
-    private final IIOManager ioManager;
-
-    public RootHyracksContext(IIOManager ioManager) {
-        this.ioManager = ioManager;
-    }
-
-    @Override
-    public IIOManager getIOManager() {
-        return ioManager;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-control/pom.xml b/hyracks-control/pom.xml
deleted file mode 100644
index cb668e9..0000000
--- a/hyracks-control/pom.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-control</artifactId>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>hyracks-control-common</module>
-    <module>hyracks-control-cc</module>
-    <module>hyracks-control-nc</module>
-  </modules>
-</project>
diff --git a/hyracks-data/hyracks-data-std/pom.xml b/hyracks-data/hyracks-data-std/pom.xml
deleted file mode 100644
index fda0a32..0000000
--- a/hyracks-data/hyracks-data-std/pom.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-data-std</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-data</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-api</artifactId>
-  	<version>0.2.2-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/MurmurHash3BinaryHashFunctionFamily.java b/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/MurmurHash3BinaryHashFunctionFamily.java
deleted file mode 100644
index 8f2e32c..0000000
--- a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/MurmurHash3BinaryHashFunctionFamily.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package edu.uci.ics.hyracks.data.std.accessors;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
-
-public class MurmurHash3BinaryHashFunctionFamily implements IBinaryHashFunctionFamily {
-    private static final long serialVersionUID = 1L;
-
-    private static final int C1 = 0xcc9e2d51;
-    private static final int C2 = 0x1b873593;
-    private static final int C3 = 5;
-    private static final int C4 = 0xe6546b64;
-    private static final int C5 = 0x85ebca6b;
-    private static final int C6 = 0xc2b2ae35;
-
-    @Override
-    public IBinaryHashFunction createBinaryHashFunction(final int seed) {
-        return new IBinaryHashFunction() {
-            @Override
-            public int hash(byte[] bytes, int offset, int length) {
-                int h = seed;
-                int p = offset;
-                int remain = length;
-                while (remain > 4) {
-                    int k = ((int) bytes[p]) | (((int) bytes[p + 1]) << 8) | (((int) bytes[p + 2]) << 16)
-                            | (((int) bytes[p + 3]) << 24);
-                    k *= C1;
-                    k = Integer.rotateLeft(k, 15);
-                    k *= C2;
-                    h ^= k;
-                    h = Integer.rotateLeft(h, 13);
-                    h = h * C3 + C4;
-                    p += 4;
-                    remain -= 4;
-                }
-                int k = 0;
-                switch (remain) {
-                    case 3:
-                        k = bytes[p++];
-                    case 2:
-                        k = (k << 8) | bytes[p++];
-                    case 1:
-                        k = (k << 8) | bytes[p++];
-                        k *= C1;
-                        k = Integer.rotateLeft(k, 15);
-                        k *= C2;
-                        h ^= k;
-                        h = Integer.rotateLeft(h, 13);
-                        h = h * C3 + C4;
-                }
-                h ^= length;
-                h ^= (h >>> 16);
-                h *= C5;
-                h ^= (h >>> 13);
-                h *= C6;
-                h ^= (h >>> 16);
-                return h;
-            }
-        };
-    }
-}
\ No newline at end of file
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointable.java b/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointable.java
deleted file mode 100644
index 8808d95..0000000
--- a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointable.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.data.std.api;
-
-public interface IPointable extends IValueReference {
-    public void set(byte[] bytes, int start, int length);
-
-    public void set(IValueReference pointer);
-
-    @Override
-    public byte[] getByteArray();
-
-    @Override
-    public int getStartOffset();
-
-    @Override
-    public int getLength();
-}
\ No newline at end of file
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/DoublePointable.java b/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/DoublePointable.java
deleted file mode 100644
index 543031d..0000000
--- a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/DoublePointable.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.data.std.primitive;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.api.AbstractPointable;
-import edu.uci.ics.hyracks.data.std.api.IComparable;
-import edu.uci.ics.hyracks.data.std.api.IHashable;
-import edu.uci.ics.hyracks.data.std.api.INumeric;
-import edu.uci.ics.hyracks.data.std.api.IPointable;
-import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
-
-public final class DoublePointable extends AbstractPointable implements IHashable, IComparable, INumeric {
-	private final static double machineEpsilon;
-	static {
-		float epsilon = 1.0f;
-
-        do {
-           epsilon /= 2.0f;
-        }
-        while ((float)(1.0 + (epsilon/2.0)) != 1.0);
-        machineEpsilon = epsilon;
-	}
-	
-    public static final ITypeTraits TYPE_TRAITS = new ITypeTraits() {
-        private static final long serialVersionUID = 1L;
-
-        @Override
-        public boolean isFixedLength() {
-            return true;
-        }
-
-        @Override
-        public int getFixedLength() {
-            return 8;
-        }
-    };
-
-    public static final IPointableFactory FACTORY = new IPointableFactory() {
-        private static final long serialVersionUID = 1L;
-
-        @Override
-        public IPointable createPointable() {
-            return new DoublePointable();
-        }
-
-        @Override
-        public ITypeTraits getTypeTraits() {
-            return TYPE_TRAITS;
-        }
-    };
-
-    public static long getLongBits(byte[] bytes, int start) {
-        return LongPointable.getLong(bytes, start);
-    }
-
-    public static double getDouble(byte[] bytes, int start) {
-        long bits = getLongBits(bytes, start);
-        return Double.longBitsToDouble(bits);
-    }
-
-    public static void setDouble(byte[] bytes, int start, double value) {
-        long bits = Double.doubleToLongBits(value);
-        LongPointable.setLong(bytes, start, bits);
-    }
-
-    public double getDouble() {
-        return getDouble(bytes, start);
-    }
-
-    public void setDouble(double value) {
-        setDouble(bytes, start, value);
-    }
-
-    public double preIncrement() {
-        double v = getDouble();
-        ++v;
-        setDouble(v);
-        return v;
-    }
-
-    public double postIncrement() {
-        double v = getDouble();
-        double ov = v++;
-        setDouble(v);
-        return ov;
-    }
-
-    @Override
-    public int compareTo(IPointable pointer) {
-        return compareTo(pointer.getByteArray(), pointer.getStartOffset(), pointer.getLength());
-    }
-
-    @Override
-    public int compareTo(byte[] bytes, int start, int length) {
-        double v = getDouble();
-        double ov = getDouble(bytes, start);
-        return v < ov ? -1 : (v > ov ? 1 : 0);
-    }
-
-    @Override
-    public int hash() {
-        long bits = getLongBits(bytes, start);
-        return (int) (bits ^ (bits >>> 32));
-    }
-
-    @Override
-    public byte byteValue() {
-        return (byte) getDouble();
-    }
-
-    @Override
-    public short shortValue() {
-        return (short) getDouble();
-    }
-
-    @Override
-    public int intValue() {
-        return (int) getDouble();
-    }
-
-    @Override
-    public long longValue() {
-        return (long) getDouble();
-    }
-
-    @Override
-    public float floatValue() {
-        return (float) getDouble();
-    }
-
-    @Override
-    public double doubleValue() {
-        return getDouble();
-    }
-
-	public static double getEpsilon() {
-		return machineEpsilon;
-	}
-}
\ No newline at end of file
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java b/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java
deleted file mode 100644
index f6d6093..0000000
--- a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.data.std.primitive;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.api.AbstractPointable;
-import edu.uci.ics.hyracks.data.std.api.IComparable;
-import edu.uci.ics.hyracks.data.std.api.IHashable;
-import edu.uci.ics.hyracks.data.std.api.IPointable;
-import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
-
-public final class UTF8StringPointable extends AbstractPointable implements IHashable, IComparable {
-    public static final ITypeTraits TYPE_TRAITS = new ITypeTraits() {
-        private static final long serialVersionUID = 1L;
-
-        @Override
-        public boolean isFixedLength() {
-            return false;
-        }
-
-        @Override
-        public int getFixedLength() {
-            return 0;
-        }
-    };
-
-    public static final IPointableFactory FACTORY = new IPointableFactory() {
-        private static final long serialVersionUID = 1L;
-
-        @Override
-        public IPointable createPointable() {
-            return new UTF8StringPointable();
-        }
-
-        @Override
-        public ITypeTraits getTypeTraits() {
-            return TYPE_TRAITS;
-        }
-    };
-
-    /**
-     * Returns the character at the given byte offset. The caller is responsible for making sure that
-     * the provided offset is within bounds and points to the beginning of a valid UTF8 character.
-     * 
-     * @param offset
-     *            - Byte offset
-     * @return Character at the given offset.
-     */
-    public char charAt(int offset) {
-        return charAt(bytes, start + offset);
-    }
-
-    public static char charAt(byte[] b, int s) {
-        int c = b[s] & 0xff;
-        switch (c >> 4) {
-            case 0:
-            case 1:
-            case 2:
-            case 3:
-            case 4:
-            case 5:
-            case 6:
-            case 7:
-                return (char) c;
-
-            case 12:
-            case 13:
-                return (char) (((c & 0x1F) << 6) | ((b[s + 1]) & 0x3F));
-
-            case 14:
-                return (char) (((c & 0x0F) << 12) | (((b[s + 1]) & 0x3F) << 6) | (((b[s + 2]) & 0x3F) << 0));
-
-            default:
-                throw new IllegalArgumentException();
-        }
-    }
-
-    public int charSize(int offset) {
-        return charSize(bytes, start + offset);
-    }
-
-    public static int charSize(byte[] b, int s) {
-        int c = b[s] & 0xff;
-        switch (c >> 4) {
-            case 0:
-            case 1:
-            case 2:
-            case 3:
-            case 4:
-            case 5:
-            case 6:
-            case 7:
-                return 1;
-
-            case 12:
-            case 13:
-                return 2;
-
-            case 14:
-                return 3;
-        }
-        throw new IllegalStateException();
-    }
-
-    public static int getModifiedUTF8Len(char c) {
-        if (c >= 0x0000 && c <= 0x007F) {
-            return 1;
-        } else if (c <= 0x07FF) {
-            return 2;
-        } else {
-            return 3;
-        }
-    }
-
-    /**
-     * Gets the length of the string in characters.
-     * 
-     * @return length of string in characters
-     */
-    public int getStringLength() {
-        return getStringLength(bytes, start);
-    }
-
-    public static int getStringLength(byte[] b, int s) {
-        int pos = s + 2;
-        int end = pos + getUTFLength(b, s);
-        int charCount = 0;
-        while (pos < end) {
-            charCount++;
-            pos += charSize(b, pos);
-        }
-        return charCount;
-    }
-
-    /**
-     * Gets the length of the UTF-8 encoded string in bytes.
-     * 
-     * @return length of UTF-8 encoded string in bytes
-     */
-    public int getUTFLength() {
-        return getUTFLength(bytes, start);
-    }
-
-    public static int getUTFLength(byte[] b, int s) {
-        return ((b[s] & 0xff) << 8) + ((b[s + 1] & 0xff) << 0);
-    }
-
-    @Override
-    public int compareTo(IPointable pointer) {
-        return compareTo(pointer.getByteArray(), pointer.getStartOffset(), pointer.getLength());
-    }
-
-    @Override
-    public int compareTo(byte[] bytes, int start, int length) {
-        int utflen1 = getUTFLength(this.bytes, this.start);
-        int utflen2 = getUTFLength(bytes, start);
-
-        int c1 = 0;
-        int c2 = 0;
-
-        int s1Start = this.start + 2;
-        int s2Start = start + 2;
-
-        while (c1 < utflen1 && c2 < utflen2) {
-            char ch1 = charAt(this.bytes, s1Start + c1);
-            char ch2 = charAt(bytes, s2Start + c2);
-
-            if (ch1 != ch2) {
-                return ch1 - ch2;
-            }
-            c1 += charSize(this.bytes, s1Start + c1);
-            c2 += charSize(bytes, s2Start + c2);
-        }
-        return utflen1 - utflen2;
-    }
-
-    @Override
-    public int hash() {
-        int h = 0;
-        int utflen = getUTFLength(bytes, start);
-        int sStart = start + 2;
-        int c = 0;
-
-        while (c < utflen) {
-            char ch = charAt(bytes, sStart + c);
-            h = 31 * h + ch;
-            c += charSize(bytes, sStart + c);
-        }
-        return h;
-    }
-
-    public static void toString(StringBuilder buffer, byte[] bytes, int start) {
-        int utfLen = getUTFLength(bytes, start);
-        int offset = 2;
-        while (utfLen > 0) {
-            char c = charAt(bytes, start + offset);
-            buffer.append(c);
-            int cLen = UTF8StringPointable.getModifiedUTF8Len(c);
-            offset += cLen;
-            utfLen -= cLen;
-        }
-    }
-
-    public void toString(StringBuilder buffer) {
-        toString(buffer, bytes, start);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-data/pom.xml b/hyracks-data/pom.xml
deleted file mode 100644
index 90f7fb1..0000000
--- a/hyracks-data/pom.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-data</artifactId>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>hyracks-data-std</module>
-  </modules>
-</project>
diff --git a/hyracks-dataflow-common/pom.xml b/hyracks-dataflow-common/pom.xml
deleted file mode 100644
index 2b32521..0000000
--- a/hyracks-dataflow-common/pom.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-dataflow-common</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-data-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/ShortSerializerDeserializer.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/ShortSerializerDeserializer.java
deleted file mode 100644
index 15384b3..0000000
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/ShortSerializerDeserializer.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.common.data.marshalling;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class ShortSerializerDeserializer implements ISerializerDeserializer<Short> {
-    private static final long serialVersionUID = 1L;
-
-    public static final ShortSerializerDeserializer INSTANCE = new ShortSerializerDeserializer();
-
-    private ShortSerializerDeserializer() {
-    }
-
-    @Override
-    public Short deserialize(DataInput in) throws HyracksDataException {
-        try {
-            return in.readShort();
-        } catch (IOException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void serialize(Short instance, DataOutput out) throws HyracksDataException {
-        try {
-            out.writeShort(instance.intValue());
-        } catch (IOException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-
-    public static short getShort(byte[] bytes, int offset) {
-        return (short) (((bytes[offset] & 0xff) << 8) + ((bytes[offset + 1] & 0xff)));
-    }
-
-    public static void putShort(int val, byte[] bytes, int offset) {
-        bytes[offset] = (byte) ((val >>> 8) & 0xFF);
-        bytes[offset + 1] = (byte) ((val >>> 0) & 0xFF);
-    }
-}
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java
deleted file mode 100644
index 2f7a778..0000000
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-
-public class IntegerNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public INormalizedKeyComputer createNormalizedKeyComputer() {
-        return new INormalizedKeyComputer() {
-            @Override
-            public int normalize(byte[] bytes, int start, int length) {
-                int value = IntegerSerializerDeserializer.getInt(bytes, start);
-                long unsignedValue = (long) value;
-                return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
-            }
-        };
-    }
-}
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java
deleted file mode 100644
index 927d21c..0000000
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.common.data.parsers;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class LongParserFactory implements IValueParserFactory {
-    public static final IValueParserFactory INSTANCE = new LongParserFactory();
-
-    private static final long serialVersionUID = 1L;
-
-    private LongParserFactory() {
-    }
-
-    @Override
-    public IValueParser createValueParser() {
-        return new IValueParser() {
-            @Override
-            public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
-                long n = 0;
-                int sign = 1;
-                int i = 0;
-                boolean pre = true;
-                for (; pre && i < length; ++i) {
-                    char ch = buffer[i + start];
-                    switch (ch) {
-                        case ' ':
-                        case '\t':
-                        case '\n':
-                        case '\r':
-                        case '\f':
-                            break;
-
-                        case '-':
-                            sign = -1;
-
-                        case '0':
-                        case '1':
-                        case '2':
-                        case '3':
-                        case '4':
-                        case '5':
-                        case '6':
-                        case '7':
-                        case '8':
-                        case '9':
-                            pre = false;
-                            break;
-
-                        default:
-                            throw new HyracksDataException("Encountered " + ch);
-                    }
-                }
-                boolean post = false;
-                for (; !post && i < length; ++i) {
-                    char ch = buffer[i + start];
-                    switch (ch) {
-                        case '0':
-                        case '1':
-                        case '2':
-                        case '3':
-                        case '4':
-                        case '5':
-                        case '6':
-                        case '7':
-                        case '8':
-                        case '9':
-                            n = n * 10 + (ch - '0');
-                            break;
-                    }
-                }
-
-                for (; i < length; ++i) {
-                    char ch = buffer[i + start];
-                    switch (ch) {
-                        case ' ':
-                        case '\t':
-                        case '\n':
-                        case '\r':
-                        case '\f':
-                            break;
-
-                        default:
-                            throw new HyracksDataException("Encountered " + ch);
-                    }
-                }
-
-                try {
-                    out.writeLong(n * sign);
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-        };
-    }
-}
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
deleted file mode 100644
index 9dafa83..0000000
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.dataflow.common.util;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.BooleanPointable;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.LongPointable;
-import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.BooleanSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-
-@SuppressWarnings("rawtypes")
-public class SerdeUtils {
-    public static class PayloadTypeTraits implements ITypeTraits {
-        private static final long serialVersionUID = 1L;
-        final int payloadSize;
-
-        public PayloadTypeTraits(int payloadSize) {
-            this.payloadSize = payloadSize;
-        }
-
-        @Override
-        public boolean isFixedLength() {
-            return true;
-        }
-
-        @Override
-        public int getFixedLength() {
-            return payloadSize;
-        }
-    }
-
-    public static ITypeTraits[] serdesToTypeTraits(ISerializerDeserializer[] serdes) {
-        ITypeTraits[] typeTraits = new ITypeTraits[serdes.length];
-        for (int i = 0; i < serdes.length; i++) {
-            typeTraits[i] = serdeToTypeTrait(serdes[i]);
-        }
-        return typeTraits;
-    }
-
-    public static ITypeTraits[] serdesToTypeTraits(ISerializerDeserializer[] serdes, int payloadSize) {
-        ITypeTraits[] typeTraits = new ITypeTraits[serdes.length + 1];
-        for (int i = 0; i < serdes.length; i++) {
-            typeTraits[i] = serdeToTypeTrait(serdes[i]);
-        }
-        typeTraits[serdes.length] = new PayloadTypeTraits(payloadSize);
-        return typeTraits;
-    }
-
-    public static ITypeTraits serdeToTypeTrait(ISerializerDeserializer serde) {
-        if (serde instanceof ShortSerializerDeserializer) {
-            return ShortPointable.TYPE_TRAITS;
-        }
-        if (serde instanceof IntegerSerializerDeserializer) {
-            return IntegerPointable.TYPE_TRAITS;
-        }
-        if (serde instanceof Integer64SerializerDeserializer) {
-            return LongPointable.TYPE_TRAITS;
-        }
-        if (serde instanceof FloatSerializerDeserializer) {
-            return FloatPointable.TYPE_TRAITS;
-        }
-        if (serde instanceof DoubleSerializerDeserializer) {
-            return DoublePointable.TYPE_TRAITS;
-        }
-        if (serde instanceof BooleanSerializerDeserializer) {
-            return BooleanPointable.TYPE_TRAITS;
-        }
-        return UTF8StringPointable.TYPE_TRAITS;
-    }
-
-    public static IBinaryComparator[] serdesToComparators(ISerializerDeserializer[] serdes, int numSerdes) {
-        IBinaryComparator[] comparators = new IBinaryComparator[numSerdes];
-        for (int i = 0; i < numSerdes; i++) {
-            comparators[i] = serdeToComparator(serdes[i]);
-        }
-        return comparators;
-    }
-
-    public static IBinaryComparator serdeToComparator(ISerializerDeserializer serde) {
-        IBinaryComparatorFactory f = serdeToComparatorFactory(serde);
-        return f.createBinaryComparator();
-    }
-
-    public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes, int numSerdes) {
-        IBinaryComparatorFactory[] comparatorsFactories = new IBinaryComparatorFactory[numSerdes];
-        for (int i = 0; i < numSerdes; i++) {
-            comparatorsFactories[i] = serdeToComparatorFactory(serdes[i]);
-        }
-        return comparatorsFactories;
-    }
-
-    public static IBinaryComparatorFactory serdeToComparatorFactory(ISerializerDeserializer serde) {
-        if (serde instanceof ShortSerializerDeserializer) {
-            return PointableBinaryComparatorFactory.of(ShortPointable.FACTORY);
-        }
-        if (serde instanceof IntegerSerializerDeserializer) {
-            return PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        }
-        if (serde instanceof Integer64SerializerDeserializer) {
-            return PointableBinaryComparatorFactory.of(LongPointable.FACTORY);
-        }
-        if (serde instanceof FloatSerializerDeserializer) {
-            return PointableBinaryComparatorFactory.of(FloatPointable.FACTORY);
-        }
-        if (serde instanceof DoubleSerializerDeserializer) {
-            return PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        }
-        if (serde instanceof BooleanSerializerDeserializer) {
-            throw new UnsupportedOperationException("Binary comparator factory for Boolean not implemented.");
-        }
-        if (serde instanceof UTF8StringSerializerDeserializer) {
-            return PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        }
-        throw new UnsupportedOperationException("Binary comparator for + " + serde.toString() + " not implemented.");
-    }
-}
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java
deleted file mode 100644
index 02047e8..0000000
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.dataflow.common.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-
-@SuppressWarnings("rawtypes")
-public class TupleUtils {
-    @SuppressWarnings("unchecked")
-    public static void createTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
-            ISerializerDeserializer[] fieldSerdes, final Object... fields) throws HyracksDataException {
-        DataOutput dos = tupleBuilder.getDataOutput();
-        tupleBuilder.reset();
-        int numFields = Math.min(tupleBuilder.getFieldEndOffsets().length, fields.length);
-        for (int i = 0; i < numFields; i++) {
-            fieldSerdes[i].serialize(fields[i], dos);
-            tupleBuilder.addFieldEndOffset();
-        }
-        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-    }
-
-    public static ITupleReference createTuple(ISerializerDeserializer[] fieldSerdes, final Object... fields)
-            throws HyracksDataException {
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        createTuple(tupleBuilder, tuple, fieldSerdes, fields);
-        return tuple;
-    }
-
-    public static void createIntegerTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
-            final int... fields) throws HyracksDataException {
-        DataOutput dos = tupleBuilder.getDataOutput();
-        tupleBuilder.reset();
-        for (final int i : fields) {
-            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-            tupleBuilder.addFieldEndOffset();
-        }
-        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-    }
-
-    public static ITupleReference createIntegerTuple(final int... fields) throws HyracksDataException {
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        createIntegerTuple(tupleBuilder, tuple, fields);
-        return tuple;
-    }
-
-    public static void createDoubleTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
-            final double... fields) throws HyracksDataException {
-        DataOutput dos = tupleBuilder.getDataOutput();
-        tupleBuilder.reset();
-        for (final double i : fields) {
-            DoubleSerializerDeserializer.INSTANCE.serialize(i, dos);
-            tupleBuilder.addFieldEndOffset();
-        }
-        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-    }
-
-    public static ITupleReference createDoubleTuple(final double... fields) throws HyracksDataException {
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        createDoubleTuple(tupleBuilder, tuple, fields);
-        return tuple;
-    }
-
-    public static String printTuple(ITupleReference tuple, ISerializerDeserializer[] fields)
-            throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        int numPrintFields = Math.min(tuple.getFieldCount(), fields.length);
-        for (int i = 0; i < numPrintFields; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
-            DataInput dataIn = new DataInputStream(inStream);
-            Object o = fields[i].deserialize(dataIn);
-            strBuilder.append(o.toString());
-            if (i != fields.length - 1) {
-                strBuilder.append(" ");
-            }
-        }
-        return strBuilder.toString();
-    }
-
-    public static Object[] deserializeTuple(ITupleReference tuple, ISerializerDeserializer[] fields)
-            throws HyracksDataException {
-        int numFields = Math.min(tuple.getFieldCount(), fields.length);
-        Object[] objs = new Object[numFields];
-        for (int i = 0; i < numFields; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
-            DataInput dataIn = new DataInputStream(inStream);
-            objs[i] = fields[i].deserialize(dataIn);
-        }
-        return objs;
-    }
-
-    public static ITupleReference copyTuple(ITupleReference tuple) throws HyracksDataException {
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(tuple.getFieldCount());
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            tupleBuilder.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-        }
-        ArrayTupleReference tupleCopy = new ArrayTupleReference();
-        tupleCopy.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-        return tupleCopy;
-    }
-    
-    public static void copyTuple(ArrayTupleBuilder tupleBuilder, ITupleReference tuple, int numFields) throws HyracksDataException {
-        tupleBuilder.reset();
-        for (int i = 0; i < numFields; i++) {
-            tupleBuilder.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-        }
-    }
-}
diff --git a/hyracks-dataflow-hadoop/pom.xml b/hyracks-dataflow-hadoop/pom.xml
deleted file mode 100644
index 3815a2b..0000000
--- a/hyracks-dataflow-hadoop/pom.xml
+++ /dev/null
@@ -1,61 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-dataflow-hadoop</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>org.apache.hadoop</groupId>
-  		<artifactId>hadoop-core</artifactId>
-  		<version>0.20.2</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.dcache</groupId>
-  		<artifactId>dcache-client</artifactId>
-  		<version>0.0.1</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-dataflow-std/pom.xml b/hyracks-dataflow-std/pom.xml
deleted file mode 100644
index 550814d..0000000
--- a/hyracks-dataflow-std/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-dataflow-std</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.2</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
deleted file mode 100644
index 3e5e30f..0000000
--- a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.std.join;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTuplePairComparator;
-import edu.uci.ics.hyracks.dataflow.std.structures.ISerializableTable;
-import edu.uci.ics.hyracks.dataflow.std.structures.TuplePointer;
-
-public class InMemoryHashJoin {
-	
-    private final List<ByteBuffer> buffers;
-    private final FrameTupleAccessor accessorBuild;
-    private final ITuplePartitionComputer tpcBuild;
-    private final FrameTupleAccessor accessorProbe;
-    private final ITuplePartitionComputer tpcProbe;
-    private final FrameTupleAppender appender;
-    private final FrameTuplePairComparator tpComparator;
-    private final ByteBuffer outBuffer;
-    private final boolean isLeftOuter;
-    private final ArrayTupleBuilder nullTupleBuild;
-    private final ISerializableTable table;
-	private final int tableSize;
-    private final TuplePointer storedTuplePointer;
-    
-    public InMemoryHashJoin(IHyracksTaskContext ctx, int tableSize, FrameTupleAccessor accessor0,
-            ITuplePartitionComputer tpc0, FrameTupleAccessor accessor1, ITuplePartitionComputer tpc1,
-            FrameTuplePairComparator comparator, boolean isLeftOuter, INullWriter[] nullWriters1, ISerializableTable table)
-            throws HyracksDataException {
-    	this.tableSize = tableSize;
-       	this.table = table;
-       	storedTuplePointer = new TuplePointer();
-       	buffers = new ArrayList<ByteBuffer>();
-        this.accessorBuild = accessor1;
-        this.tpcBuild = tpc1;
-        this.accessorProbe = accessor0;
-        this.tpcProbe = tpc0;
-        appender = new FrameTupleAppender(ctx.getFrameSize());
-        tpComparator = comparator;
-        outBuffer = ctx.allocateFrame();
-        appender.reset(outBuffer, true);
-        this.isLeftOuter = isLeftOuter;
-        if (isLeftOuter) {
-            int fieldCountOuter = accessor1.getFieldCount();
-            nullTupleBuild = new ArrayTupleBuilder(fieldCountOuter);
-            DataOutput out = nullTupleBuild.getDataOutput();
-            for (int i = 0; i < fieldCountOuter; i++) {
-                nullWriters1[i].writeNull(out);
-                nullTupleBuild.addFieldEndOffset();
-            }
-        } else {
-            nullTupleBuild = null;
-        }
-    }
-
-    public void build(ByteBuffer buffer) throws HyracksDataException {
-        buffers.add(buffer);
-        int bIndex = buffers.size() - 1;
-        accessorBuild.reset(buffer);
-        int tCount = accessorBuild.getTupleCount();
-        for (int i = 0; i < tCount; ++i) {
-            int entry = tpcBuild.partition(accessorBuild, i, tableSize);
-            storedTuplePointer.frameIndex = bIndex;
-            storedTuplePointer.tupleIndex = i;
-            table.insert(entry, storedTuplePointer);
-        }
-    }
-
-    public void join(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
-        accessorProbe.reset(buffer);
-        int tupleCount0 = accessorProbe.getTupleCount();
-        for (int i = 0; i < tupleCount0; ++i) {
-            int entry = tpcProbe.partition(accessorProbe, i, tableSize);
-            boolean matchFound = false;
-            int offset = 0;
-            do {
-                table.getTuplePointer(entry, offset++, storedTuplePointer);
-                if (storedTuplePointer.frameIndex < 0)
-                    break;
-                int bIndex = storedTuplePointer.frameIndex;
-                int tIndex = storedTuplePointer.tupleIndex;
-                accessorBuild.reset(buffers.get(bIndex));
-                int c = tpComparator.compare(accessorProbe, i, accessorBuild, tIndex);
-                if (c == 0) {
-                    matchFound = true;
-                    if (!appender.appendConcat(accessorProbe, i, accessorBuild, tIndex)) {
-                        flushFrame(outBuffer, writer);
-                        appender.reset(outBuffer, true);
-                        if (!appender.appendConcat(accessorProbe, i, accessorBuild, tIndex)) {
-                            throw new IllegalStateException();
-                        }
-                    }
-                }
-            } while (true);
-
-            if (!matchFound && isLeftOuter) {
-                if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
-                        nullTupleBuild.getByteArray(), 0, nullTupleBuild.getSize())) {
-                    flushFrame(outBuffer, writer);
-                    appender.reset(outBuffer, true);
-                    if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
-                            nullTupleBuild.getByteArray(), 0, nullTupleBuild.getSize())) {
-                        throw new IllegalStateException();
-                    }
-                }
-            }
-        }
-    }
-
-    public void closeJoin(IFrameWriter writer) throws HyracksDataException {
-        if (appender.getTupleCount() > 0) {
-            flushFrame(outBuffer, writer);
-        }
-    }
-
-    private void flushFrame(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
-        buffer.position(0);
-        buffer.limit(buffer.capacity());
-        writer.nextFrame(buffer);
-        buffer.position(0);
-        buffer.limit(buffer.capacity());
-    }
-
-    private static class Link {
-        private static final int INIT_POINTERS_SIZE = 8;
-
-        long[] pointers;
-        int size;
-
-        Link() {
-            pointers = new long[INIT_POINTERS_SIZE];
-            size = 0;
-        }
-
-        void add(long pointer) {
-            if (size >= pointers.length) {
-                pointers = Arrays.copyOf(pointers, pointers.length * 2);
-            }
-            pointers[size++] = pointer;
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java b/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
deleted file mode 100644
index 7e84229..0000000
--- a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.std.join;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.io.RunFileReader;
-import edu.uci.ics.hyracks.dataflow.common.io.RunFileWriter;
-
-public class NestedLoopJoin {
-    private final FrameTupleAccessor accessorInner;
-    private final FrameTupleAccessor accessorOuter;
-    private final FrameTupleAppender appender;
-    private final ITuplePairComparator tpComparator;
-    private final ByteBuffer outBuffer;
-    private final ByteBuffer innerBuffer;
-    private final List<ByteBuffer> outBuffers;
-    private final int memSize;
-    private final IHyracksTaskContext ctx;
-    private RunFileReader runFileReader;
-    private int currentMemSize = 0;
-    private final RunFileWriter runFileWriter;
-
-    public NestedLoopJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessor0, FrameTupleAccessor accessor1,
-            ITuplePairComparator comparators, int memSize) throws HyracksDataException {
-        this.accessorInner = accessor1;
-        this.accessorOuter = accessor0;
-        this.appender = new FrameTupleAppender(ctx.getFrameSize());
-        this.tpComparator = comparators;
-        this.outBuffer = ctx.allocateFrame();
-        this.innerBuffer = ctx.allocateFrame();
-        this.appender.reset(outBuffer, true);
-        this.outBuffers = new ArrayList<ByteBuffer>();
-        this.memSize = memSize;
-        this.ctx = ctx;
-
-        FileReference file = ctx.getJobletContext().createManagedWorkspaceFile(
-                this.getClass().getSimpleName() + this.toString());
-        runFileWriter = new RunFileWriter(file, ctx.getIOManager());
-        runFileWriter.open();
-    }
-
-    public void cache(ByteBuffer buffer) throws HyracksDataException {
-        runFileWriter.nextFrame(buffer);
-    }
-
-    public void join(ByteBuffer outerBuffer, IFrameWriter writer) throws HyracksDataException {
-        if (outBuffers.size() < memSize - 3) {
-            createAndCopyFrame(outerBuffer);
-            return;
-        }
-        if (currentMemSize < memSize - 3) {
-            reloadFrame(outerBuffer);
-            return;
-        }
-        for (ByteBuffer outBuffer : outBuffers) {
-            runFileReader = runFileWriter.createReader();
-            runFileReader.open();
-            while (runFileReader.nextFrame(innerBuffer)) {
-                blockJoin(outBuffer, innerBuffer, writer);
-            }
-            runFileReader.close();
-        }
-        currentMemSize = 0;
-        reloadFrame(outerBuffer);
-    }
-
-    private void createAndCopyFrame(ByteBuffer outerBuffer) {
-        ByteBuffer outerBufferCopy = ctx.allocateFrame();
-        FrameUtils.copy(outerBuffer, outerBufferCopy);
-        outBuffers.add(outerBufferCopy);
-        currentMemSize++;
-    }
-
-    private void reloadFrame(ByteBuffer outerBuffer) {
-        outBuffers.get(currentMemSize).clear();
-        FrameUtils.copy(outerBuffer, outBuffers.get(currentMemSize));
-        currentMemSize++;
-    }
-
-    private void blockJoin(ByteBuffer outerBuffer, ByteBuffer innerBuffer, IFrameWriter writer)
-            throws HyracksDataException {
-        accessorOuter.reset(outerBuffer);
-        accessorInner.reset(innerBuffer);
-        int tupleCount0 = accessorOuter.getTupleCount();
-        int tupleCount1 = accessorInner.getTupleCount();
-
-        for (int i = 0; i < tupleCount0; ++i) {
-            for (int j = 0; j < tupleCount1; ++j) {
-                int c = compare(accessorOuter, i, accessorInner, j);
-                if (c == 0) {
-                    if (!appender.appendConcat(accessorOuter, i, accessorInner, j)) {
-                        flushFrame(outBuffer, writer);
-                        appender.reset(outBuffer, true);
-                        if (!appender.appendConcat(accessorOuter, i, accessorInner, j)) {
-                            throw new IllegalStateException();
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    public void closeCache() throws HyracksDataException {
-        if (runFileWriter != null) {
-            runFileWriter.close();
-        }
-    }
-
-    public void closeJoin(IFrameWriter writer) throws HyracksDataException {
-        for (int i = 0; i < currentMemSize; i++) {
-            ByteBuffer outBuffer = outBuffers.get(i);
-            runFileReader = runFileWriter.createReader();
-            runFileReader.open();
-            while (runFileReader.nextFrame(innerBuffer)) {
-                blockJoin(outBuffer, innerBuffer, writer);
-            }
-            runFileReader.close();
-        }
-        outBuffers.clear();
-        currentMemSize = 0;
-
-        if (appender.getTupleCount() > 0) {
-            flushFrame(outBuffer, writer);
-        }
-    }
-
-    private void flushFrame(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
-        buffer.position(0);
-        buffer.limit(buffer.capacity());
-        writer.nextFrame(buffer);
-        buffer.position(0);
-        buffer.limit(buffer.capacity());
-    }
-
-    private int compare(FrameTupleAccessor accessor0, int tIndex0, FrameTupleAccessor accessor1, int tIndex1)
-            throws HyracksDataException {
-        int c = tpComparator.compare(accessor0, tIndex0, accessor1, tIndex1);
-        if (c != 0) {
-            return c;
-        }
-        return 0;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java b/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
deleted file mode 100644
index a699703..0000000
--- a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.dataflow.std.join;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.ActivityId;
-import edu.uci.ics.hyracks.api.dataflow.IActivityGraphBuilder;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.TaskId;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractActivityNode;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractStateObject;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-public class NestedLoopJoinOperatorDescriptor extends AbstractOperatorDescriptor {
-    private static final int JOIN_CACHE_ACTIVITY_ID = 0;
-    private static final int NL_JOIN_ACTIVITY_ID = 1;
-
-    private static final long serialVersionUID = 1L;
-    private final ITuplePairComparatorFactory comparatorFactory;
-    private final int memSize;
-
-    public NestedLoopJoinOperatorDescriptor(IOperatorDescriptorRegistry spec,
-            ITuplePairComparatorFactory comparatorFactory, RecordDescriptor recordDescriptor, int memSize) {
-        super(spec, 2, 1);
-        this.comparatorFactory = comparatorFactory;
-        this.recordDescriptors[0] = recordDescriptor;
-        this.memSize = memSize;
-    }
-
-    @Override
-    public void contributeActivities(IActivityGraphBuilder builder) {
-        ActivityId jcaId = new ActivityId(getOperatorId(), JOIN_CACHE_ACTIVITY_ID);
-        ActivityId nljAid = new ActivityId(getOperatorId(), NL_JOIN_ACTIVITY_ID);
-        JoinCacheActivityNode jc = new JoinCacheActivityNode(jcaId, nljAid);
-        NestedLoopJoinActivityNode nlj = new NestedLoopJoinActivityNode(nljAid);
-
-        builder.addActivity(this, jc);
-        builder.addSourceEdge(1, jc, 0);
-
-        builder.addActivity(this, nlj);
-        builder.addSourceEdge(0, nlj, 0);
-
-        builder.addTargetEdge(0, nlj, 0);
-        builder.addBlockingEdge(jc, nlj);
-    }
-
-    public static class JoinCacheTaskState extends AbstractStateObject {
-        private NestedLoopJoin joiner;
-
-        public JoinCacheTaskState() {
-        }
-
-        private JoinCacheTaskState(JobId jobId, TaskId taskId) {
-            super(jobId, taskId);
-        }
-
-        @Override
-        public void toBytes(DataOutput out) throws IOException {
-
-        }
-
-        @Override
-        public void fromBytes(DataInput in) throws IOException {
-
-        }
-    }
-
-    private class JoinCacheActivityNode extends AbstractActivityNode {
-        private static final long serialVersionUID = 1L;
-
-        private final ActivityId nljAid;
-
-        public JoinCacheActivityNode(ActivityId id, ActivityId nljAid) {
-            super(id);
-            this.nljAid = nljAid;
-        }
-
-        @Override
-        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
-            final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(nljAid, 0);
-            final RecordDescriptor rd1 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
-            final ITuplePairComparator comparator = comparatorFactory.createTuplePairComparator(ctx);
-
-            IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
-                private JoinCacheTaskState state;
-
-                @Override
-                public void open() throws HyracksDataException {
-                    state = new JoinCacheTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(),
-                            partition));
-                    state.joiner = new NestedLoopJoin(ctx, new FrameTupleAccessor(ctx.getFrameSize(), rd0),
-                            new FrameTupleAccessor(ctx.getFrameSize(), rd1), comparator, memSize);
-                }
-
-                @Override
-                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                    ByteBuffer copyBuffer = ctx.allocateFrame();
-                    FrameUtils.copy(buffer, copyBuffer);
-                    FrameUtils.makeReadable(copyBuffer);
-                    state.joiner.cache(copyBuffer);
-                }
-
-                @Override
-                public void close() throws HyracksDataException {
-                    state.joiner.closeCache();
-                    ctx.setStateObject(state);
-                }
-
-                @Override
-                public void fail() throws HyracksDataException {
-                }
-            };
-            return op;
-        }
-    }
-
-    private class NestedLoopJoinActivityNode extends AbstractActivityNode {
-        private static final long serialVersionUID = 1L;
-
-        public NestedLoopJoinActivityNode(ActivityId id) {
-            super(id);
-        }
-
-        @Override
-        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
-
-            IOperatorNodePushable op = new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
-                private JoinCacheTaskState state;
-
-                @Override
-                public void open() throws HyracksDataException {
-                    state = (JoinCacheTaskState) ctx.getStateObject(new TaskId(new ActivityId(getOperatorId(),
-                            JOIN_CACHE_ACTIVITY_ID), partition));
-                    writer.open();
-                }
-
-                @Override
-                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                    state.joiner.join(buffer, writer);
-                }
-
-                @Override
-                public void close() throws HyracksDataException {
-                    state.joiner.closeJoin(writer);
-                    writer.close();
-                }
-
-                @Override
-                public void fail() throws HyracksDataException {
-                    writer.fail();
-                }
-            };
-            return op;
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java b/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
deleted file mode 100644
index 2905574..0000000
--- a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
+++ /dev/null
@@ -1,607 +0,0 @@
-package edu.uci.ics.hyracks.dataflow.std.join;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.BitSet;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTuplePairComparator;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.io.RunFileReader;
-import edu.uci.ics.hyracks.dataflow.common.io.RunFileWriter;
-import edu.uci.ics.hyracks.dataflow.std.structures.ISerializableTable;
-import edu.uci.ics.hyracks.dataflow.std.structures.SerializableHashTable;
-
-/**
- * @author pouria
-       This class mainly applies one level of HHJ on a pair of
-       relations. It is always called by the descriptor.
- */
-public class OptimizedHybridHashJoin {
-
-    private final int NO_MORE_FREE_BUFFER = -1;
-    private final int END_OF_PARTITION = -1;
-    private final int INVALID_BUFFER = -2;
-    private final int UNALLOCATED_FRAME = -3;
-    private final int BUFFER_FOR_RESIDENT_PARTS = -1;
-    
-    private IHyracksTaskContext ctx;
-
-    private final String rel0Name;
-    private final String rel1Name;
-
-    private final int[] buildKeys;
-    private final int[] probeKeys;
-
-    private final IBinaryComparator[] comparators;
-
-    private ITuplePartitionComputer buildHpc;
-    private ITuplePartitionComputer probeHpc;
-
-    private final RecordDescriptor buildRd;
-    private final RecordDescriptor probeRd;
-
-    private RunFileWriter[] buildRFWriters; //writing spilled build partitions
-    private RunFileWriter[] probeRFWriters; //writing spilled probe partitions
-
-    private final boolean isLeftOuter;
-    private final INullWriter[] nullWriters1;
-
-    private ByteBuffer[] memBuffs; //Memory buffers for build
-    private int[] curPBuff; //Current (last) Buffer for each partition
-    private int[] nextBuff; //Next buffer in the partition's buffer chain
-    private int[] buildPSizeInTups; //Size of build partitions (in tuples)
-    private int[] probePSizeInTups; //Size of probe partitions (in tuples)
-    private int nextFreeBuffIx; //Index of next available free buffer to allocate/use
-    private BitSet pStatus; //0=resident, 1=spilled
-    private int numOfPartitions;
-    private int memForJoin;
-    private InMemoryHashJoin inMemJoiner; //Used for joining resident partitions
-
-    private final FrameTupleAccessor accessorBuild;
-    private final FrameTupleAccessor accessorProbe;
-    private FrameTupleAppender buildTupAppender;
-    private FrameTupleAppender probeTupAppenderToResident;
-    private FrameTupleAppender probeTupAppenderToSpilled;
-
-    private int numOfSpilledParts;
-    private ByteBuffer[] sPartBuffs;    //Buffers for probe spilled partitions (one buffer per spilled partition)
-    private ByteBuffer probeResBuff;    //Buffer for probe resident partition tuples
-    private ByteBuffer reloadBuffer;    //Buffer for reloading spilled partitions during partition tuning 
-
-    private int[] buildPSizeInFrames; //Used for partition tuning
-    private int freeFramesCounter; //Used for partition tuning
-
-    public OptimizedHybridHashJoin(IHyracksTaskContext ctx, int memForJoin, int numOfPartitions, String rel0Name,
-            String rel1Name, int[] keys0, int[] keys1, IBinaryComparator[] comparators, RecordDescriptor buildRd,
-            RecordDescriptor probeRd, ITuplePartitionComputer probeHpc, ITuplePartitionComputer buildHpc) {
-        this.ctx = ctx;
-        this.memForJoin = memForJoin;
-        this.buildRd = buildRd;
-        this.probeRd = probeRd;
-        this.buildHpc = probeHpc;
-        this.probeHpc = buildHpc;
-        this.buildKeys = keys0;
-        this.probeKeys = keys1;
-        this.comparators = comparators;
-        this.rel0Name = rel0Name;
-        this.rel1Name = rel1Name;
-
-        this.numOfPartitions = numOfPartitions;
-        this.buildRFWriters = new RunFileWriter[numOfPartitions];
-        this.probeRFWriters = new RunFileWriter[numOfPartitions];
-
-        this.accessorBuild = new FrameTupleAccessor(ctx.getFrameSize(), buildRd);
-        this.accessorProbe = new FrameTupleAccessor(ctx.getFrameSize(), probeRd);
-
-        this.isLeftOuter = false;
-        this.nullWriters1 = null;
-
-    }
-
-    public OptimizedHybridHashJoin(IHyracksTaskContext ctx, int memForJoin, int numOfPartitions, String rel0Name,
-            String rel1Name, int[] keys0, int[] keys1, IBinaryComparator[] comparators, RecordDescriptor buildRd,
-            RecordDescriptor probeRd, ITuplePartitionComputer probeHpc, ITuplePartitionComputer buildHpc,
-            boolean isLeftOuter, INullWriterFactory[] nullWriterFactories1) {
-        this.ctx = ctx;
-        this.memForJoin = memForJoin;
-        this.buildRd = buildRd;
-        this.probeRd = probeRd;
-        this.buildHpc = probeHpc;
-        this.probeHpc = buildHpc;
-        this.buildKeys = keys0;
-        this.probeKeys = keys1;
-        this.comparators = comparators;
-        this.rel0Name = rel0Name;
-        this.rel1Name = rel1Name;
-
-        this.numOfPartitions = numOfPartitions;
-        this.buildRFWriters = new RunFileWriter[numOfPartitions];
-        this.probeRFWriters = new RunFileWriter[numOfPartitions];
-
-        this.accessorBuild = new FrameTupleAccessor(ctx.getFrameSize(), buildRd);
-        this.accessorProbe = new FrameTupleAccessor(ctx.getFrameSize(), probeRd);
-
-        this.isLeftOuter = isLeftOuter;
-
-        this.nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
-        if (isLeftOuter) {
-            for (int i = 0; i < nullWriterFactories1.length; i++) {
-                nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
-            }
-        }
-    }
-
-    public void initBuild() {
-        memBuffs = new ByteBuffer[memForJoin];
-        curPBuff = new int[numOfPartitions];
-        nextBuff = new int[memForJoin];
-        pStatus = new BitSet(numOfPartitions);
-        buildPSizeInTups = new int[numOfPartitions];
-
-        buildPSizeInFrames = new int[numOfPartitions];
-        freeFramesCounter = memForJoin - numOfPartitions;
-
-        for (int i = 0; i < numOfPartitions; i++) { //Allocating one buffer per partition and setting as the head of the chain of buffers for that partition
-            memBuffs[i] = ctx.allocateFrame();
-            curPBuff[i] = i;
-            nextBuff[i] = -1;
-            buildPSizeInFrames[i] = 1; //The dedicated initial buffer
-        }
-
-        nextFreeBuffIx = ((numOfPartitions < memForJoin) ? numOfPartitions : NO_MORE_FREE_BUFFER); //Setting the chain of unallocated frames
-        for (int i = numOfPartitions; i < memBuffs.length; i++) {
-            nextBuff[i] = UNALLOCATED_FRAME;
-        }
-
-        buildTupAppender = new FrameTupleAppender(ctx.getFrameSize());
-
-    }
-
-    public void build(ByteBuffer buffer) throws HyracksDataException {
-        accessorBuild.reset(buffer);
-        int tupleCount = accessorBuild.getTupleCount();
-        for (int i = 0; i < tupleCount; ++i) {
-            int pid = buildHpc.partition(accessorBuild, i, numOfPartitions);
-            processTuple(i, pid);
-            buildPSizeInTups[pid]++;
-        }
-
-    }
-
-    private void processTuple(int tid, int pid) throws HyracksDataException {
-        ByteBuffer partition = memBuffs[curPBuff[pid]]; //Getting current buffer for the target partition
-
-        if (!pStatus.get(pid)) { //resident partition
-            buildTupAppender.reset(partition, false);
-            while (true) {
-                if (buildTupAppender.append(accessorBuild, tid)) { //Tuple added to resident partition successfully
-                    break;
-                }
-                //partition does not have enough room
-                int newBuffIx = allocateFreeBuffer(pid);
-                if (newBuffIx == NO_MORE_FREE_BUFFER) { //Spill one partition
-                    int pidToSpill = selectPartitionToSpill();
-                    if (pidToSpill == -1) { //No more partition to spill
-                        throw new HyracksDataException("not enough memory for Hash Join (Allocation exceeds the limit)");
-                    }
-                    spillPartition(pidToSpill);
-                    buildTupAppender.reset(memBuffs[pidToSpill], true);
-                    processTuple(tid, pid);
-                    break;
-                }  //New Buffer allocated successfully
-                partition = memBuffs[curPBuff[pid]]; //Current Buffer for the partition is now updated by allocateFreeBuffer() call above
-                buildTupAppender.reset(partition, true);
-                if (!buildTupAppender.append(accessorBuild, tid)) {
-                    throw new HyracksDataException("Invalid State (Can not append to newly allocated buffer)");
-                }
-                buildPSizeInFrames[pid]++;
-                break;
-            }
-        } else { //spilled partition
-            boolean needClear = false;
-            while (true) {
-                buildTupAppender.reset(partition, needClear);
-                if (buildTupAppender.append(accessorBuild, tid)) {
-                    break;
-                }
-                //Dedicated in-memory buffer for the partition is full, needed to be flushed first 
-                buildWrite(pid, partition);
-                partition.clear();
-                needClear = true;
-                buildPSizeInFrames[pid]++;
-            }
-        }
-    }
-
-    private int allocateFreeBuffer(int pid) {
-        if (nextFreeBuffIx != NO_MORE_FREE_BUFFER) {
-            if (memBuffs[nextFreeBuffIx] == null) {
-                memBuffs[nextFreeBuffIx] = ctx.allocateFrame();
-            }
-            int curPartBuffIx = curPBuff[pid];
-            curPBuff[pid] = nextFreeBuffIx;
-            int oldNext = nextBuff[nextFreeBuffIx];
-            nextBuff[nextFreeBuffIx] = curPartBuffIx;
-            if (oldNext == UNALLOCATED_FRAME) {
-                nextFreeBuffIx++;
-                if (nextFreeBuffIx == memForJoin) { //No more free buffer
-                    nextFreeBuffIx = NO_MORE_FREE_BUFFER;
-                }
-            } else {
-                nextFreeBuffIx = oldNext;
-            }
-            (memBuffs[curPBuff[pid]]).clear();
-
-            freeFramesCounter--;
-            return (curPBuff[pid]);
-        } else {
-            return NO_MORE_FREE_BUFFER; //A partitions needs to be spilled (if feasible)
-        }
-    }
-
-    private int selectPartitionToSpill() {
-        int maxSize = -1;
-        int partitionToSpill = -1;
-        for (int i = 0; i < buildPSizeInTups.length; i++) { //Find the largest partition, to spill
-            if (!pStatus.get(i) && (buildPSizeInTups[i] > maxSize)) {
-                maxSize = buildPSizeInTups[i];
-                partitionToSpill = i;
-            }
-        }
-        return partitionToSpill;
-    }
-
-    private void spillPartition(int pid) throws HyracksDataException {
-        int curBuffIx = curPBuff[pid];
-        ByteBuffer buff = null;
-        while (curBuffIx != END_OF_PARTITION) {
-            buff = memBuffs[curBuffIx];
-            buildWrite(pid, buff);
-            buff.clear();
-
-            int freedBuffIx = curBuffIx;
-            curBuffIx = nextBuff[curBuffIx];
-
-            if (freedBuffIx != pid) {
-                nextBuff[freedBuffIx] = nextFreeBuffIx;
-                nextFreeBuffIx = freedBuffIx;
-                freeFramesCounter++;
-            }
-        }
-        curPBuff[pid] = pid;
-        pStatus.set(pid);
-    }
-
-    private void buildWrite(int pid, ByteBuffer buff) throws HyracksDataException {
-        RunFileWriter writer = buildRFWriters[pid];
-        if (writer == null) {
-            FileReference file = ctx.getJobletContext().createManagedWorkspaceFile(rel0Name);
-            writer = new RunFileWriter(file, ctx.getIOManager());
-            writer.open();
-            buildRFWriters[pid] = writer;
-        }
-        writer.nextFrame(buff);
-    }
-
-    public void closeBuild() throws HyracksDataException {
-        for (int i = 0; i < numOfPartitions; i++) { //Remove Empty Partitions' allocated frame
-            if (buildPSizeInTups[i] == 0) {
-                buildPSizeInFrames[i]--;
-                nextBuff[curPBuff[i]] = nextFreeBuffIx;
-                nextFreeBuffIx = curPBuff[i];
-                curPBuff[i] = INVALID_BUFFER;
-                freeFramesCounter++;
-            }
-        }
-
-        ByteBuffer buff = null;
-        for (int i = pStatus.nextSetBit(0); i >= 0; i = pStatus.nextSetBit(i + 1)) { //flushing and DeAllocating the dedicated buffers for the spilled partitions
-            buff = memBuffs[i];
-            accessorBuild.reset(buff);
-            if (accessorBuild.getTupleCount() > 0) {
-                buildWrite(i, buff);
-                buildPSizeInFrames[i]++;
-            }
-            nextBuff[i] = nextFreeBuffIx;
-            nextFreeBuffIx = i;
-            freeFramesCounter++;
-            curPBuff[i] = INVALID_BUFFER;
-
-            if (buildRFWriters[i] != null) {
-                buildRFWriters[i].close();
-            }
-        }
-
-        partitionTune(); //Trying to bring back as many spilled partitions as possible, making them resident
-
-        int inMemTupCount = 0;
-        numOfSpilledParts = 0;
-
-        for (int i = 0; i < numOfPartitions; i++) {
-            if (!pStatus.get(i)) {
-                inMemTupCount += buildPSizeInTups[i];
-            } else {
-                numOfSpilledParts++;
-            }
-        }
-
-        createInMemoryJoiner(inMemTupCount);
-        cacheInMemJoin();
-    }
-
-    private void partitionTune() throws HyracksDataException {
-        reloadBuffer = ctx.allocateFrame();
-        ArrayList<Integer> reloadSet = selectPartitionsToReload();
-        for (int i = 0; i < reloadSet.size(); i++) {
-            int pid = reloadSet.get(i);
-            int[] buffsToLoad = new int[buildPSizeInFrames[pid]];
-            for (int j = 0; j < buffsToLoad.length; j++) {
-                buffsToLoad[j] = nextFreeBuffIx;
-                int oldNext = nextBuff[nextFreeBuffIx];
-                if (oldNext == UNALLOCATED_FRAME) {
-                    nextFreeBuffIx++;
-                    if (nextFreeBuffIx == memForJoin) { //No more free buffer
-                        nextFreeBuffIx = NO_MORE_FREE_BUFFER;
-                    }
-                } else {
-                    nextFreeBuffIx = oldNext;
-                }
-
-            }
-            curPBuff[pid] = buffsToLoad[0];
-            for (int k = 1; k < buffsToLoad.length; k++) {
-                nextBuff[buffsToLoad[k - 1]] = buffsToLoad[k];
-            }
-            loadPartitionInMem(pid, buildRFWriters[pid], buffsToLoad);
-        }
-        reloadSet.clear();
-        reloadSet = null;
-    }
-
-    private void loadPartitionInMem(int pid, RunFileWriter wr, int[] buffs) throws HyracksDataException {
-        RunFileReader r = wr.createReader();
-        r.open();
-        int counter = 0;
-        ByteBuffer mBuff = null;
-        reloadBuffer.clear();
-        while (r.nextFrame(reloadBuffer)) {
-            mBuff = memBuffs[buffs[counter]];
-            if (mBuff == null) {
-                mBuff = ctx.allocateFrame();
-                memBuffs[buffs[counter]] = mBuff;
-            }
-            FrameUtils.copy(reloadBuffer, mBuff);
-            counter++;
-            reloadBuffer.clear();
-        }
-
-        int curNext = nextBuff[buffs[buffs.length - 1]];
-        nextBuff[buffs[buffs.length - 1]] = END_OF_PARTITION;
-        nextFreeBuffIx = curNext;
-
-        r.close();
-        pStatus.set(pid, false);
-        buildRFWriters[pid] = null;
-    }
-
-    private ArrayList<Integer> selectPartitionsToReload() {
-        ArrayList<Integer> p = new ArrayList<Integer>();
-        for (int i = pStatus.nextSetBit(0); i >= 0; i = pStatus.nextSetBit(i + 1)) {
-            if (buildPSizeInFrames[i]>0 && (freeFramesCounter - buildPSizeInFrames[i] >= 0) ) {
-                p.add(i);
-                freeFramesCounter -= buildPSizeInFrames[i];
-            }
-            if (freeFramesCounter < 1) { //No more free buffer available
-                return p;
-            }
-        }
-        return p;
-    }
-
-    private void createInMemoryJoiner(int inMemTupCount) throws HyracksDataException {
-        ISerializableTable table = new SerializableHashTable(inMemTupCount, ctx);
-        this.inMemJoiner = new InMemoryHashJoin(ctx, inMemTupCount,
-                new FrameTupleAccessor(ctx.getFrameSize(), probeRd), probeHpc, new FrameTupleAccessor(
-                        ctx.getFrameSize(), buildRd), buildHpc, new FrameTuplePairComparator(probeKeys, buildKeys,
-                        comparators), isLeftOuter, nullWriters1, table);
-    }
-
-    private void cacheInMemJoin() throws HyracksDataException {
-
-        for (int pid = 0; pid < numOfPartitions; pid++) {
-            if (!pStatus.get(pid)) {
-                int nextBuffIx = curPBuff[pid];
-                while (nextBuffIx > -1) { //It is not Invalid or End_Of_Partition
-                    inMemJoiner.build(memBuffs[nextBuffIx]);
-                    nextBuffIx = nextBuff[nextBuffIx];
-                }
-            }
-        }
-    }
-
-    public void initProbe() {
-
-        sPartBuffs = new ByteBuffer[numOfSpilledParts];
-        for (int i = 0; i < numOfSpilledParts; i++) {
-            sPartBuffs[i] = ctx.allocateFrame();
-        }
-        curPBuff = new int[numOfPartitions];
-        int nextBuffIxToAlloc = 0;
-        /* We only need to allocate one frame per spilled partition. 
-         * Resident partitions do not need frames in probe, as their tuples join 
-         * immediately with the resident build tuples using the inMemoryHashJoin */
-        for (int i = 0; i < numOfPartitions; i++) { 
-            curPBuff[i] = (pStatus.get(i)) ? nextBuffIxToAlloc++ : BUFFER_FOR_RESIDENT_PARTS;
-        }
-        probePSizeInTups = new int[numOfPartitions];
-        probeRFWriters = new RunFileWriter[numOfPartitions];
-
-        probeResBuff = ctx.allocateFrame();
-
-        probeTupAppenderToResident = new FrameTupleAppender(ctx.getFrameSize());
-        probeTupAppenderToResident.reset(probeResBuff, true);
-
-        probeTupAppenderToSpilled = new FrameTupleAppender(ctx.getFrameSize());
-
-    }
-
-    public void probe(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
-
-        accessorProbe.reset(buffer);
-        int tupleCount = accessorProbe.getTupleCount();
-
-        if (numOfSpilledParts == 0) {
-            inMemJoiner.join(buffer, writer);
-            return;
-        }
-
-        for (int i = 0; i < tupleCount; ++i) {
-            int pid = probeHpc.partition(accessorProbe, i, numOfPartitions);
-
-            if (buildPSizeInTups[pid] > 0) { //Tuple has potential match from previous phase
-                if (pStatus.get(pid)) { //pid is Spilled
-                    boolean needToClear = false;
-                    ByteBuffer buff = sPartBuffs[curPBuff[pid]];
-                    while (true) {
-                        probeTupAppenderToSpilled.reset(buff, needToClear);
-                        if (probeTupAppenderToSpilled.append(accessorProbe, i)) {
-                            break;
-                        } 
-                        probeWrite(pid, buff);
-                        buff.clear();
-                        needToClear = true;
-                    }
-                } else { //pid is Resident
-                    while (true) {
-                        if (probeTupAppenderToResident.append(accessorProbe, i)){
-                            break;
-                        }
-                        inMemJoiner.join(probeResBuff, writer);
-                        probeTupAppenderToResident.reset(probeResBuff, true);
-                    }
-
-                }
-                probePSizeInTups[pid]++;
-            }
-
-        }
-
-    }
-
-    public void closeProbe(IFrameWriter writer) throws HyracksDataException { //We do NOT join the spilled partitions here, that decision is made at the descriptor level (which join technique to use)
-        inMemJoiner.join(probeResBuff, writer);
-        inMemJoiner.closeJoin(writer);
-
-        for (int pid = pStatus.nextSetBit(0); pid >= 0; pid = pStatus.nextSetBit(pid + 1)) {
-            ByteBuffer buff = sPartBuffs[curPBuff[pid]];
-            accessorProbe.reset(buff);
-            if (accessorProbe.getTupleCount() > 0) {
-                probeWrite(pid, buff);
-            }
-            closeProbeWriter(pid);
-        }
-    }
-
-    private void probeWrite(int pid, ByteBuffer buff) throws HyracksDataException {
-        RunFileWriter pWriter = probeRFWriters[pid];
-        if (pWriter == null) {
-            FileReference file = ctx.createManagedWorkspaceFile(rel1Name);
-            pWriter = new RunFileWriter(file, ctx.getIOManager());
-            pWriter.open();
-            probeRFWriters[pid] = pWriter;
-        }
-        pWriter.nextFrame(buff);
-    }
-
-    private void closeProbeWriter(int pid) throws HyracksDataException {
-        RunFileWriter writer = probeRFWriters[pid];
-        if (writer != null) {
-            writer.close();
-        }
-    }
-
-    public RunFileReader getBuildRFReader(int pid) throws HyracksDataException {
-        return ((buildRFWriters[pid] == null) ? null : (buildRFWriters[pid]).createReader());
-    }
-
-    public long getBuildPartitionSize(int pid) {
-        return ((buildRFWriters[pid] == null) ? 0 : buildRFWriters[pid].getFileSize());
-    }
-
-    public int getBuildPartitionSizeInTup(int pid) {
-        return (buildPSizeInTups[pid]);
-    }
-
-    public RunFileReader getProbeRFReader(int pid) throws HyracksDataException {
-        return ((probeRFWriters[pid] == null) ? null : (probeRFWriters[pid]).createReader());
-    }
-
-    public long getProbePartitionSize(int pid) {
-        return ((probeRFWriters[pid] == null) ? 0 : probeRFWriters[pid].getFileSize());
-    }
-
-    public int getProbePartitionSizeInTup(int pid) {
-        return (probePSizeInTups[pid]);
-    }
-
-    public int getMaxBuildPartitionSize() {
-        int max = buildPSizeInTups[0];
-        for (int i = 1; i < buildPSizeInTups.length; i++) {
-            if (buildPSizeInTups[i] > max) {
-                max = buildPSizeInTups[i];
-            }
-        }
-        return max;
-    }
-
-    public int getMaxProbePartitionSize() {
-        int max = probePSizeInTups[0];
-        for (int i = 1; i < probePSizeInTups.length; i++) {
-            if (probePSizeInTups[i] > max) {
-                max = probePSizeInTups[i];
-            }
-        }
-        return max;
-    }
-
-    public BitSet getPartitinStatus() {
-        return pStatus;
-    }
-
-    public String debugGetStats() {
-        int numOfResidentPartitions = 0;
-        int numOfSpilledPartitions = 0;
-        double sumOfBuildSpilledSizes = 0;
-        double sumOfProbeSpilledSizes = 0;
-        int numOfInMemTups = 0;
-        for (int i = 0; i < numOfPartitions; i++) {
-            if (pStatus.get(i)) { //Spilled
-                numOfSpilledPartitions++;
-                sumOfBuildSpilledSizes += buildPSizeInTups[i];
-                sumOfProbeSpilledSizes += probePSizeInTups[i];
-            } else { //Resident
-                numOfResidentPartitions++;
-                numOfInMemTups += buildPSizeInTups[i];
-            }
-        }
-
-        double avgBuildSpSz = sumOfBuildSpilledSizes / numOfSpilledPartitions;
-        double avgProbeSpSz = sumOfProbeSpilledSizes / numOfSpilledPartitions;
-        String s = "Resident Partitions:\t" + numOfResidentPartitions + "\nSpilled Partitions:\t"
-                + numOfSpilledPartitions + "\nAvg Build Spilled Size:\t" + avgBuildSpSz + "\nAvg Probe Spilled Size:\t"
-                + avgProbeSpSz + "\nIn-Memory Tups:\t" + numOfInMemTups + "\nNum of Free Buffers:\t"
-                + freeFramesCounter;
-        return s;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
deleted file mode 100644
index 3a7ee2c..0000000
--- a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ /dev/null
@@ -1,635 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.std.join;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.BitSet;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.ActivityId;
-import edu.uci.ics.hyracks.api.dataflow.IActivityGraphBuilder;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.TaskId;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFamily;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTuplePairComparator;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFamily;
-import edu.uci.ics.hyracks.dataflow.common.data.partition.RepartitionComputerGeneratorFactory;
-import edu.uci.ics.hyracks.dataflow.common.io.RunFileReader;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractActivityNode;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractStateObject;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.dataflow.std.structures.ISerializableTable;
-import edu.uci.ics.hyracks.dataflow.std.structures.SerializableHashTable;
-
-/**
- * @author pouria
- *         This class guides the joining process, and switches between different
- *         joining techniques, w.r.t the implemented optimizations and skew in size of the
- *         partitions.
- *         - Operator overview:
- *         Assume we are trying to do (R Join S), with M buffers available, while we have an estimate on the size
- *         of R (in terms of buffers). HHJ (Hybrid Hash Join) has two main phases: Build and Probe, where in our implementation Probe phase
- *         can apply HHJ recursively, based on the value of M and size of R and S. HHJ phases proceed as follow:
- *         BUILD:
- *         Calculate number of partitions (Based on the size of R, fudge factor and M) [See Shapiro's paper for the detailed discussion].
- *         Initialize the build phase (one frame per partition, all partitions considered resident at first)
- *         Read tuples of R, frame by frame, and hash each tuple (based on a given hash function) to find
- *         its target partition and try to append it to that partition:
- *         If target partition's buffer is full, try to allocate a new buffer for it.
- *         if no free buffer is available, find the largest resident partition and spill it. Using its freed
- *         buffers after spilling, allocate a new buffer for the target partition.
- *         Being done with R, close the build phase. (During closing we write the very last buffer of each
- *         spilled partition to the disk, and we do partition tuning, where we try to bring back as many buffers, belonging to
- *         spilled partitions as possible into memory, based on the free buffers - We will stop at the point where remaining free buffers is not enough
- *         for reloading an entire partition back into memory)
- *         Create the hash table for the resident partitions (basically we create an in-memory hash join here)
- *         PROBE:
- *         Initialize the probe phase on S (mainly allocate one buffer per spilled partition, and one buffer
- *         for the whole resident partitions)
- *         Read tuples of S, frame by frame and hash each tuple T to its target partition P
- *         if P is a resident partition, pass T to the in-memory hash join and generate the output record,
- *         if any matching(s) record found
- *         if P is spilled, write T to the dedicated buffer for P (on the probe side)
- *         Once scanning of S is done, we try to join partition pairs (Ri, Si) of the spilled partitions:
- *         if any of Ri or Si is smaller than M, then we simply use an in-memory hash join to join them
- *         otherwise we apply HHJ recursively:
- *         if after applying HHJ recursively, we do not gain enough size reduction (max size of the
- *         resulting partitions were more than 80% of the initial Ri,Si size) then we switch to
- *         nested loop join for joining.
- *         (At each step of partition-pair joining, we consider role reversal, which means if size of Si were
- *         greater than Ri, then we make sure that we switch the roles of build/probe between them)
- */
-
-public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor {
-    private static final int BUILD_AND_PARTITION_ACTIVITY_ID = 0;
-    private static final int PARTITION_AND_JOIN_ACTIVITY_ID = 1;
-
-    private static final long serialVersionUID = 1L;
-    private static final double NLJ_SWITCH_THRESHOLD = 0.8;
-
-    private static final String PROBE_REL = "RelR";
-    private static final String BUILD_REL = "RelS";
-
-    private final int memsize;
-    private final int inputsize0;
-    private final double fudgeFactor;
-    private final int[] probeKeys;
-    private final int[] buildKeys;
-    private final IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories;
-    private final IBinaryComparatorFactory[] comparatorFactories; //For in-mem HJ
-    private final ITuplePairComparatorFactory tuplePairComparatorFactory0; //For NLJ in probe
-    private final ITuplePairComparatorFactory tuplePairComparatorFactory1; //For NLJ in probe
-
-    private final boolean isLeftOuter;
-    private final INullWriterFactory[] nullWriterFactories1;
-
-    public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memsize, int inputsize0,
-            double factor, int[] keys0, int[] keys1, IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories,
-            IBinaryComparatorFactory[] comparatorFactories, RecordDescriptor recordDescriptor,
-            ITuplePairComparatorFactory tupPaircomparatorFactory0,
-            ITuplePairComparatorFactory tupPaircomparatorFactory1, boolean isLeftOuter,
-            INullWriterFactory[] nullWriterFactories1) throws HyracksDataException {
-
-        super(spec, 2, 1);
-        this.memsize = memsize;
-        this.inputsize0 = inputsize0;
-        this.fudgeFactor = factor;
-        this.probeKeys = keys0;
-        this.buildKeys = keys1;
-        this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
-        this.comparatorFactories = comparatorFactories;
-        this.tuplePairComparatorFactory0 = tupPaircomparatorFactory0;
-        this.tuplePairComparatorFactory1 = tupPaircomparatorFactory1;
-        recordDescriptors[0] = recordDescriptor;
-        this.isLeftOuter = isLeftOuter;
-        this.nullWriterFactories1 = nullWriterFactories1;
-
-    }
-
-    public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memsize, int inputsize0,
-            double factor, int[] keys0, int[] keys1, IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories,
-            IBinaryComparatorFactory[] comparatorFactories, RecordDescriptor recordDescriptor,
-            ITuplePairComparatorFactory tupPaircomparatorFactory0, ITuplePairComparatorFactory tupPaircomparatorFactory1)
-            throws HyracksDataException {
-
-        super(spec, 2, 1);
-        this.memsize = memsize;
-        this.inputsize0 = inputsize0;
-        this.fudgeFactor = factor;
-        this.probeKeys = keys0;
-        this.buildKeys = keys1;
-        this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
-        this.comparatorFactories = comparatorFactories;
-        this.tuplePairComparatorFactory0 = tupPaircomparatorFactory0;
-        this.tuplePairComparatorFactory1 = tupPaircomparatorFactory1;
-        recordDescriptors[0] = recordDescriptor;
-        this.isLeftOuter = false;
-        this.nullWriterFactories1 = null;
-    }
-
-    @Override
-    public void contributeActivities(IActivityGraphBuilder builder) {
-        ActivityId buildAid = new ActivityId(odId, BUILD_AND_PARTITION_ACTIVITY_ID);
-        ActivityId probeAid = new ActivityId(odId, PARTITION_AND_JOIN_ACTIVITY_ID);
-        PartitionAndBuildActivityNode phase1 = new PartitionAndBuildActivityNode(buildAid, probeAid);
-        ProbeAndJoinActivityNode phase2 = new ProbeAndJoinActivityNode(probeAid, buildAid);
-
-        builder.addActivity(this, phase1);
-        builder.addSourceEdge(0, phase1, 0);
-
-        builder.addActivity(this, phase2);
-        builder.addSourceEdge(1, phase2, 0);
-
-        builder.addBlockingEdge(phase1, phase2);
-
-        builder.addTargetEdge(0, phase2, 0);
-
-    }
-
-    //memorySize is the memory for join (we have already excluded the 2 buffers for in/out)
-    private int getNumberOfPartitions(int memorySize, int buildSize, double factor, int nPartitions)
-            throws HyracksDataException {
-        int numberOfPartitions = 0;
-        if (memorySize <= 1) {
-            throw new HyracksDataException("not enough memory is available for Hybrid Hash Join");
-        }
-        if (memorySize > buildSize) {
-            return 1; //We will switch to in-Mem HJ eventually
-        }
-        numberOfPartitions = (int) (Math.ceil((double) (buildSize * factor / nPartitions - memorySize)
-                / (double) (memorySize - 1)));
-        if (numberOfPartitions <= 0) {
-            numberOfPartitions = 1; //becomes in-memory hash join
-        }
-        if (numberOfPartitions > memorySize) {
-            numberOfPartitions = (int) Math.ceil(Math.sqrt(buildSize * factor / nPartitions));
-            return (numberOfPartitions < memorySize ? numberOfPartitions : memorySize);
-        }
-        return numberOfPartitions;
-    }
-
-    public static class BuildAndPartitionTaskState extends AbstractStateObject {
-
-        private int memForJoin;
-        private int numOfPartitions;
-        private OptimizedHybridHashJoin hybridHJ;
-
-        public BuildAndPartitionTaskState() {
-        }
-
-        private BuildAndPartitionTaskState(JobId jobId, TaskId taskId) {
-            super(jobId, taskId);
-        }
-
-        @Override
-        public void toBytes(DataOutput out) throws IOException {
-
-        }
-
-        @Override
-        public void fromBytes(DataInput in) throws IOException {
-
-        }
-
-    }
-
-    /*
-     * Build phase of Hybrid Hash Join:
-     * Creating an instance of Hybrid Hash Join, using Shapiro's formula
-     * to get the optimal number of partitions, build relation is read and
-     * partitioned, and hybrid hash join instance gets ready for the probing.
-     * (See OptimizedHybridHashJoin for the details on different steps)
-     */
-    private class PartitionAndBuildActivityNode extends AbstractActivityNode {
-        private static final long serialVersionUID = 1L;
-
-        private final ActivityId probeAid;
-
-        public PartitionAndBuildActivityNode(ActivityId id, ActivityId probeAid) {
-            super(id);
-            this.probeAid = probeAid;
-        }
-
-        @Override
-        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-                IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) {
-
-            final RecordDescriptor probeRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
-            final RecordDescriptor buildRd = recordDescProvider.getInputRecordDescriptor(probeAid, 0);
-
-            final IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
-            for (int i = 0; i < comparatorFactories.length; i++) {
-                comparators[i] = comparatorFactories[i].createBinaryComparator();
-            }
-
-            final INullWriter[] nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
-            if (isLeftOuter) {
-                for (int i = 0; i < nullWriterFactories1.length; i++) {
-                    nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
-                }
-            }
-
-            IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
-                private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(ctx.getJobletContext()
-                        .getJobId(), new TaskId(getActivityId(), partition));
-
-                ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
-                        hashFunctionGeneratorFactories).createPartitioner(0);
-                ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
-                        hashFunctionGeneratorFactories).createPartitioner(0);
-
-                @Override
-                public void open() throws HyracksDataException {
-                    if (memsize <= 2) { //Dedicated buffers: One buffer to read and one buffer for output
-                        throw new HyracksDataException("not enough memory for Hybrid Hash Join");
-                    }
-                    state.memForJoin = memsize - 2;
-                    state.numOfPartitions = getNumberOfPartitions(state.memForJoin, inputsize0, fudgeFactor,
-                            nPartitions);
-                    state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
-                            PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
-                            buildHpc);
-                    state.hybridHJ.initBuild();
-                }
-
-                @Override
-                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                    state.hybridHJ.build(buffer);
-                }
-
-                @Override
-                public void close() throws HyracksDataException {
-                    state.hybridHJ.closeBuild();
-                    ctx.setStateObject(state);
-                }
-
-                @Override
-                public void fail() throws HyracksDataException {
-                }
-
-            };
-            return op;
-        }
-    }
-
-    /*
-     * Probe phase of Hybrid Hash Join:
-     * Reading the probe side and partitioning it, resident tuples get
-     * joined with the build side residents (through formerly created HybridHashJoin in the build phase)
-     * and spilled partitions get written to run files. During the close() call, pairs of spilled partition
-     * (build side spilled partition and its corresponding probe side spilled partition) join, by applying
-     * Hybrid Hash Join recursively on them.
-     */
-    private class ProbeAndJoinActivityNode extends AbstractActivityNode {
-
-        private static final long serialVersionUID = 1L;
-
-        private final ActivityId buildAid;
-
-        public ProbeAndJoinActivityNode(ActivityId id, ActivityId buildAid) {
-            super(id);
-            this.buildAid = buildAid;
-        }
-
-        @Override
-        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-                IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) {
-
-            final RecordDescriptor probeRd = recordDescProvider.getInputRecordDescriptor(buildAid, 0);
-            final RecordDescriptor buildRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
-            final IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
-            final ITuplePairComparator nljComparator0 = tuplePairComparatorFactory0.createTuplePairComparator(ctx);
-            final ITuplePairComparator nljComparator1 = tuplePairComparatorFactory1.createTuplePairComparator(ctx);
-
-            for (int i = 0; i < comparatorFactories.length; i++) {
-                comparators[i] = comparatorFactories[i].createBinaryComparator();
-            }
-
-            final INullWriter[] nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
-            if (isLeftOuter) {
-                for (int i = 0; i < nullWriterFactories1.length; i++) {
-                    nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
-                }
-            }
-
-            IOperatorNodePushable op = new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
-                private BuildAndPartitionTaskState state;
-                private ByteBuffer rPartbuff = ctx.allocateFrame();
-
-                private ITuplePartitionComputerFamily hpcf0 = new FieldHashPartitionComputerFamily(probeKeys,
-                        hashFunctionGeneratorFactories);
-                private ITuplePartitionComputerFamily hpcf1 = new FieldHashPartitionComputerFamily(buildKeys,
-                        hashFunctionGeneratorFactories);
-
-                private ITuplePartitionComputer hpcRep0;
-                private ITuplePartitionComputer hpcRep1;
-
-                @Override
-                public void open() throws HyracksDataException {
-                    state = (BuildAndPartitionTaskState) ctx.getStateObject(new TaskId(new ActivityId(getOperatorId(),
-                            BUILD_AND_PARTITION_ACTIVITY_ID), partition));
-
-                    writer.open();
-                    state.hybridHJ.initProbe();
-
-                }
-
-                @Override
-                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                    state.hybridHJ.probe(buffer, writer);
-                }
-
-                @Override
-                public void fail() throws HyracksDataException {
-                    writer.fail();
-                }
-
-                @Override
-                public void close() throws HyracksDataException {
-
-                    state.hybridHJ.closeProbe(writer);
-
-                    BitSet partitionStatus = state.hybridHJ.getPartitinStatus();
-                    hpcRep0 = new RepartitionComputerGeneratorFactory(state.numOfPartitions, hpcf0)
-                            .createPartitioner(0);
-                    hpcRep1 = new RepartitionComputerGeneratorFactory(state.numOfPartitions, hpcf1)
-                            .createPartitioner(0);
-
-                    rPartbuff.clear();
-                    for (int pid = partitionStatus.nextSetBit(0); pid >= 0; pid = partitionStatus.nextSetBit(pid + 1)) {
-
-                        RunFileReader bReader = state.hybridHJ.getBuildRFReader(pid);
-                        RunFileReader pReader = state.hybridHJ.getProbeRFReader(pid);
-
-                        if (bReader == null || pReader == null) { //either of sides (or both) does not have any tuple, thus no need for joining (no potential match)
-                            continue;
-                        }
-                        int bSize = state.hybridHJ.getBuildPartitionSizeInTup(pid);
-                        int pSize = state.hybridHJ.getProbePartitionSizeInTup(pid);
-                        int beforeMax = (bSize > pSize) ? bSize : pSize;
-                        joinPartitionPair(state.hybridHJ, bReader, pReader, pid, beforeMax, 1);
-
-                    }
-                    writer.close();
-                }
-
-                private void joinPartitionPair(OptimizedHybridHashJoin ohhj, RunFileReader buildSideReader,
-                        RunFileReader probeSideReader, int pid, int beforeMax, int level) throws HyracksDataException {
-                    ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
-                            hashFunctionGeneratorFactories).createPartitioner(level);
-                    ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
-                            hashFunctionGeneratorFactories).createPartitioner(level);
-
-                    long buildPartSize = ohhj.getBuildPartitionSize(pid) / ctx.getFrameSize();
-                    long probePartSize = ohhj.getProbePartitionSize(pid) / ctx.getFrameSize();
-
-                    //Apply in-Mem HJ if possible
-                    if ((buildPartSize < state.memForJoin) || (probePartSize < state.memForJoin)) {
-                        int tabSize = -1;
-                        if (buildPartSize < probePartSize) {
-                            tabSize = ohhj.getBuildPartitionSizeInTup(pid);
-                            if (tabSize == 0) {
-                                throw new HyracksDataException(
-                                        "Trying to join an empty partition. Invalid table size for inMemoryHashJoin.");
-                            }
-                            //Build Side is smaller
-                            applyInMemHashJoin(probeKeys, buildKeys, tabSize, probeRd, buildRd, hpcRep1, hpcRep0,
-                                    buildSideReader, probeSideReader);
-
-                        } else { //Role Reversal
-                            tabSize = ohhj.getProbePartitionSizeInTup(pid);
-                            if (tabSize == 0) {
-                                throw new HyracksDataException(
-                                        "Trying to join an empty partition. Invalid table size for inMemoryHashJoin.");
-                            }
-                            //Probe Side is smaller
-                            applyInMemHashJoin(buildKeys, probeKeys, tabSize, buildRd, probeRd, hpcRep0, hpcRep1,
-                                    probeSideReader, buildSideReader);
-                        }
-                    }
-                    //Apply (Recursive) HHJ
-                    else {
-                        OptimizedHybridHashJoin rHHj;
-                        if (buildPartSize < probePartSize) { //Build Side is smaller
-
-                            int n = getNumberOfPartitions(state.memForJoin, (int) buildPartSize, fudgeFactor,
-                                    nPartitions);
-                            rHHj = new OptimizedHybridHashJoin(ctx, state.memForJoin, n, PROBE_REL, BUILD_REL,
-                                    probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc, buildHpc);
-
-                            buildSideReader.open();
-                            rHHj.initBuild();
-                            rPartbuff.clear();
-                            while (buildSideReader.nextFrame(rPartbuff)) {
-                                rHHj.build(rPartbuff);
-                            }
-
-                            rHHj.closeBuild();
-
-                            probeSideReader.open();
-                            rHHj.initProbe();
-                            rPartbuff.clear();
-                            while (probeSideReader.nextFrame(rPartbuff)) {
-                                rHHj.probe(rPartbuff, writer);
-                            }
-                            rHHj.closeProbe(writer);
-
-                            int maxAfterBuildSize = rHHj.getMaxBuildPartitionSize();
-                            int maxAfterProbeSize = rHHj.getMaxProbePartitionSize();
-                            int afterMax = (maxAfterBuildSize > maxAfterProbeSize) ? maxAfterBuildSize
-                                    : maxAfterProbeSize;
-
-                            BitSet rPStatus = rHHj.getPartitinStatus();
-                            if (afterMax < NLJ_SWITCH_THRESHOLD * beforeMax) {
-                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
-                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
-                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
-
-                                    if (rbrfw == null || rprfw == null) {
-                                        continue;
-                                    }
-
-                                    joinPartitionPair(rHHj, rbrfw, rprfw, rPid, afterMax, (level + 1));
-                                }
-
-                            } else { //Switch to NLJ (Further recursion seems not to be useful)
-                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
-                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
-                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
-
-                                    if (rbrfw == null || rprfw == null) {
-                                        continue;
-                                    }
-
-                                    int buildSideInTups = rHHj.getBuildPartitionSizeInTup(rPid);
-                                    int probeSideInTups = rHHj.getProbePartitionSizeInTup(rPid);
-                                    if (buildSideInTups < probeSideInTups) {
-                                        applyNestedLoopJoin(probeRd, buildRd, state.memForJoin, rbrfw, rprfw,
-                                                nljComparator0);
-                                    } else {
-                                        applyNestedLoopJoin(buildRd, probeRd, state.memForJoin, rprfw, rbrfw,
-                                                nljComparator1);
-                                    }
-                                }
-                            }
-                        } else { //Role Reversal (Probe Side is smaller)
-                            int n = getNumberOfPartitions(state.memForJoin, (int) probePartSize, fudgeFactor,
-                                    nPartitions);
-                            rHHj = new OptimizedHybridHashJoin(ctx, state.memForJoin, n, BUILD_REL, PROBE_REL,
-                                    buildKeys, probeKeys, comparators, buildRd, probeRd, buildHpc, probeHpc);
-
-                            probeSideReader.open();
-                            rHHj.initBuild();
-                            rPartbuff.clear();
-                            while (probeSideReader.nextFrame(rPartbuff)) {
-                                rHHj.build(rPartbuff);
-                            }
-                            rHHj.closeBuild();
-                            rHHj.initProbe();
-                            buildSideReader.open();
-                            rPartbuff.clear();
-                            while (buildSideReader.nextFrame(rPartbuff)) {
-                                rHHj.probe(rPartbuff, writer);
-                            }
-                            rHHj.closeProbe(writer);
-                            int maxAfterBuildSize = rHHj.getMaxBuildPartitionSize();
-                            int maxAfterProbeSize = rHHj.getMaxProbePartitionSize();
-                            int afterMax = (maxAfterBuildSize > maxAfterProbeSize) ? maxAfterBuildSize
-                                    : maxAfterProbeSize;
-                            BitSet rPStatus = rHHj.getPartitinStatus();
-
-                            if (afterMax < NLJ_SWITCH_THRESHOLD * beforeMax) {
-                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
-                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
-                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
-
-                                    if (rbrfw == null || rprfw == null) {
-                                        continue;
-                                    }
-
-                                    joinPartitionPair(rHHj, rprfw, rbrfw, rPid, afterMax, (level + 1));
-                                }
-                            } else { //Switch to NLJ (Further recursion seems not to be effective)
-                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
-                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
-                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
-
-                                    if (rbrfw == null || rprfw == null) {
-                                        continue;
-                                    }
-
-                                    long buildSideSize = rbrfw.getFileSize();
-                                    long probeSideSize = rprfw.getFileSize();
-                                    if (buildSideSize > probeSideSize) {
-                                        applyNestedLoopJoin(buildRd, probeRd, state.memForJoin, rbrfw, rprfw,
-                                                nljComparator1);
-                                    } else {
-                                        applyNestedLoopJoin(probeRd, buildRd, state.memForJoin, rprfw, rbrfw,
-                                                nljComparator0);
-                                    }
-                                }
-                            }
-                        }
-                        buildSideReader.close();
-                        probeSideReader.close();
-                    }
-                }
-
-                private void applyInMemHashJoin(int[] bKeys, int[] pKeys, int tabSize, RecordDescriptor buildRDesc,
-                        RecordDescriptor probeRDesc, ITuplePartitionComputer hpcRepLarger,
-                        ITuplePartitionComputer hpcRepSmaller, RunFileReader bReader, RunFileReader pReader)
-                        throws HyracksDataException {
-
-                    ISerializableTable table = new SerializableHashTable(tabSize, ctx);
-                    InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, tabSize, new FrameTupleAccessor(
-                            ctx.getFrameSize(), probeRDesc), hpcRepLarger, new FrameTupleAccessor(ctx.getFrameSize(),
-                            buildRDesc), hpcRepSmaller, new FrameTuplePairComparator(pKeys, bKeys, comparators),
-                            isLeftOuter, nullWriters1, table);
-
-                    bReader.open();
-                    rPartbuff.clear();
-                    while (bReader.nextFrame(rPartbuff)) {
-                        ByteBuffer copyBuffer = ctx.allocateFrame(); //We need to allocate a copyBuffer, because this buffer gets added to the buffers list in the InMemoryHashJoin
-                        FrameUtils.copy(rPartbuff, copyBuffer);
-                        FrameUtils.makeReadable(copyBuffer);
-                        joiner.build(copyBuffer);
-                        rPartbuff.clear();
-                    }
-                    bReader.close();
-                    rPartbuff.clear();
-                    // probe
-                    pReader.open();
-                    while (pReader.nextFrame(rPartbuff)) {
-                        joiner.join(rPartbuff, writer);
-                        rPartbuff.clear();
-                    }
-                    pReader.close();
-                    joiner.closeJoin(writer);
-                }
-
-                private void applyNestedLoopJoin(RecordDescriptor outerRd, RecordDescriptor innerRd, int memorySize,
-                        RunFileReader outerReader, RunFileReader innerReader, ITuplePairComparator nljComparator)
-                        throws HyracksDataException {
-
-                    NestedLoopJoin nlj = new NestedLoopJoin(ctx, new FrameTupleAccessor(ctx.getFrameSize(), outerRd),
-                            new FrameTupleAccessor(ctx.getFrameSize(), innerRd), nljComparator, memorySize);
-
-                    ByteBuffer cacheBuff = ctx.allocateFrame();
-                    innerReader.open();
-                    while (innerReader.nextFrame(cacheBuff)) {
-                        FrameUtils.makeReadable(cacheBuff);
-                        nlj.cache(cacheBuff);
-                        cacheBuff.clear();
-                    }
-                    nlj.closeCache();
-
-                    ByteBuffer joinBuff = ctx.allocateFrame();
-                    outerReader.open();
-
-                    while (outerReader.nextFrame(joinBuff)) {
-                        FrameUtils.makeReadable(joinBuff);
-                        nlj.join(joinBuff, writer);
-                        joinBuff.clear();
-                    }
-
-                    nlj.closeJoin(writer);
-                    outerReader.close();
-                    innerReader.close();
-                }
-            };
-            return op;
-        }
-    }
-}
diff --git a/hyracks-documentation/pom.xml b/hyracks-documentation/pom.xml
deleted file mode 100644
index c0bc818..0000000
--- a/hyracks-documentation/pom.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-documentation</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-    	<plugin>
-    		<groupId>org.apache.maven.doxia</groupId>
-    		<artifactId>doxia-maven-plugin</artifactId>
-    		<version>1.1.3</version>
-    		<executions>
-    		  <execution>
-    		    <phase>package</phase>
-    		    <goals>
-    		      <goal>render-books</goal>
-    		    </goals>
-    		  </execution>
-    		</executions>
-    		<configuration>
-    		  <books>
-    		    <book>
-    		      <directory>src/books/user-guide</directory>
-    		      <descriptor>src/books/user-guide/doxia-descriptor.xml</descriptor>
-    		      <formats>
-    		        <format>
-    		          <id>pdf</id>
-    		        </format>
-    		        <format>
-    		          <id>xhtml</id>
-    		        </format>
-    		      </formats>
-    		    </book>
-    		  </books>
-    		</configuration>
-    	</plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  </dependencies>
-</project>
diff --git a/hyracks-examples/btree-example/btreeapp/pom.xml b/hyracks-examples/btree-example/btreeapp/pom.xml
deleted file mode 100644
index 8e8616b..0000000
--- a/hyracks-examples/btree-example/btreeapp/pom.xml
+++ /dev/null
@@ -1,87 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
-  <artifactId>btreeapp</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>btree-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
-  		<artifactId>btreehelper</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks-examples/btree-example/btreeclient/pom.xml
deleted file mode 100644
index 42a11dd..0000000
--- a/hyracks-examples/btree-example/btreeclient/pom.xml
+++ /dev/null
@@ -1,84 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
-  <artifactId>btreeclient</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>btree-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
-  		<artifactId>btreehelper</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.examples.btree.client.BTreeBulkLoadExample</mainClass>
-                  <name>btreebulkload</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
deleted file mode 100644
index 86c11d3..0000000
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.examples.btree.client;
-
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-// This example will insert tuples into the primary and secondary index using an insert pipeline
-
-public class InsertPipelineExample {
-    private static class Options {
-        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
-        public String host;
-
-        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
-        public int port = 1098;
-
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
-        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
-        public String ncs;
-
-        @Option(name = "-num-tuples", usage = "Total number of tuples to to be generated for insertion", required = true)
-        public int numTuples;
-
-        @Option(name = "-primary-btreename", usage = "B-Tree file name of primary index", required = true)
-        public String primaryBTreeName;
-
-        @Option(name = "-secondary-btreename", usage = "B-Tree file name of secondary index", required = true)
-        public String secondaryBTreeName;
-    }
-
-    public static void main(String[] args) throws Exception {
-        Options options = new Options();
-        CmdLineParser parser = new CmdLineParser(options);
-        parser.parseArgument(args);
-
-        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
-
-        JobSpecification job = createJob(options);
-
-        long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
-        hcc.waitForCompletion(jobId);
-        long end = System.currentTimeMillis();
-        System.err.println(start + " " + end + " " + (end - start));
-    }
-
-    private static JobSpecification createJob(Options options) {
-
-        JobSpecification spec = new JobSpecification();
-
-        String[] splitNCs = options.ncs.split(",");
-
-        // schema of tuples to be generated: 4 fields with int, string, string,
-        // string
-        // we will use field 2 as primary key to fill a clustered index
-        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, // this field will
-                                                           // not go into B-Tree
-                UTF8StringSerializerDeserializer.INSTANCE, // we will use this
-                                                           // as payload
-                IntegerSerializerDeserializer.INSTANCE, // we will use this
-                                                        // field as key
-                IntegerSerializerDeserializer.INSTANCE, // we will use this as
-                                                        // payload
-                UTF8StringSerializerDeserializer.INSTANCE // we will use this as
-                                                          // payload
-                });
-
-        // generate numRecords records with field 2 being unique, integer values
-        // in [0, 100000], and strings with max length of 10 characters, and
-        // random seed 100
-        DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
-                100000, 10, 100);
-        // run data generator on first nodecontroller given
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
-
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
-        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
-
-        // prepare insertion into primary index
-        // tuples to be put into B-Tree shall have 4 fields
-        int primaryFieldCount = 4;
-        ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-
-        // comparator factories for primary index
-        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // the B-Tree expects its keyfields to be at the front of its input
-        // tuple
-        int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input
-                                                        // tuple to field 0 of
-                                                        // B-Tree tuple, etc.        
-        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
-
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-        // create operator descriptor
-        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, null, primaryFieldPermutation, IndexOperation.INSERT,
-                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
-
-        // prepare insertion into secondary index
-        // tuples to be put into B-Tree shall have 2 fields
-        int secondaryFieldCount = 2;
-        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
-
-        // comparator factories for secondary index
-        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
-        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // the B-Tree expects its keyfields to be at the front of its input
-        // tuple
-        int[] secondaryFieldPermutation = { 1, 2 };
-        IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
-                options.secondaryBTreeName);
-        // create operator descriptor
-        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, recDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOperation.INSERT,
-                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
-
-        // end the insert pipeline at this sink operator
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-        JobHelper.createPartitionConstraint(spec, nullSink, splitNCs);
-
-        // distribute the records from the datagen via hashing to the bulk load
-        // ops
-        IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
-        hashFactories[0] = PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY);
-        IConnectorDescriptor hashConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
-
-        // connect the ops
-
-        spec.connect(hashConn, dataGen, 0, primaryInsert, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryInsert, 0, secondaryInsert, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsert, 0, nullSink, 0);
-
-        spec.addRoot(nullSink);
-
-        return spec;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
deleted file mode 100644
index 105fe9b..0000000
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.examples.btree.client;
-
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-// This example will load a primary index from randomly generated data
-
-public class PrimaryIndexBulkLoadExample {
-    private static class Options {
-        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
-        public String host;
-
-        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
-        public int port = 1098;
-
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
-        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
-        public String ncs;
-
-        @Option(name = "-num-tuples", usage = "Total number of tuples to to be generated for loading", required = true)
-        public int numTuples;
-
-        @Option(name = "-btreename", usage = "B-Tree file name", required = true)
-        public String btreeName;
-
-        @Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
-        public int sbSize = 32768;
-    }
-
-    public static void main(String[] args) throws Exception {
-        Options options = new Options();
-        CmdLineParser parser = new CmdLineParser(options);
-        parser.parseArgument(args);
-
-        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
-
-        JobSpecification job = createJob(options);
-
-        long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
-        hcc.waitForCompletion(jobId);
-        long end = System.currentTimeMillis();
-        System.err.println(start + " " + end + " " + (end - start));
-    }
-
-    private static JobSpecification createJob(Options options) {
-
-        JobSpecification spec = new JobSpecification();
-
-        String[] splitNCs = options.ncs.split(",");
-
-        // schema of tuples to be generated: 5 fields with string, string, int,
-        // int, string
-        // we will use field-index 2 as primary key to fill a clustered index
-        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, // this field will
-                                                           // not go into B-Tree
-                UTF8StringSerializerDeserializer.INSTANCE, // we will use this
-                                                           // as payload
-                IntegerSerializerDeserializer.INSTANCE, // we will use this
-                                                        // field as key
-                IntegerSerializerDeserializer.INSTANCE, // we will use this as
-                                                        // payload
-                UTF8StringSerializerDeserializer.INSTANCE // we will use this as
-                                                          // payload
-                });
-
-        // generate numRecords records with field 2 being unique, integer values
-        // in [0, 100000], and strings with max length of 10 characters, and
-        // random seed 50
-        DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
-                100000, 10, 50);
-        // run data generator on first nodecontroller given
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
-
-        // sort the tuples as preparation for bulk load
-        // fields to sort on
-        int[] sortFields = { 2 };
-        // comparators for sort fields
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
-        comparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
-                comparatorFactories, recDesc);
-        JobHelper.createPartitionConstraint(spec, sorter, splitNCs);
-
-        // tuples to be put into B-Tree shall have 4 fields
-        int fieldCount = 4;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-
-        // create providers for B-Tree
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
-        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
-
-        // the B-Tree expects its keyfields to be at the front of its input
-        // tuple
-        int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple
-                                                 // to field 0 of B-Tree tuple,
-                                                 // etc.
-        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null,
-                fieldPermutation, 0.7f, false, 1000L, dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
-
-        // distribute the records from the datagen via hashing to the bulk load
-        // ops
-        IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
-        hashFactories[0] = PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY);
-        IConnectorDescriptor hashConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
-
-        spec.connect(hashConn, dataGen, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
-
-        spec.addRoot(btreeBulkLoad);
-
-        return spec;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
deleted file mode 100644
index afc3487..0000000
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.examples.btree.client;
-
-import java.io.DataOutput;
-
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-// This example will perform an ordered scan on the primary index
-// i.e. a range-search for [-infinity, +infinity]
-
-public class PrimaryIndexSearchExample {
-    private static class Options {
-        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
-        public String host;
-
-        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
-        public int port = 1098;
-
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
-        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
-        public String ncs;
-
-        @Option(name = "-btreename", usage = "B-Tree file name to search", required = true)
-        public String btreeName;
-    }
-
-    public static void main(String[] args) throws Exception {
-        Options options = new Options();
-        CmdLineParser parser = new CmdLineParser(options);
-        parser.parseArgument(args);
-
-        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
-
-        JobSpecification job = createJob(options);
-
-        long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
-        hcc.waitForCompletion(jobId);
-        long end = System.currentTimeMillis();
-        System.err.println(start + " " + end + " " + (end - start));
-    }
-
-    private static JobSpecification createJob(Options options) throws HyracksDataException {
-
-        JobSpecification spec = new JobSpecification();
-
-        String[] splitNCs = options.ncs.split(",");
-
-        int fieldCount = 4;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-
-        // comparators for btree
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
-        comparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // create roviders for B-Tree
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
-        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
-
-        // schema of tuples coming out of primary index
-        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, });
-
-        // build tuple containing low and high search keys
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length * 2); // high
-                                                                                      // key
-                                                                                      // and
-                                                                                      // low
-                                                                                      // key
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(100, dos); // low key
-        tb.addFieldEndOffset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(200, dos); // build
-                                                                    // high key
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        JobHelper.createPartitionConstraint(spec, keyProviderOp, splitNCs);
-
-        int[] lowKeyFields = { 0 }; // low key is in field 0 of tuples going
-                                    // into search op
-        int[] highKeyFields = { 1 }; // low key is in field 1 of tuples going
-                                     // into search op
-
-        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-        BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
-                lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null, lowKeyFields,
-                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, btreeSearchOp, splitNCs);
-
-        // have each node print the results of its respective B-Tree
-        PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-        JobHelper.createPartitionConstraint(spec, printer, splitNCs);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, btreeSearchOp, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), btreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-
-        return spec;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
deleted file mode 100644
index 0c6ab8e..0000000
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.examples.btree.client;
-
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDiskOrderScanOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-// This example will load a secondary index with <key, primary-index key> pairs
-// We require an existing primary index built with PrimaryIndexBulkLoadExample
-
-public class SecondaryIndexBulkLoadExample {
-    private static class Options {
-        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
-        public String host;
-
-        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
-        public int port = 1098;
-
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
-        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
-        public String ncs;
-
-        @Option(name = "-primary-btreename", usage = "Name of primary-index B-Tree to load from", required = true)
-        public String primaryBTreeName;
-
-        @Option(name = "-secondary-btreename", usage = "B-Tree file name for secondary index to be built", required = true)
-        public String secondaryBTreeName;
-
-        @Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
-        public int sbSize = 32768;
-    }
-
-    public static void main(String[] args) throws Exception {
-        Options options = new Options();
-        CmdLineParser parser = new CmdLineParser(options);
-        parser.parseArgument(args);
-
-        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
-
-        JobSpecification job = createJob(options);
-
-        long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
-        hcc.waitForCompletion(jobId);
-        long end = System.currentTimeMillis();
-        System.err.println(start + " " + end + " " + (end - start));
-    }
-
-    private static JobSpecification createJob(Options options) {
-
-        JobSpecification spec = new JobSpecification();
-
-        String[] splitNCs = options.ncs.split(",");
-
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
-        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
-
-        // schema of tuples that we are retrieving from the primary index
-        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, // we will use this as
-                                                        // payload in secondary
-                                                        // index
-                UTF8StringSerializerDeserializer.INSTANCE, // we will use this
-                                                           // ask key in
-                                                           // secondary index
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        int primaryFieldCount = 4;
-        ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-
-        // comparators for sort fields and BTree fields
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
-        comparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        comparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // use a disk-order scan to read primary index
-        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-        TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
-                recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
-                dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
-
-        // sort the tuples as preparation for bulk load into secondary index
-        // fields to sort on
-        int[] sortFields = { 1, 0 };
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
-                comparatorFactories, recDesc);
-        JobHelper.createPartitionConstraint(spec, sorter, splitNCs);
-
-        // tuples to be put into B-Tree shall have 2 fields
-        int secondaryFieldCount = 2;
-        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
-
-        // the B-Tree expects its keyfields to be at the front of its input
-        // tuple
-        int[] fieldPermutation = { 1, 0 };
-        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
-        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, btreeSplitProvider, secondaryTypeTraits, comparatorFactories, null,
-                fieldPermutation, 0.7f, false, 1000L, dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
-
-        // connect the ops
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), btreeScanOp, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
-
-        spec.addRoot(btreeBulkLoad);
-
-        return spec;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
deleted file mode 100644
index e2c0be9..0000000
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.examples.btree.client;
-
-import java.io.DataOutput;
-
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-// This example will perform range search on the secondary index
-// and then retrieve the corresponding source records from the primary index
-
-public class SecondaryIndexSearchExample {
-    private static class Options {
-        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
-        public String host;
-
-        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
-        public int port = 1098;
-
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
-        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
-        public String ncs;
-
-        @Option(name = "-primary-btreename", usage = "Primary B-Tree file name", required = true)
-        public String primaryBTreeName;
-
-        @Option(name = "-secondary-btreename", usage = "Secondary B-Tree file name to search", required = true)
-        public String secondaryBTreeName;
-    }
-
-    public static void main(String[] args) throws Exception {
-        Options options = new Options();
-        CmdLineParser parser = new CmdLineParser(options);
-        parser.parseArgument(args);
-
-        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
-
-        JobSpecification job = createJob(options);
-
-        long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
-        hcc.waitForCompletion(jobId);
-        long end = System.currentTimeMillis();
-        System.err.println(start + " " + end + " " + (end - start));
-    }
-
-    private static JobSpecification createJob(Options options) throws HyracksDataException {
-
-        JobSpecification spec = new JobSpecification();
-
-        String[] splitNCs = options.ncs.split(",");
-
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
-        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
-
-        // schema of tuples coming out of secondary index
-        RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-
-        int secondaryFieldCount = 2;
-        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
-
-        // comparators for sort fields and BTree fields
-        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
-        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // comparators for primary index
-        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
-        primaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // schema of tuples coming out of primary index
-        RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, });
-
-        int primaryFieldCount = 4;
-        ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-
-        // comparators for btree, note that we only need a comparator for the
-        // non-unique key
-        // i.e. we will have a range condition on the first field only (implying
-        // [-infinity, +infinity] for the second field)
-        IBinaryComparatorFactory[] searchComparatorFactories = new IBinaryComparatorFactory[1];
-        searchComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // build tuple containing low and high search keys
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(searchComparatorFactories.length * 2); // low
-        // and
-        // high
-        // key
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos); // low
-                                                                       // key
-        tb.addFieldEndOffset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("f", dos); // high
-                                                                       // key
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        JobHelper.createPartitionConstraint(spec, keyProviderOp, splitNCs);
-
-        int[] secondaryLowKeyFields = { 0 }; // low key is in field 0 of tuples
-                                             // going into secondary index
-                                             // search op
-        int[] secondaryHighKeyFields = { 1 }; // high key is in field 1 of
-                                              // tuples going into secondary
-                                              // index search op
-
-        IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
-                options.secondaryBTreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-        BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                searchComparatorFactories, null, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, secondarySearchOp, splitNCs);
-
-        // secondary index will output tuples with [UTF8String, Integer]
-        // the Integer field refers to the key in the primary index of the
-        // source data records
-        int[] primaryLowKeyFields = { 1 }; // low key is in field 0 of tuples
-                                           // going into primary index search op
-        int[] primaryHighKeyFields = { 1 }; // high key is in field 1 of tuples
-                                            // going into primary index search
-                                            // op
-
-        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
-        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                null, primaryLowKeyFields, primaryHighKeyFields, true, true, dataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        JobHelper.createPartitionConstraint(spec, primarySearchOp, splitNCs);
-
-        // have each node print the results of its respective B-Tree
-        PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-        JobHelper.createPartitionConstraint(spec, printer, splitNCs);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-
-        return spec;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreehelper/pom.xml b/hyracks-examples/btree-example/btreehelper/pom.xml
deleted file mode 100644
index 7b033be..0000000
--- a/hyracks-examples/btree-example/btreehelper/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
-  <artifactId>btreehelper</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>btree-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-data-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  	</dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java
deleted file mode 100644
index 6a17755..0000000
--- a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-
-public enum IndexLifecycleManagerProvider implements IIndexLifecycleManagerProvider {
-    INSTANCE;
-
-    @Override
-    public IIndexLifecycleManager getLifecycleManager(IHyracksTaskContext ctx) {
-        return RuntimeContext.get(ctx).getIndexLifecycleManager();
-    }
-
-}
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
deleted file mode 100644
index 8cce523..0000000
--- a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import edu.uci.ics.hyracks.api.application.INCApplicationContext;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceRepositoryFactory;
-
-public class RuntimeContext {
-    private IBufferCache bufferCache;
-    private IFileMapManager fileMapManager;
-    private ILocalResourceRepository localResourceRepository;
-    private IIndexLifecycleManager lcManager;
-    private ResourceIdFactory resourceIdFactory;
-
-    public RuntimeContext(INCApplicationContext appCtx) throws HyracksDataException {
-        fileMapManager = new TransientFileMapManager();
-        ICacheMemoryAllocator allocator = new HeapBufferAllocator();
-        IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
-        bufferCache = new BufferCache(appCtx.getRootContext().getIOManager(), allocator, prs, fileMapManager, 32768,
-                50, 100);
-        lcManager = new IndexLifecycleManager();
-        ILocalResourceRepositoryFactory localResourceRepositoryFactory = new TransientLocalResourceRepositoryFactory();
-        localResourceRepository = localResourceRepositoryFactory.createRepository();  
-        resourceIdFactory = (new ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
-    }
-
-    public void close() {
-        bufferCache.close();
-    }
-
-    public IBufferCache getBufferCache() {
-        return bufferCache;
-    }
-
-    public IFileMapProvider getFileMapManager() {
-        return fileMapManager;
-    }
-
-    public static RuntimeContext get(IHyracksTaskContext ctx) {
-        return (RuntimeContext) ctx.getJobletContext().getApplicationContext().getApplicationObject();
-    }
-
-    public ILocalResourceRepository getLocalResourceRepository() {
-        return localResourceRepository;
-    }
-
-    public ResourceIdFactory getResourceIdFactory() {
-        return resourceIdFactory;
-    }
-    
-    public IIndexLifecycleManager getIndexLifecycleManager() {
-        return lcManager;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java
deleted file mode 100644
index e19847e..0000000
--- a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-
-public class StorageManagerInterface implements IStorageManagerInterface {
-    private static final long serialVersionUID = 1L;
-
-    public static final StorageManagerInterface INSTANCE = new StorageManagerInterface();
-
-    private StorageManagerInterface() {
-    }
-
-    @Override
-    public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
-        return RuntimeContext.get(ctx).getBufferCache();
-    }
-
-    @Override
-    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
-        return RuntimeContext.get(ctx).getFileMapManager();
-    }
-
-    @Override
-    public ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx) {
-        return RuntimeContext.get(ctx).getLocalResourceRepository();
-    }
-
-    @Override
-    public ResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx) {
-        return RuntimeContext.get(ctx).getResourceIdFactory();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/pom.xml b/hyracks-examples/btree-example/pom.xml
deleted file mode 100644
index 1f97591..0000000
--- a/hyracks-examples/btree-example/pom.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples</groupId>
-  <artifactId>btree-example</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-examples</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>btreehelper</module>
-    <module>btreeclient</module>
-    <module>btreeapp</module>
-  </modules>
-</project>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
deleted file mode 100644
index 54d2c06..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
+++ /dev/null
@@ -1,191 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
-  <artifactId>hadoopcompatapp</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>hadoop-compat-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-      	<groupId>edu.uci.ics.hyracks</groupId>
-      	<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
-      	<version>0.2.2-SNAPSHOT</version>
-        <configuration>
-          <hyracksServerHome>${basedir}/../../../hyracks-server/target/hyracks-server-${project.version}-binary-assembly</hyracksServerHome>
-          <hyracksCLIHome>${basedir}/../../../hyracks-cli/target/hyracks-cli-${project.version}-binary-assembly</hyracksCLIHome>
-          <jvmOptions>${jvm.extraargs}</jvmOptions>
-        </configuration>
-        <executions>
-          <execution>
-            <id>hyracks-cc-start</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>start-cc</goal>
-            </goals>
-            <configuration>
-	      <workingDir>${project.build.directory}</workingDir>
-            </configuration>
-          </execution>
-          <execution>
-            <id>hyracks-nc1-start</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>start-nc</goal>
-            </goals>
-            <configuration>
-              <nodeId>NC1</nodeId>
-              <dataIpAddress>127.0.0.1</dataIpAddress>
-              <ccHost>localhost</ccHost>
-	      <workingDir>${project.build.directory}</workingDir>
-            </configuration>
-          </execution>
-          <execution>
-            <id>hyracks-nc2-start</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>start-nc</goal>
-            </goals>
-            <configuration>
-              <nodeId>NC2</nodeId>
-              <dataIpAddress>127.0.0.1</dataIpAddress>
-              <ccHost>localhost</ccHost>
-	      <workingDir>${project.build.directory}</workingDir>
-            </configuration>
-          </execution>
-          <execution>
-            <id>deploy-app</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>deploy-app</goal>
-            </goals>
-            <configuration>
-              <ccHost>localhost</ccHost>
-              <appName>compat</appName>
-              <harFile>${project.build.directory}/hadoopcompatapp-${project.version}-app-assembly.zip</harFile>
-            </configuration>
-          </execution>
-	     <execution>
-	       <id>stop-services</id>
-	       <phase>post-integration-test</phase>
-	       <goals>
-	         <goal>stop-services</goal>
-	       </goals>
-             </execution>
-          </executions>
-      </plugin>
-      <plugin>
-      	<groupId>org.apache.maven.plugins</groupId>
-      	<artifactId>maven-compiler-plugin</artifactId>
-      	<version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-      	<groupId>org.apache.maven.plugins</groupId>
-      	<artifactId>maven-failsafe-plugin</artifactId>
-      	<version>2.8.1</version>
-      	<executions>
-      	  <execution>
-      	    <id>it</id>
-      	    <phase>integration-test</phase>
-      	    <goals>
-      	      <goal>integration-test</goal>
-      	    </goals>
-      	  </execution>
-      	</executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-     <dependency>
-        <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
-        <artifactId>hadoopcompathelper</artifactId>
-        <version>0.2.2-SNAPSHOT</version>
-        <scope>compile</scope>
-     </dependency>
-     <dependency>
-        <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
-  	    <artifactId>hadoopcompatclient</artifactId>
-  	    <version>0.2.2-SNAPSHOT</version>
-  	    <type>jar</type>
-  	    <scope>test</scope>
-     </dependency>
-     <dependency>
-  	    <groupId>junit</groupId>
-  	    <artifactId>junit</artifactId>
-  	    <version>4.8.2</version>
-  	    <type>jar</type>
-  	    <scope>test</scope>
-     </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml b/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
deleted file mode 100644
index a08aee8..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
-  <artifactId>hadoopcompatclient</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>hadoop-compat-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.compat</groupId>
-  		<artifactId>hadoopcompathelper</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.examples.compat.client.WordCountCompatibility</mainClass>
-                  <name>hadoopcompatclient</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml b/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
deleted file mode 100644
index 7150420..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
-  <artifactId>hadoopcompathelper</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>hadoop-compat-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-examples/hadoop-compat-example/pom.xml b/hyracks-examples/hadoop-compat-example/pom.xml
deleted file mode 100644
index 0a12c57..0000000
--- a/hyracks-examples/hadoop-compat-example/pom.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples</groupId>
-  <artifactId>hadoop-compat-example</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-examples</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>hadoopcompathelper</module>
-    <module>hadoopcompatclient</module>
-    <module>hadoopcompatapp</module>
-  </modules>
-
-  <dependencies>
-      <dependency>
-         <groupId>org.apache.hadoop</groupId>
-         <artifactId>hadoop-core</artifactId>
-         <version>0.20.2</version>
-         <type>jar</type>
-         <scope>compile</scope>
-      </dependency>
-
-      <dependency>
-         <groupId>edu.uci.ics.hyracks</groupId>
-         <artifactId>hyracks-hadoop-compat</artifactId>
-         <version>0.2.2-SNAPSHOT</version>
-         <type>jar</type>
-         <scope>compile</scope>
-      </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt b/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt
deleted file mode 100644
index e1d423f..0000000
--- a/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt
+++ /dev/null
@@ -1,557 +0,0 @@
-1|37|O|131251.81|1996-01-02|5-LOW|Clerk#000000951|0|nstructions sleep furiously among |42.3631|-71.065|42.3631|-71.065|
-2|79|O|40183.29|1996-12-01|1-URGENT|Clerk#000000880|0| foxes. pending accounts at the pending, silent asymptot|42.1091|-70.696|42.1091|-70.696|
-3|124|F|160882.76|1993-10-14|5-LOW|Clerk#000000955|0|sly final accounts boost. carefully regular ideas cajole carefully. depos|40.8151|-73.0452|40.8151|-73.0452|
-4|137|O|31084.79|1995-10-11|5-LOW|Clerk#000000124|0|sits. slyly regular warthogs cajole. regular, regular theodolites acro|40.8151|-73.0452|40.8151|-73.0452|
-5|46|F|86615.25|1994-07-30|5-LOW|Clerk#000000925|0|quickly. bold deposits sleep slyly. packages use slyly|42.2481|-71.174|42.2481|-71.174|
-6|56|F|36468.55|1992-02-21|4-NOT SPECIFIED|Clerk#000000058|0|ggle. special, final requests are against the furiously specia|61.1201|-149.89|61.1201|-149.89|
-7|40|O|171488.73|1996-01-10|2-HIGH|Clerk#000000470|0|ly special requests |61.1501|-149.926|61.1501|-149.926|
-32|131|O|116923.00|1995-07-16|2-HIGH|Clerk#000000616|0|ise blithely bold, regular requests. quickly unusual dep|61.181|-149.814|61.181|-149.814|
-33|67|F|99798.76|1993-10-27|3-MEDIUM|Clerk#000000409|0|uriously. furiously final request|61.1517|-149.86|61.1517|-149.86|
-34|62|O|41670.02|1998-07-21|3-MEDIUM|Clerk#000000223|0|ly final packages. fluffily final deposits wake blithely ideas. spe|61.1806|-149.814|61.1806|-149.814|
-35|128|O|148789.52|1995-10-23|4-NOT SPECIFIED|Clerk#000000259|0|zzle. carefully enticing deposits nag furio|61.1806|-149.775|61.1806|-149.775|
-36|116|O|38988.98|1995-11-03|1-URGENT|Clerk#000000358|0| quick packages are blithely. slyly silent accounts wake qu|61.1806|-149.775|61.1806|-149.775|
-37|88|F|113701.89|1992-06-03|3-MEDIUM|Clerk#000000456|0|kly regular pinto beans. carefully unusual waters cajole never|61.1806|-149.775|61.1806|-149.775|
-38|125|O|46366.56|1996-08-21|4-NOT SPECIFIED|Clerk#000000604|0|haggle blithely. furiously express ideas haggle blithely furiously regular re|61.2113|-149.824|61.2113|-149.824|
-39|82|O|219707.84|1996-09-20|3-MEDIUM|Clerk#000000659|0|ole express, ironic requests: ir|61.1967|-149.877|61.1967|-149.877|
-64|34|F|20065.73|1994-07-16|3-MEDIUM|Clerk#000000661|0|wake fluffily. sometimes ironic pinto beans about the dolphin|61.2164|-149.892|61.2164|-149.892|
-65|17|P|65883.92|1995-03-18|1-URGENT|Clerk#000000632|0|ular requests are blithely pending orbits-- even requests against the deposit|61.1571|-149.883|61.1571|-149.883|
-66|130|F|79258.24|1994-01-20|5-LOW|Clerk#000000743|0|y pending requests integrate|61.2048|-149.834|61.2048|-149.834|
-67|58|O|116227.05|1996-12-19|4-NOT SPECIFIED|Clerk#000000547|0|symptotes haggle slyly around the furiously iron|61.0956|-149.843|61.0956|-149.843|
-68|29|O|215135.72|1998-04-18|3-MEDIUM|Clerk#000000440|0| pinto beans sleep carefully. blithely ironic deposits haggle furiously acro|61.1491|-149.809|61.1491|-149.809|
-69|85|F|162176.23|1994-06-04|4-NOT SPECIFIED|Clerk#000000330|0| depths atop the slyly thin deposits detect among the furiously silent accou|61.1981|-149.871|61.1981|-149.871|
-70|65|F|84651.80|1993-12-18|5-LOW|Clerk#000000322|0| carefully ironic request|61.1982|-149.876|61.1982|-149.876|
-71|4|O|178821.73|1998-01-24|4-NOT SPECIFIED|Clerk#000000271|0| express deposits along the blithely regul|61.1924|-149.909|61.1924|-149.909|
-96|109|F|55090.67|1994-04-17|2-HIGH|Clerk#000000395|0|oost furiously. pinto|61.2204|-149.728|61.2204|-149.728|
-97|22|F|68908.31|1993-01-29|3-MEDIUM|Clerk#000000547|0|hang blithely along the regular accounts. furiously even ideas after the|61.196|-149.864|61.196|-149.864|
-98|106|F|51004.44|1994-09-25|1-URGENT|Clerk#000000448|0|c asymptotes. quickly regular packages should have to nag re|61.1987|-149.889|61.1987|-149.889|
-99|89|F|92326.79|1994-03-13|4-NOT SPECIFIED|Clerk#000000973|0|e carefully ironic packages. pending|61.1984|-149.897|61.1984|-149.897|
-100|148|O|141311.01|1998-02-28|4-NOT SPECIFIED|Clerk#000000577|0|heodolites detect slyly alongside of the ent|61.1897|-149.898|61.1897|-149.898|
-101|28|O|95591.40|1996-03-17|3-MEDIUM|Clerk#000000419|0|ding accounts above the slyly final asymptote|61.1228|-149.81|61.1228|-149.81|
-102|1|O|113954.89|1997-05-09|2-HIGH|Clerk#000000596|0| slyly according to the asymptotes. carefully final packages integrate furious|61.1649|-149.881|61.1649|-149.881|
-103|31|O|95563.95|1996-06-20|4-NOT SPECIFIED|Clerk#000000090|0|ges. carefully unusual instructions haggle quickly regular f|61.1934|-149.887|61.1934|-149.887|
-128|74|F|36333.34|1992-06-15|1-URGENT|Clerk#000000385|0|ns integrate fluffily. ironic asymptotes after the regular excuses nag around |61.2164|-149.892|61.2164|-149.892|
-129|73|F|188124.55|1992-11-19|5-LOW|Clerk#000000859|0|ing tithes. carefully pending deposits boost about the silently express |61.1932|-149.886|61.1932|-149.886|
-130|37|F|115717.37|1992-05-08|2-HIGH|Clerk#000000036|0|le slyly unusual, regular packages? express deposits det|61.2072|-149.888|61.2072|-149.888|
-131|94|F|96596.81|1994-06-08|3-MEDIUM|Clerk#000000625|0|after the fluffily special foxes integrate s|61.2125|-149.904|61.2125|-149.904|
-132|28|F|118802.62|1993-06-11|3-MEDIUM|Clerk#000000488|0|sits are daringly accounts. carefully regular foxes sleep slyly about the|61.2142|-149.806|61.2142|-149.806|
-133|44|O|80437.72|1997-11-29|1-URGENT|Clerk#000000738|0|usly final asymptotes |61.1866|-149.923|61.1866|-149.923|
-134|7|F|154260.84|1992-05-01|4-NOT SPECIFIED|Clerk#000000711|0|lar theodolites boos|61.1089|-149.857|61.1089|-149.857|
-135|61|O|174569.88|1995-10-21|4-NOT SPECIFIED|Clerk#000000804|0|l platelets use according t|61.1024|-149.853|61.1024|-149.853|
-160|83|O|86076.86|1996-12-19|4-NOT SPECIFIED|Clerk#000000342|0|thely special sauternes wake slyly of t|61.1891|-149.906|61.1891|-149.906|
-161|17|F|19056.99|1994-08-31|2-HIGH|Clerk#000000322|0|carefully! special instructions sin|61.1891|-149.906|61.1891|-149.906|
-162|16|O|2158.13|1995-05-08|3-MEDIUM|Clerk#000000378|0|nts hinder fluffily ironic instructions. express, express excuses |61.1891|-149.906|61.1891|-149.906|
-163|88|O|125170.86|1997-09-05|3-MEDIUM|Clerk#000000379|0|y final packages. final foxes since the quickly even|61.1891|-149.906|61.1891|-149.906|
-164|1|F|202660.52|1992-10-21|5-LOW|Clerk#000000209|0|cajole ironic courts. slyly final ideas are slyly. blithely final Tiresias sub|61.1891|-149.906|61.1891|-149.906|
-165|28|F|141824.23|1993-01-30|4-NOT SPECIFIED|Clerk#000000292|0|across the blithely regular accounts. bold|61.1891|-149.906|61.1891|-149.906|
-166|109|O|93335.60|1995-09-12|2-HIGH|Clerk#000000440|0|lets. ironic, bold asymptotes kindle|61.1891|-149.906|61.1891|-149.906|
-167|121|F|52982.23|1993-01-04|4-NOT SPECIFIED|Clerk#000000731|0|s nag furiously bold excuses. fluffily iron|61.1891|-149.906|61.1891|-149.906|
-192|83|O|133002.55|1997-11-25|5-LOW|Clerk#000000483|0|y unusual platelets among the final instructions integrate rut|61.1891|-149.906|61.1891|-149.906|
-193|80|F|48053.18|1993-08-08|1-URGENT|Clerk#000000025|0|the furiously final pin|61.1891|-149.906|61.1891|-149.906|
-194|62|F|114097.63|1992-04-05|3-MEDIUM|Clerk#000000352|0|egular requests haggle slyly regular, regular pinto beans. asymptote|61.1891|-149.906|61.1891|-149.906|
-195|136|F|120053.52|1993-12-28|3-MEDIUM|Clerk#000000216|0|old forges are furiously sheaves. slyly fi|61.1891|-149.906|61.1891|-149.906|
-196|65|F|33248.04|1993-03-17|2-HIGH|Clerk#000000988|0|beans boost at the foxes. silent foxes|61.1891|-149.906|61.1891|-149.906|
-197|34|P|100290.07|1995-04-07|2-HIGH|Clerk#000000969|0|solve quickly about the even braids. carefully express deposits affix care|61.1891|-149.906|61.1891|-149.906|
-198|112|O|125792.83|1998-01-02|4-NOT SPECIFIED|Clerk#000000331|0|its. carefully ironic requests sleep. furiously express fox|61.1891|-149.906|61.1891|-149.906|
-199|53|O|80592.44|1996-03-07|2-HIGH|Clerk#000000489|0|g theodolites. special packag|61.1891|-149.906|61.1891|-149.906|
-224|4|F|155680.60|1994-06-18|4-NOT SPECIFIED|Clerk#000000642|0|r the quickly thin courts. carefully|61.1891|-149.906|61.1891|-149.906|
-225|34|P|165890.47|1995-05-25|1-URGENT|Clerk#000000177|0|s. blithely ironic accounts wake quickly fluffily special acc|61.1891|-149.906|61.1891|-149.906|
-226|128|F|180119.22|1993-03-10|2-HIGH|Clerk#000000756|0|s are carefully at the blithely ironic acc|61.1891|-149.906|61.1891|-149.906|
-227|10|O|46076.46|1995-11-10|5-LOW|Clerk#000000919|0| express instructions. slyly regul|61.1891|-149.906|61.1891|-149.906|
-228|46|F|2638.98|1993-02-25|1-URGENT|Clerk#000000562|0|es was slyly among the regular foxes. blithely regular dependenci|61.1891|-149.906|61.1891|-149.906|
-229|112|F|142290.77|1993-12-29|1-URGENT|Clerk#000000628|0|he fluffily even instructions. furiously i|61.1891|-149.906|61.1891|-149.906|
-230|103|F|107231.60|1993-10-27|1-URGENT|Clerk#000000520|0|odolites. carefully quick requ|61.1891|-149.906|61.1891|-149.906|
-231|91|F|141554.06|1994-09-29|2-HIGH|Clerk#000000446|0| packages haggle slyly after the carefully ironic instruct|61.1891|-149.906|61.1891|-149.906|
-256|125|F|106315.25|1993-10-19|4-NOT SPECIFIED|Clerk#000000834|0|he fluffily final ideas might are final accounts. carefully f|61.1891|-149.906|61.1891|-149.906|
-257|124|O|7102.74|1998-03-28|3-MEDIUM|Clerk#000000680|0|ts against the sly warhorses cajole slyly accounts|61.1891|-149.906|61.1891|-149.906|
-258|43|F|186669.10|1993-12-29|1-URGENT|Clerk#000000167|0|dencies. blithely quick packages cajole. ruthlessly final accounts|61.1891|-149.906|61.1891|-149.906|
-259|44|F|75661.70|1993-09-29|4-NOT SPECIFIED|Clerk#000000601|0|ages doubt blithely against the final foxes. carefully express deposits dazzle|61.1891|-149.906|61.1891|-149.906|
-260|106|O|179292.14|1996-12-10|3-MEDIUM|Clerk#000000960|0|lently regular pinto beans sleep after the slyly e|61.1891|-149.906|61.1891|-149.906|
-261|47|F|201003.12|1993-06-29|3-MEDIUM|Clerk#000000310|0|ully fluffily brave instructions. furiousl|61.1891|-149.906|61.1891|-149.906|
-262|31|O|108443.84|1995-11-25|4-NOT SPECIFIED|Clerk#000000551|0|l packages. blithely final pinto beans use carefu|61.1891|-149.906|61.1891|-149.906|
-263|118|F|79782.56|1994-05-17|2-HIGH|Clerk#000000088|0| pending instructions. blithely un|61.1891|-149.906|61.1891|-149.906|
-288|8|O|163794.53|1997-02-21|1-URGENT|Clerk#000000109|0|uriously final requests. even, final ideas det|61.1891|-149.906|61.1891|-149.906|
-289|104|O|131092.67|1997-02-10|3-MEDIUM|Clerk#000000103|0|sily. slyly special excuse|61.1891|-149.906|61.1891|-149.906|
-290|118|F|62814.89|1994-01-01|4-NOT SPECIFIED|Clerk#000000735|0|efully dogged deposits. furiou|61.1891|-149.906|61.1891|-149.906|
-291|142|F|66817.05|1994-03-13|1-URGENT|Clerk#000000923|0|dolites. carefully regular pinto beans cajol|64.8541|-147.813|64.8541|-147.813|
-292|23|F|30783.05|1992-01-13|2-HIGH|Clerk#000000193|0|g pinto beans will have to sleep f|64.8414|-147.606|64.8414|-147.606|
-293|31|F|37248.78|1992-10-02|2-HIGH|Clerk#000000629|0|re bold, ironic deposits. platelets c|64.8371|-147.746|64.8371|-147.746|
-294|52|F|30059.47|1993-07-16|3-MEDIUM|Clerk#000000499|0|kly according to the frays. final dolphins affix quickly |64.8151|-147.707|64.8151|-147.707|
-295|19|F|89345.99|1994-09-29|2-HIGH|Clerk#000000155|0| unusual pinto beans play. regular ideas haggle|64.8371|-147.746|64.8371|-147.746|
-320|1|O|39835.54|1997-11-21|2-HIGH|Clerk#000000573|0|ar foxes nag blithely|64.849|-147.813|64.849|-147.813|
-321|124|F|62251.15|1993-03-21|3-MEDIUM|Clerk#000000289|0|equests run. blithely final dependencies after the deposits wake caref|64.8425|-147.724|64.8425|-147.724|
-322|134|F|127068.89|1992-03-19|1-URGENT|Clerk#000000158|0|fully across the slyly bold packages. packages against the quickly regular i|64.8425|-147.724|64.8425|-147.724|
-323|40|F|79683.42|1994-03-26|1-URGENT|Clerk#000000959|0|arefully pending foxes sleep blithely. slyly express accoun|64.849|-147.826|64.849|-147.826|
-324|106|F|26868.85|1992-03-20|1-URGENT|Clerk#000000352|0| about the ironic, regular deposits run blithely against the excuses|64.815|-147.882|64.815|-147.882|
-325|41|F|71543.41|1993-10-17|5-LOW|Clerk#000000844|0|ly sometimes pending pa|64.8906|-147.628|64.8906|-147.628|
-326|76|O|229165.17|1995-06-04|2-HIGH|Clerk#000000466|0| requests. furiously ironic asymptotes mold carefully alongside of the blit|64.8276|-147.639|64.8276|-147.639|
-327|145|P|24468.16|1995-04-17|5-LOW|Clerk#000000992|0|ng the slyly final courts. slyly even escapades eat |64.8461|-147.813|64.8461|-147.813|
-352|107|F|16003.86|1994-03-08|2-HIGH|Clerk#000000932|0|ke slyly bold pinto beans. blithely regular accounts against the spe|64.8281|-147.812|64.8281|-147.812|
-353|2|F|179984.42|1993-12-31|5-LOW|Clerk#000000449|0| quiet ideas sleep. even instructions cajole slyly. silently spe|64.8377|-147.718|64.8377|-147.718|
-354|139|O|157062.70|1996-03-14|2-HIGH|Clerk#000000511|0|ly regular ideas wake across the slyly silent ideas. final deposits eat b|64.8417|-147.718|64.8417|-147.718|
-355|71|F|69447.25|1994-06-14|5-LOW|Clerk#000000532|0|s. sometimes regular requests cajole. regular, pending accounts a|64.8145|-147.772|64.8145|-147.772|
-356|148|F|162786.67|1994-06-30|4-NOT SPECIFIED|Clerk#000000944|0|as wake along the bold accounts. even, |64.8541|-147.813|64.8541|-147.813|
-357|61|O|98723.11|1996-10-09|2-HIGH|Clerk#000000301|0|e blithely about the express, final accounts. quickl|64.8169|-147.779|64.8169|-147.779|
-358|4|F|226806.66|1993-09-20|2-HIGH|Clerk#000000392|0|l, silent instructions are slyly. silently even de|64.8378|-147.71|64.8378|-147.71|
-359|79|F|142891.22|1994-12-19|3-MEDIUM|Clerk#000000934|0|n dolphins. special courts above the carefully ironic requests use|64.8436|-147.722|64.8436|-147.722|
-384|115|F|122785.82|1992-03-03|5-LOW|Clerk#000000206|0|, even accounts use furiously packages. slyly ironic pla|64.9401|-147.402|64.9401|-147.402|
-385|34|O|50724.06|1996-03-22|5-LOW|Clerk#000000600|0|hless accounts unwind bold pain|64.8426|-147.719|64.8426|-147.719|
-386|61|F|90380.40|1995-01-25|2-HIGH|Clerk#000000648|0| haggle quickly. stealthily bold asymptotes haggle among the furiously even re|64.8534|-147.811|64.8534|-147.811|
-387|4|O|130647.18|1997-01-26|4-NOT SPECIFIED|Clerk#000000768|0| are carefully among the quickly even deposits. furiously silent req|64.9341|-147.928|64.9341|-147.928|
-388|46|F|120533.46|1992-12-16|4-NOT SPECIFIED|Clerk#000000356|0|ar foxes above the furiously ironic deposits nag slyly final reque|64.8393|-147.72|64.8393|-147.72|
-389|127|F|1984.14|1994-02-17|2-HIGH|Clerk#000000062|0|ing to the regular asymptotes. final, pending foxes about the blithely sil|64.8406|-147.731|64.8406|-147.731|
-390|103|O|168562.27|1998-04-07|5-LOW|Clerk#000000404|0|xpress asymptotes use among the regular, final pinto b|64.9281|-147.865|64.9281|-147.865|
-391|112|F|13282.23|1994-11-17|2-HIGH|Clerk#000000256|0|orges thrash fluffil|64.8371|-147.716|64.8371|-147.716|
-416|41|F|71362.50|1993-09-27|5-LOW|Clerk#000000294|0| the accounts. fluffily bold depo|64.9414|-147.841|64.9414|-147.841|
-417|55|F|91982.29|1994-02-06|3-MEDIUM|Clerk#000000468|0|ironic, even packages. thinly unusual accounts sleep along the slyly unusual |64.8363|-147.79|64.8363|-147.79|
-418|95|P|33124.96|1995-04-13|4-NOT SPECIFIED|Clerk#000000643|0|. furiously ironic instruc|64.8371|-147.716|64.8371|-147.716|
-419|118|O|111597.96|1996-10-01|3-MEDIUM|Clerk#000000376|0|osits. blithely pending theodolites boost carefully|64.8591|-147.917|64.8591|-147.917|
-420|91|O|198039.23|1995-10-31|4-NOT SPECIFIED|Clerk#000000756|0|leep carefully final excuses. fluffily pending requests unwind carefully above|64.8363|-147.79|64.8363|-147.79|
-421|40|F|1084.38|1992-02-22|5-LOW|Clerk#000000405|0|egular, even packages according to the final, un|55.3801|-131.682|55.3801|-131.682|
-422|74|O|106045.89|1997-05-31|4-NOT SPECIFIED|Clerk#000000049|0|aggle carefully across the accounts. regular accounts eat fluffi|55.3073|-131.528|55.3073|-131.528|
-423|104|O|26981.31|1996-06-01|1-URGENT|Clerk#000000674|0|quests. deposits cajole quickly. furiously bold accounts haggle q|55.3801|-131.682|55.3801|-131.682|
-448|149|O|114978.03|1995-08-21|3-MEDIUM|Clerk#000000597|0| regular, express foxes use blithely. quic|55.3601|-131.681|55.3601|-131.681|
-449|97|O|41605.63|1995-07-20|2-HIGH|Clerk#000000841|0|. furiously regular theodolites affix blithely |55.3279|-131.613|55.3279|-131.613|
-450|49|P|153386.61|1995-03-05|4-NOT SPECIFIED|Clerk#000000293|0|d theodolites. boldly bold foxes since the pack|55.3129|-131.588|55.3129|-131.588|
-451|100|O|104664.40|1998-05-25|5-LOW|Clerk#000000048|0|nic pinto beans. theodolites poach carefully; |55.3801|-131.682|55.3801|-131.682|
-452|61|O|2007.48|1997-10-14|1-URGENT|Clerk#000000498|0|t, unusual instructions above the blithely bold pint|55.3801|-131.682|55.3801|-131.682|
-453|46|O|216826.73|1997-05-26|5-LOW|Clerk#000000504|0|ss foxes. furiously regular ideas sleep according to t|55.4299|-131.789|55.4299|-131.789|
-454|49|O|23198.24|1995-12-27|5-LOW|Clerk#000000890|0|dolites sleep carefully blithely regular deposits. quickly regul|55.3801|-131.682|55.3801|-131.682|
-455|13|O|138010.76|1996-12-04|1-URGENT|Clerk#000000796|0| about the final platelets. dependen|55.3507|-131.671|55.3507|-131.671|
-480|73|F|20530.97|1993-05-08|5-LOW|Clerk#000000004|0|ealthy pinto beans. fluffily regular requests along the special sheaves wake |55.3801|-131.682|55.3801|-131.682|
-481|31|F|117827.18|1992-10-08|2-HIGH|Clerk#000000230|0|ly final ideas. packages haggle fluffily|55.3394|-131.636|55.3394|-131.636|
-482|127|O|136634.34|1996-03-26|1-URGENT|Clerk#000000295|0|ts. deposits wake: final acco|55.3801|-131.682|55.3801|-131.682|
-483|35|O|39793.05|1995-07-11|2-HIGH|Clerk#000000025|0|cross the carefully final e|55.3103|-131.582|55.3103|-131.582|
-484|55|O|219920.62|1997-01-03|3-MEDIUM|Clerk#000000545|0|grouches use. furiously bold accounts maintain. bold, regular deposits|55.3801|-131.682|55.3801|-131.682|
-485|101|O|110432.76|1997-03-26|2-HIGH|Clerk#000000105|0| regular ideas nag thinly furiously s|55.3801|-131.682|55.3801|-131.682|
-486|52|O|185968.15|1996-03-11|4-NOT SPECIFIED|Clerk#000000803|0|riously dolphins. fluffily ironic requ|55.3801|-131.682|55.3801|-131.682|
-487|109|F|48502.79|1992-08-18|1-URGENT|Clerk#000000086|0|ithely unusual courts eat accordi|55.3801|-131.682|55.3801|-131.682|
-512|64|P|124661.48|1995-05-20|5-LOW|Clerk#000000814|0|ding requests. carefully express theodolites was quickly. furious|55.3801|-131.682|55.3801|-131.682|
-513|61|O|63703.92|1995-05-01|2-HIGH|Clerk#000000522|0|regular packages. pinto beans cajole carefully against the even|55.3424|-131.634|55.3424|-131.634|
-514|76|O|104585.77|1996-04-04|2-HIGH|Clerk#000000094|0| cajole furiously. slyly final excuses cajole. slyly special instructions |55.4097|-131.729|55.4097|-131.729|
-515|142|F|153720.22|1993-08-29|4-NOT SPECIFIED|Clerk#000000700|0|eposits are furiously furiously silent pinto beans. pending pack|55.3801|-131.682|55.3801|-131.682|
-516|44|O|10677.86|1998-04-21|2-HIGH|Clerk#000000305|0|lar, unusual platelets are carefully. even courts sleep bold, final pinto bea|55.3801|-131.682|55.3801|-131.682|
-517|10|O|82197.79|1997-04-07|5-LOW|Clerk#000000359|0|slyly pending deposits cajole quickly packages. furiou|55.3462|-131.658|55.3462|-131.658|
-518|145|O|223537.09|1998-02-08|2-HIGH|Clerk#000000768|0| the carefully bold accounts. quickly regular excuses are|55.3801|-131.682|55.3801|-131.682|
-519|64|O|95731.50|1997-10-31|1-URGENT|Clerk#000000985|0|ains doze furiously against the f|55.3801|-131.682|55.3801|-131.682|
-544|94|F|47627.89|1993-02-17|2-HIGH|Clerk#000000145|0|the special, final accounts. dogged dolphins|55.3801|-131.682|55.3801|-131.682|
-545|64|O|23476.12|1995-11-07|2-HIGH|Clerk#000000537|0|as. blithely final hockey players about th|55.3801|-131.682|55.3801|-131.682|
-546|145|O|14790.37|1996-11-01|2-HIGH|Clerk#000000041|0|osits sleep. slyly special dolphins about the q|55.3801|-131.682|55.3801|-131.682|
-547|100|O|96855.29|1996-06-22|3-MEDIUM|Clerk#000000976|0|ing accounts eat. carefully regular packa|55.3801|-131.682|55.3801|-131.682|
-548|124|F|99088.75|1994-09-21|1-URGENT|Clerk#000000435|0|arefully express instru|55.3801|-131.682|55.3801|-131.682|
-549|110|F|141679.41|1992-07-13|1-URGENT|Clerk#000000196|0|ideas alongside of |55.3801|-131.682|55.3801|-131.682|
-550|25|O|33123.28|1995-08-02|1-URGENT|Clerk#000000204|0|t requests. blithely |61.5856|-149.316|61.5856|-149.316|
-551|91|O|46355.83|1995-05-30|1-URGENT|Clerk#000000179|0|xpress accounts boost quic|61.5781|-149.429|61.5781|-149.429|
-576|31|O|18307.45|1997-05-13|3-MEDIUM|Clerk#000000955|0|l requests affix regular requests. final account|61.6141|-149.457|61.6141|-149.457|
-577|56|F|34768.68|1994-12-19|5-LOW|Clerk#000000154|0| deposits engage stealthil|61.5801|-149.461|61.5801|-149.461|
-578|94|O|70392.02|1997-01-10|5-LOW|Clerk#000000281|0|e blithely even packages. slyly pending platelets bes|61.9071|-150.067|61.9071|-150.067|
-579|68|O|120828.12|1998-03-11|2-HIGH|Clerk#000000862|0| regular instructions. blithely even p|61.5928|-149.392|61.5928|-149.392|
-580|61|O|88219.12|1997-07-05|2-HIGH|Clerk#000000314|0|tegrate fluffily regular accou|61.6141|-149.457|61.6141|-149.457|
-581|70|O|126066.00|1997-02-23|4-NOT SPECIFIED|Clerk#000000239|0| requests. even requests use slyly. blithely ironic |61.5792|-149.36|61.5792|-149.36|
-582|50|O|129004.81|1997-10-21|1-URGENT|Clerk#000000378|0|n pinto beans print a|61.6049|-149.463|61.6049|-149.463|
-583|49|O|127817.38|1997-03-19|3-MEDIUM|Clerk#000000792|0|efully express requests. a|61.6099|-149.328|61.6099|-149.328|
-608|26|O|62567.99|1996-02-28|3-MEDIUM|Clerk#000000995|0|nic waters wake slyly slyly expre|61.5531|-149.651|61.5531|-149.651|
-609|127|F|21088.59|1994-06-01|3-MEDIUM|Clerk#000000348|0|- ironic gifts believe furiously ca|61.6141|-149.457|61.6141|-149.457|
-610|52|O|175142.28|1995-08-02|1-URGENT|Clerk#000000610|0|totes. ironic, unusual packag|61.6141|-149.457|61.6141|-149.457|
-611|106|F|73907.63|1993-01-27|1-URGENT|Clerk#000000401|0|ounts detect furiously ac|61.5531|-149.651|61.5531|-149.651|
-612|82|F|145695.42|1992-10-21|3-MEDIUM|Clerk#000000759|0|boost quickly quickly final excuses. final foxes use bravely afte|61.6141|-149.457|61.6141|-149.457|
-613|139|O|33396.35|1995-06-18|2-HIGH|Clerk#000000172|0|ts hinder among the deposits. fluffily ironic depos|61.7321|-150.12|61.7321|-150.12|
-614|134|F|218116.21|1992-12-01|2-HIGH|Clerk#000000388|0| deposits! even, daring theodol|61.6141|-149.457|61.6141|-149.457|
-615|67|F|32890.89|1992-05-09|5-LOW|Clerk#000000388|0|t to promise asymptotes. packages haggle alongside of the fluffil|61.582|-149.441|61.582|-149.441|
-640|97|F|145495.62|1993-01-23|2-HIGH|Clerk#000000433|0|r, unusual accounts boost carefully final ideas. slyly silent theod|61.5818|-149.44|61.5818|-149.44|
-641|133|F|120626.49|1993-08-30|5-LOW|Clerk#000000175|0|ents cajole furiously about the quickly silent pac|61.6141|-149.457|61.6141|-149.457|
-642|40|F|22994.51|1993-12-16|3-MEDIUM|Clerk#000000357|0| among the requests wake slyly alongside of th|61.7321|-150.12|61.7321|-150.12|
-643|58|P|180396.95|1995-03-25|2-HIGH|Clerk#000000354|0|g dependencies. regular accounts |61.6308|-149.415|61.6308|-149.415|
-644|8|F|201268.06|1992-05-01|1-URGENT|Clerk#000000550|0| blithely unusual platelets haggle ironic, special excuses. excuses unwi|61.5801|-149.461|61.5801|-149.461|
-645|115|F|234763.73|1994-12-03|2-HIGH|Clerk#000000090|0|quickly daring theodolites across the regu|61.5811|-149.444|61.5811|-149.444|
-646|52|F|142070.65|1994-11-22|2-HIGH|Clerk#000000203|0|carefully even foxes. fina|61.6521|-149.92|61.6521|-149.92|
-647|143|O|56449.23|1997-08-07|1-URGENT|Clerk#000000270|0|egular pearls. carefully express asymptotes are. even account|61.6141|-149.307|61.6141|-149.307|
-672|109|F|89877.09|1994-04-14|5-LOW|Clerk#000000106|0|egular requests are furiously according to |61.6168|-149.328|61.6168|-149.328|
-673|80|F|21137.08|1994-03-10|1-URGENT|Clerk#000000448|0| special pinto beans use quickly furiously even depende|61.5714|-149.381|61.5714|-149.381|
-674|34|F|27204.60|1992-08-29|5-LOW|Clerk#000000448|0|ully special deposits. furiously final warhorses affix carefully. fluffily f|61.6521|-149.92|61.6521|-149.92|
-675|13|O|125188.72|1997-07-31|2-HIGH|Clerk#000000168|0|ffily between the careful|61.5858|-149.376|61.5858|-149.376|
-676|38|O|163966.67|1996-12-13|2-HIGH|Clerk#000000248|0|the final deposits. special, pending|61.5822|-149.463|61.5822|-149.463|
-677|124|F|147915.68|1993-11-24|3-MEDIUM|Clerk#000000824|0|uriously special pinto beans cajole carefully. fi|61.5861|-149.303|61.5861|-149.303|
-678|131|F|135761.05|1993-02-27|5-LOW|Clerk#000000530|0|. blithely final somas about the|61.5821|-149.438|61.5821|-149.438|
-679|49|O|8945.03|1995-12-15|2-HIGH|Clerk#000000853|0|tealthy, final pinto beans haggle slyly. pending platelets about the special, |61.6281|-149.338|61.6281|-149.338|
-704|85|O|56210.26|1996-11-21|3-MEDIUM|Clerk#000000682|0|blithely pending platelets wake alongside of the final, iron|61.5771|-149.335|61.5771|-149.335|
-705|43|O|83773.49|1997-02-13|4-NOT SPECIFIED|Clerk#000000294|0|ithely regular dependencies. express, even packages sleep slyly pending t|61.5917|-149.464|61.5917|-149.464|
-706|148|O|23973.60|1995-09-09|1-URGENT|Clerk#000000448|0|g the packages. deposits caj|61.1927|-149.86|61.1927|-149.86|
-707|118|F|58218.35|1994-11-20|3-MEDIUM|Clerk#000000199|0| ideas about the silent, bold deposits nag dolphins|61.1879|-149.85|61.1879|-149.85|
-708|32|O|100445.59|1998-07-03|3-MEDIUM|Clerk#000000101|0|lphins cajole about t|61.1814|-149.849|61.1814|-149.849|
-709|37|O|72055.87|1998-04-21|1-URGENT|Clerk#000000461|0|ons alongside of the carefully bold pinto bea|61.2104|-149.892|61.2104|-149.892|
-710|133|F|208974.42|1993-01-02|5-LOW|Clerk#000000026|0| regular, regular requests boost. fluffily re|61.2093|-149.903|61.2093|-149.903|
-711|64|F|92484.70|1993-09-23|4-NOT SPECIFIED|Clerk#000000856|0|its. fluffily regular gifts are furi|61.1481|-149.829|61.1481|-149.829|
-736|47|O|130204.17|1998-06-21|5-LOW|Clerk#000000881|0|refully of the final pi|61.2161|-149.876|61.2161|-149.876|
-737|121|F|12984.85|1992-04-26|5-LOW|Clerk#000000233|0|ake blithely express, ironic theodolites. blithely special accounts wa|61.1972|-149.75|61.1972|-149.75|
-738|22|F|114145.18|1993-03-02|4-NOT SPECIFIED|Clerk#000000669|0|ly even foxes. furiously regular accounts cajole ca|61.2066|-149.887|61.2066|-149.887|
-739|1|O|159171.69|1998-05-31|5-LOW|Clerk#000000900|0| against the slyly ironic packages nag slyly ironic|61.2161|-149.876|61.2161|-149.876|
-740|44|O|83490.99|1995-07-16|3-MEDIUM|Clerk#000000583|0|courts haggle furiously across the final, regul|61.195|-149.834|61.195|-149.834|
-741|106|O|47985.98|1998-07-07|2-HIGH|Clerk#000000295|0|ic instructions. slyly express instructions solv|61.2038|-149.808|61.2038|-149.808|
-742|103|F|207632.55|1994-12-23|5-LOW|Clerk#000000543|0|equests? slyly ironic dolphins boost carefully above the blithely|61.1228|-149.862|61.1228|-149.862|
-743|79|O|23614.89|1996-10-04|4-NOT SPECIFIED|Clerk#000000933|0|eans. furiously ironic deposits sleep carefully carefully qui|61.2005|-149.785|61.2005|-149.785|
-768|98|O|220636.82|1996-08-20|3-MEDIUM|Clerk#000000411|0|jole slyly ironic packages. slyly even idea|61.181|-149.825|61.181|-149.825|
-769|80|F|43092.76|1993-06-02|3-MEDIUM|Clerk#000000172|0|ggle furiously. ironic packages haggle slyly. bold platelets affix s|61.1867|-149.919|61.1867|-149.919|
-770|32|O|64271.75|1998-05-23|5-LOW|Clerk#000000572|0|heodolites. furiously special pinto beans cajole pac|61.1955|-149.911|61.1955|-149.911|
-771|46|O|105302.05|1995-06-17|1-URGENT|Clerk#000000105|0|s. furiously final instructions across the deposit|61.1089|-149.858|61.1089|-149.858|
-772|97|F|128234.96|1993-04-17|2-HIGH|Clerk#000000430|0|s boost blithely fluffily idle ideas? fluffily even pin|61.1805|-149.889|61.1805|-149.889|
-773|133|F|146862.27|1993-09-26|3-MEDIUM|Clerk#000000307|0|tions are quickly accounts. accounts use bold, even pinto beans. gifts ag|61.1534|-149.985|61.1534|-149.985|
-774|80|O|145857.60|1995-12-04|1-URGENT|Clerk#000000883|0|tealthily even depths|61.1901|-149.911|61.1901|-149.911|
-775|134|F|59455.61|1995-03-18|1-URGENT|Clerk#000000191|0|kly express requests. fluffily silent accounts poach furiously|61.2122|-149.734|61.2122|-149.734|
-800|56|O|87892.38|1998-07-14|2-HIGH|Clerk#000000213|0|y alongside of the pending packages? final platelets nag fluffily carefu|61.1951|-149.906|61.1951|-149.906|
-801|118|F|127717.72|1992-02-18|1-URGENT|Clerk#000000186|0|iously from the furiously enticing reques|61.2043|-149.869|61.2043|-149.869|
-802|137|F|156381.95|1995-01-05|1-URGENT|Clerk#000000516|0|posits. ironic, pending requests cajole. even theodol|61.2036|-149.869|61.2036|-149.869|
-803|16|O|27629.66|1997-04-29|5-LOW|Clerk#000000260|0|ic instructions. even deposits haggle furiously at the deposits-- regular de|61.1883|-149.886|61.1883|-149.886|
-804|50|F|94400.43|1993-03-12|3-MEDIUM|Clerk#000000931|0|s. blithely final foxes are about the packag|61.2141|-149.864|61.2141|-149.864|
-805|127|O|90042.41|1995-07-05|4-NOT SPECIFIED|Clerk#000000856|0|y according to the fluffily |61.1955|-149.782|61.1955|-149.782|
-806|131|O|26839.16|1996-06-20|2-HIGH|Clerk#000000240|0| the ironic packages wake carefully fina|61.2183|-149.894|61.2183|-149.894|
-807|145|F|222392.53|1993-11-24|3-MEDIUM|Clerk#000000012|0|refully special tithes. blithely regular accoun|61.1417|-149.864|61.1417|-149.864|
-832|29|F|68494.08|1992-04-19|5-LOW|Clerk#000000495|0|xes. bravely regular packages sleep up the furiously bold accou|61.1883|-149.883|61.1883|-149.883|
-833|56|F|49033.69|1994-02-13|3-MEDIUM|Clerk#000000437|0|ts haggle quickly across the slyl|61.2161|-149.876|61.2161|-149.876|
-834|43|F|46459.92|1994-05-23|3-MEDIUM|Clerk#000000805|0| sleep. quickly even foxes are boldly. slyly express requests use slyly|61.2193|-149.869|61.2193|-149.869|
-835|65|O|62430.67|1995-10-08|4-NOT SPECIFIED|Clerk#000000416|0|s about the carefully special foxes haggle quickly about the|61.2191|-149.888|61.2191|-149.888|
-836|70|O|72843.48|1996-11-25|4-NOT SPECIFIED|Clerk#000000729|0|ely bold excuses sleep regular ideas. furiously unusual ideas wake furiou|61.2191|-149.888|61.2191|-149.888|
-837|116|F|60918.41|1994-06-15|4-NOT SPECIFIED|Clerk#000000563|0|kages sleep slyly above the ironic, final orbits|61.2191|-149.888|61.2191|-149.888|
-838|17|O|82918.36|1998-01-29|5-LOW|Clerk#000000213|0| slyly around the slyly even|61.2191|-149.888|61.2191|-149.888|
-839|28|O|70182.63|1995-08-08|1-URGENT|Clerk#000000951|0|the carefully even platelets. furiously unusual fo|61.2191|-149.888|61.2191|-149.888|
-864|139|O|74710.74|1997-08-17|1-URGENT|Clerk#000000036|0|ly after the slyly regular deposits. express, regular asymptotes nag ca|61.2191|-149.888|61.2191|-149.888|
-865|4|F|70430.54|1993-05-04|3-MEDIUM|Clerk#000000337|0|. special packages wake after the carefully final accounts. express pinto be|61.2191|-149.888|61.2191|-149.888|
-866|40|F|4766.19|1992-11-28|3-MEDIUM|Clerk#000000718|0|ins after the even, even accounts nod blithel|61.2191|-149.888|61.2191|-149.888|
-867|26|F|7471.75|1993-11-16|3-MEDIUM|Clerk#000000877|0|pades nag quickly final, |61.2191|-149.888|61.2191|-149.888|
-868|104|F|127345.45|1992-06-09|4-NOT SPECIFIED|Clerk#000000782|0|onic theodolites print carefully. blithely dogge|61.2191|-149.888|61.2191|-149.888|
-869|136|O|58932.19|1997-01-12|2-HIGH|Clerk#000000245|0|ar sheaves are slowly. slyly even attainments boost theodolites. furiously|61.2191|-149.888|61.2191|-149.888|
-870|34|F|40492.37|1993-06-20|4-NOT SPECIFIED|Clerk#000000123|0|blithely ironic ideas nod. sly, r|61.2191|-149.888|61.2191|-149.888|
-871|16|O|172861.58|1995-11-15|5-LOW|Clerk#000000882|0|oss the ironic theodolites.|61.1891|-149.906|61.1891|-149.906|
-896|2|F|169847.63|1993-03-09|1-URGENT|Clerk#000000187|0|inal packages eat blithely according to the warhorses. furiously quiet de|61.2191|-149.888|61.2191|-149.888|
-897|49|P|57697.44|1995-03-20|1-URGENT|Clerk#000000316|0| wake quickly against |61.2191|-149.888|61.2191|-149.888|
-898|55|F|101020.75|1993-06-03|2-HIGH|Clerk#000000611|0|. unusual pinto beans haggle quickly across |61.1101|-149.857|61.1101|-149.857|
-899|109|O|125562.09|1998-04-08|5-LOW|Clerk#000000575|0|rts engage carefully final theodolites.|61.1101|-149.857|61.1101|-149.857|
-900|46|F|120073.51|1994-10-01|4-NOT SPECIFIED|Clerk#000000060|0| fluffily express deposits nag furiousl|61.1101|-149.857|61.1101|-149.857|
-901|13|O|81826.12|1998-07-21|4-NOT SPECIFIED|Clerk#000000929|0|lyly even foxes are furious, silent requests. requests about the quickly |61.1101|-149.857|61.1101|-149.857|
-902|10|F|37348.62|1994-07-27|4-NOT SPECIFIED|Clerk#000000811|0|yly final requests over the furiously regula|61.1101|-149.857|61.1101|-149.857|
-903|11|O|109351.87|1995-07-07|4-NOT SPECIFIED|Clerk#000000793|0|e slyly about the final pl|61.1101|-149.857|61.1101|-149.857|
-928|67|F|228136.49|1995-03-02|5-LOW|Clerk#000000450|0|ithely express pinto beans. |61.1101|-149.857|61.1101|-149.857|
-929|83|F|109301.02|1992-10-02|2-HIGH|Clerk#000000160|0|its. furiously even foxes affix carefully finally silent accounts. express req|61.1101|-149.857|61.1101|-149.857|
-930|131|F|199102.23|1994-12-17|1-URGENT|Clerk#000000004|0| accounts nag slyly. ironic, ironic accounts wake blithel|61.1101|-149.857|61.1101|-149.857|
-931|103|F|117909.23|1992-12-07|1-URGENT|Clerk#000000881|0|ss packages haggle furiously express, regular deposits. even, e|61.1101|-149.857|61.1101|-149.857|
-932|41|O|40234.50|1997-05-16|2-HIGH|Clerk#000000218|0|ly express instructions boost furiously reg|61.1101|-149.857|61.1101|-149.857|
-933|97|F|71349.30|1992-08-05|4-NOT SPECIFIED|Clerk#000000752|0|ial courts wake permanently against the furiously regular ideas. unusual |61.1101|-149.857|61.1101|-149.857|
-934|52|O|17213.59|1996-07-03|1-URGENT|Clerk#000000229|0|ts integrate carefully. sly, regular deposits af|61.1101|-149.857|61.1101|-149.857|
-935|50|O|97733.87|1997-09-24|5-LOW|Clerk#000000180|0|iously final deposits cajole. blithely even packages |61.1101|-149.857|61.1101|-149.857|
-960|35|F|63537.13|1994-09-21|3-MEDIUM|Clerk#000000120|0|regular accounts. requests|61.1101|-149.857|61.1101|-149.857|
-961|56|P|158893.16|1995-06-04|4-NOT SPECIFIED|Clerk#000000720|0|ons nag furiously among the quickl|61.1101|-149.857|61.1101|-149.857|
-962|37|F|98258.73|1994-05-06|5-LOW|Clerk#000000463|0|ments nag deposits. fluffily ironic a|61.1101|-149.857|61.1101|-149.857|
-963|26|F|53287.25|1994-05-26|3-MEDIUM|Clerk#000000497|0|uses haggle carefully. slyly even dependencies after the packages ha|61.1101|-149.857|61.1101|-149.857|
-964|76|O|131146.47|1995-05-20|3-MEDIUM|Clerk#000000657|0|print blithely ironic, careful theodolit|61.1101|-149.857|61.1101|-149.857|
-965|70|P|41758.44|1995-05-15|5-LOW|Clerk#000000218|0|iously special packages. slyly pending requests are carefully |64.8591|-147.917|64.8591|-147.917|
-966|14|O|120516.93|1998-04-30|2-HIGH|Clerk#000000239|0|special deposits. furious|64.8273|-147.715|64.8273|-147.715|
-967|110|F|179287.95|1992-06-21|3-MEDIUM|Clerk#000000167|0|excuses engage quickly bold dep|64.8281|-147.715|64.8281|-147.715|
-992|55|O|133665.12|1997-11-11|3-MEDIUM|Clerk#000000875|0|ts. regular pinto beans thrash carefully sl|64.8552|-147.763|64.8552|-147.763|
-993|80|O|198238.65|1995-09-10|3-MEDIUM|Clerk#000000894|0|quickly express accounts among the furiously bol|64.8481|-147.684|64.8481|-147.684|
-994|2|F|41433.48|1994-04-20|5-LOW|Clerk#000000497|0|ole. slyly bold excuses nag caref|64.8522|-147.773|64.8522|-147.773|
-995|116|P|135157.92|1995-05-31|3-MEDIUM|Clerk#000000439|0|deas. blithely final deposits play. express accounts wake blithely caref|64.8467|-147.703|64.8467|-147.703|
-996|71|O|47447.63|1997-12-29|1-URGENT|Clerk#000000497|0|arefully final packages into the slyly final requests affix blit|64.8963|-147.662|64.8963|-147.662|
-997|109|O|27561.82|1997-05-19|2-HIGH|Clerk#000000651|0|ly express depths. furiously final requests haggle furiously. carefu|64.8372|-147.796|64.8372|-147.796|
-998|32|F|65269.38|1994-11-26|4-NOT SPECIFIED|Clerk#000000956|0|ronic dolphins. ironic, bold ideas haggle furiously furious|64.8312|-147.716|64.8312|-147.716|
-999|61|F|145249.13|1993-09-05|5-LOW|Clerk#000000464|0|pitaphs sleep. regular accounts use. f|64.811|-147.71|64.811|-147.71|
-1024|4|O|176084.63|1997-12-23|5-LOW|Clerk#000000903|0| blithely. even, express theodolites cajole slyly across|64.8971|-147.663|64.8971|-147.663|
-1025|103|F|82034.03|1995-05-05|2-HIGH|Clerk#000000376|0|ross the slyly final pa|64.85|-147.699|64.85|-147.699|
-1026|73|O|36464.76|1997-06-04|5-LOW|Clerk#000000223|0|s wake blithely. special acco|64.8389|-147.743|64.8389|-147.743|
-1027|128|F|112770.89|1992-06-03|3-MEDIUM|Clerk#000000241|0|equests cajole. slyly final pinto bean|64.781|-148|64.781|-148|
-1028|70|F|153864.67|1994-01-01|2-HIGH|Clerk#000000131|0|ts are. final, silent deposits are among the fl|64.8377|-147.718|64.8377|-147.718|
-1029|130|F|47440.91|1994-06-21|2-HIGH|Clerk#000000700|0|quests sleep. slyly even foxes wake quickly final theodolites. clo|64.8248|-147.886|64.8248|-147.886|
-1030|134|F|16346.94|1994-06-15|5-LOW|Clerk#000000422|0|ully ironic accounts sleep carefully. requests are carefully alongside of the |64.818|-147.679|64.818|-147.679|
-1031|4|F|128024.71|1994-09-01|3-MEDIUM|Clerk#000000448|0|s; ironic theodolites along the carefully ex|64.8271|-147.79|64.8271|-147.79|
-1056|28|F|38446.39|1995-02-11|1-URGENT|Clerk#000000125|0|t, even deposits hang about the slyly special i|64.8451|-147.812|64.8451|-147.812|
-1057|76|F|108107.42|1992-02-20|1-URGENT|Clerk#000000124|0|cuses dazzle carefully careful, ironic pinto beans. carefully even theod|64.8311|-147.729|64.8311|-147.729|
-1058|53|F|89359.11|1993-04-26|3-MEDIUM|Clerk#000000373|0|kly pending courts haggle. blithely regular sheaves integrate carefully fi|64.8454|-147.855|64.8454|-147.855|
-1059|127|F|198360.22|1994-02-27|1-URGENT|Clerk#000000104|0|en accounts. carefully bold packages cajole daringly special depende|64.8302|-147.744|64.8302|-147.744|
-1060|140|F|121994.04|1993-02-21|3-MEDIUM|Clerk#000000989|0|l platelets sleep quickly slyly special requests. furiously |64.8113|-147.91|64.8113|-147.91|
-1061|103|O|166947.75|1998-05-15|5-LOW|Clerk#000000576|0|uests sleep at the packages. fur|64.8271|-147.789|64.8271|-147.789|
-1062|106|O|39805.04|1997-01-15|1-URGENT|Clerk#000000152|0|eposits use blithely |64.8451|-147.698|64.8451|-147.698|
-1063|37|F|41392.31|1994-04-02|2-HIGH|Clerk#000000024|0|deposits nag quickly regular deposits. quickl|64.8586|-147.69|64.8586|-147.69|
-1088|148|F|47120.41|1992-05-21|5-LOW|Clerk#000000347|0|counts are blithely. platelets print. carefully |64.8507|-147.702|64.8507|-147.702|
-1089|49|O|103192.74|1996-05-04|4-NOT SPECIFIED|Clerk#000000226|0|ns haggle ruthlessly. even requests are quickly abov|64.8371|-147.716|64.8371|-147.716|
-1090|19|O|32929.30|1997-11-15|2-HIGH|Clerk#000000300|0| furiously regular platelets haggle along the slyly unusual foxes! |64.8449|-147.743|64.8449|-147.743|
-1091|83|O|35795.22|1996-08-27|1-URGENT|Clerk#000000549|0| even pinto beans haggle quickly alongside of the eve|64.8475|-147.706|64.8475|-147.706|
-1092|124|P|85552.21|1995-03-04|3-MEDIUM|Clerk#000000006|0|re quickly along the blithe|64.8452|-147.714|64.8452|-147.714|
-1093|101|O|79189.58|1997-07-31|4-NOT SPECIFIED|Clerk#000000159|0| after the carefully ironic requests. carefully ironic packages wake fluffil|64.8125|-147.787|64.8125|-147.787|
-1094|145|O|9006.25|1997-12-24|3-MEDIUM|Clerk#000000570|0|beans affix furiously about the pending, even deposits. finally pendi|55.3801|-131.682|55.3801|-131.682|
-1095|145|O|178491.24|1995-08-22|3-MEDIUM|Clerk#000000709|0|sly bold requests cajole carefully according to|55.3801|-131.682|55.3801|-131.682|
-1120|140|O|107958.62|1997-11-07|3-MEDIUM|Clerk#000000319|0|lly special requests. slyly pending platelets are quickly pending requ|55.3801|-131.682|55.3801|-131.682|
-1121|29|O|241837.88|1997-01-13|3-MEDIUM|Clerk#000000541|0|r escapades. deposits above the fluffily bold requests hag|55.3801|-131.682|55.3801|-131.682|
-1122|121|O|179747.47|1997-01-10|1-URGENT|Clerk#000000083|0|uffily carefully final theodolites. furiously express packages affix|55.3801|-131.682|55.3801|-131.682|
-1123|73|O|93259.93|1996-08-03|3-MEDIUM|Clerk#000000929|0|uriously pending requests. slyly regular instruction|55.3801|-131.682|55.3801|-131.682|
-1124|80|O|141858.97|1998-07-30|5-LOW|Clerk#000000326|0|regular pinto beans along the fluffily silent packages|55.3599|-131.687|55.3599|-131.687|
-1125|25|F|80438.38|1994-10-27|2-HIGH|Clerk#000000510|0|ithely final requests. i|55.4381|-131.803|55.4381|-131.803|
-1126|145|O|59982.31|1998-01-28|4-NOT SPECIFIED|Clerk#000000928|0|d slyly regular ideas: special ideas believe slyly. slyly ironic sheaves w|55.3751|-131.718|55.3751|-131.718|
-1127|58|O|103320.91|1995-09-19|4-NOT SPECIFIED|Clerk#000000397|0|usly silent, regular pinto beans. blithely express requests boos|55.3421|-131.641|55.3421|-131.641|
-1152|49|F|51775.54|1994-08-14|4-NOT SPECIFIED|Clerk#000000496|0|equests. deposits ab|55.3408|-131.64|55.3408|-131.64|
-1153|121|O|220727.97|1996-04-18|5-LOW|Clerk#000000059|0| across the pending deposi|55.2978|-131.534|55.2978|-131.534|
-1154|37|F|192417.85|1992-02-15|1-URGENT|Clerk#000000268|0|old asymptotes are special requests. blithely even deposits sleep furiously|55.3801|-131.682|55.3801|-131.682|
-1155|149|O|126902.81|1997-10-06|2-HIGH|Clerk#000000164|0|c deposits haggle among the ironic, even requests. carefully ironic sheaves n|55.3801|-131.682|55.3801|-131.682|
-1156|133|O|217682.81|1996-10-19|1-URGENT|Clerk#000000200|0| blithely ironic dolphins. furiously pendi|55.3421|-131.622|55.3421|-131.622|
-1157|97|O|85394.06|1998-01-14|4-NOT SPECIFIED|Clerk#000000207|0|out the regular excuses boost carefully against the furio|55.3801|-131.682|55.3801|-131.682|
-1158|142|O|31075.51|1996-06-30|2-HIGH|Clerk#000000549|0|integrate slyly furiously ironic deposit|55.3801|-131.682|55.3801|-131.682|
-1159|70|F|55553.68|1992-09-18|3-MEDIUM|Clerk#000000992|0|ts may sleep. requests according to the|55.3801|-131.682|55.3801|-131.682|
-1184|89|O|39700.29|1997-10-26|5-LOW|Clerk#000000777|0|iously even packages haggle fluffily care|55.3267|-131.523|55.3267|-131.523|
-1185|74|F|47033.21|1992-08-24|5-LOW|Clerk#000000344|0| even escapades are. package|55.3522|-131.685|55.3522|-131.685|
-1186|59|O|82026.18|1996-08-15|4-NOT SPECIFIED|Clerk#000000798|0|ingly regular pinto beans: instructi|55.5351|-133.014|55.5351|-133.014|
-1187|134|F|85948.02|1992-11-20|3-MEDIUM|Clerk#000000047|0|s after the furiously final deposits boost slyly under the|55.5351|-133.014|55.5351|-133.014|
-1188|20|O|54655.07|1996-04-11|2-HIGH|Clerk#000000256|0|ully ironic deposits. slyl|55.5351|-133.014|55.5351|-133.014|
-1189|46|F|71017.99|1994-04-09|1-URGENT|Clerk#000000243|0|f the even accounts. courts print blithely ironic accounts. sile|55.5351|-133.014|55.5351|-133.014|
-1190|13|O|31043.39|1997-03-16|5-LOW|Clerk#000000575|0|ccounts above the foxes integrate carefully after the |55.5351|-133.014|55.5351|-133.014|
-1191|112|O|28623.04|1995-11-07|3-MEDIUM|Clerk#000000011|0|uests nag furiously. carefully even requests|55.4691|-132.855|55.4691|-132.855|
-1216|122|F|68056.57|1992-12-07|5-LOW|Clerk#000000918|0|nal foxes around the e|55.5511|-133.081|55.5511|-133.081|
-1217|7|F|40982.08|1992-04-26|4-NOT SPECIFIED|Clerk#000000538|0| foxes nag quickly. ironic excuses nod. blithely pending|55.5351|-133.014|55.5351|-133.014|
-1218|10|F|99834.47|1994-06-20|4-NOT SPECIFIED|Clerk#000000994|0|s cajole. special, silent deposits about the theo|55.5531|-133.097|55.5531|-133.097|
-1219|28|O|10163.56|1995-10-05|3-MEDIUM|Clerk#000000800|0|od carefully. slyly final dependencies across the even fray|55.5351|-133.014|55.5351|-133.014|
-1220|49|O|122157.14|1996-08-29|1-URGENT|Clerk#000000712|0|inal theodolites wake. fluffily ironic asymptotes cajol|55.4726|-131.793|55.4726|-131.793|
-1221|14|F|117397.16|1992-04-19|4-NOT SPECIFIED|Clerk#000000852|0| detect against the silent, even deposits. carefully ironic|55.3801|-131.682|55.3801|-131.682|
-1222|10|F|47623.94|1993-02-05|3-MEDIUM|Clerk#000000811|0|theodolites use quickly even accounts. carefully final asympto|55.3801|-131.682|55.3801|-131.682|
-1223|10|O|26714.67|1996-05-25|4-NOT SPECIFIED|Clerk#000000238|0|posits was blithely fr|55.3801|-131.682|55.3801|-131.682|
-1248|49|F|210713.88|1992-01-02|1-URGENT|Clerk#000000890|0|t the carefully regular dugouts. s|61.5745|-149.562|61.5745|-149.562|
-1249|149|F|45889.09|1994-01-05|1-URGENT|Clerk#000000095|0|al ideas sleep above the pending pin|61.7321|-150.12|61.7321|-150.12|
-1250|37|F|12907.62|1992-09-29|4-NOT SPECIFIED|Clerk#000000652|0|ts after the fluffily pending instructions use slyly about the s|61.5421|-149.419|61.5421|-149.419|
-1251|38|O|109536.55|1997-10-30|1-URGENT|Clerk#000000276|0|, brave sauternes. deposits boost fluffily.|61.5722|-149.702|61.5722|-149.702|
-1252|149|O|93403.05|1997-08-04|5-LOW|Clerk#000000348|0|ng the slyly regular excuses. special courts nag furiously blithely e|61.5743|-149.405|61.5743|-149.405|
-1253|115|F|92730.74|1993-01-26|1-URGENT|Clerk#000000775|0| requests sleep furiously even foxes. ruthless packag|61.578|-149.441|61.578|-149.441|
-1254|70|O|94649.25|1995-12-22|1-URGENT|Clerk#000000607|0| pinto beans. carefully regular request|61.5826|-149.427|61.5826|-149.427|
-1255|122|F|62518.31|1994-05-30|4-NOT SPECIFIED|Clerk#000000798|0|ct slyly regular accounts. quick|61.5586|-149.351|61.5586|-149.351|
-1280|97|F|91664.85|1993-01-11|5-LOW|Clerk#000000160|0|posits thrash quickly after the theodolites. furiously iro|61.5844|-149.442|61.5844|-149.442|
-1281|62|F|165454.51|1994-12-11|1-URGENT|Clerk#000000430|0|counts. carefully pending accounts eat |61.5817|-149.472|61.5817|-149.472|
-1282|116|F|61297.42|1992-02-29|4-NOT SPECIFIED|Clerk#000000168|0|he quickly special packages. furiously final re|61.6141|-149.457|61.6141|-149.457|
-1283|118|O|202623.92|1996-08-30|4-NOT SPECIFIED|Clerk#000000260|0| pinto beans boost slyly ac|61.5761|-149.602|61.5761|-149.602|
-1284|134|O|106122.38|1996-01-07|2-HIGH|Clerk#000000492|0|s. blithely silent deposits s|61.6141|-149.457|61.6141|-149.457|
-1285|11|F|139124.72|1992-06-01|1-URGENT|Clerk#000000423|0|cial deposits cajole after the ironic requests. p|61.58|-149.434|61.58|-149.434|
-1286|109|F|207291.83|1993-05-14|4-NOT SPECIFIED|Clerk#000000939|0| deposits use carefully from the excuses. slyly bold p|61.6002|-149.429|61.6002|-149.429|
-1287|19|F|131432.42|1994-07-05|2-HIGH|Clerk#000000288|0|ly ironic dolphins integrate furiously among the final packages. st|61.569|-149.347|61.569|-149.347|
-1312|112|F|58111.00|1994-05-19|3-MEDIUM|Clerk#000000538|0|n, express accounts across the ironic|61.5812|-149.448|61.5812|-149.448|
-1313|148|F|46598.65|1994-09-13|1-URGENT|Clerk#000000774|0|ld accounts. regular deposits cajole. ironically pending theodolites use car|61.6141|-149.457|61.6141|-149.457|
-1314|143|F|56207.66|1994-05-13|3-MEDIUM|Clerk#000000485|0|ickly blithe packages nod ideas. furiously bold braids boost around the car|61.6141|-149.457|61.6141|-149.457|
-1315|22|O|121935.23|1998-03-22|5-LOW|Clerk#000000840|0|final theodolites alongside of the carefu|61.6141|-149.457|61.6141|-149.457|
-1316|16|F|163746.47|1993-12-03|1-URGENT|Clerk#000000857|0|ully bold theodolites? pending, bold pin|61.5969|-149.367|61.5969|-149.367|
-1317|100|P|139714.71|1995-05-19|2-HIGH|Clerk#000000373|0|sts. furiously special deposits lose fur|61.58|-149.4|61.58|-149.4|
-1318|128|O|81663.65|1998-06-27|3-MEDIUM|Clerk#000000581|0|s hang bold requests. pending, re|61.5848|-149.445|61.5848|-149.445|
-1319|32|O|31103.83|1996-09-27|2-HIGH|Clerk#000000257|0|y across the ruthlessly ironic accounts. unusu|61.5811|-149.444|61.5811|-149.444|
-1344|17|F|43809.37|1992-04-16|5-LOW|Clerk#000000178|0|omise close, silent requests. pending theodolites boost pending |61.5733|-149.389|61.5733|-149.389|
-1345|95|F|111207.93|1992-10-28|5-LOW|Clerk#000000447|0| regular tithes. quickly fluffy de|61.6141|-149.457|61.6141|-149.457|
-1346|76|F|171975.62|1992-06-18|2-HIGH|Clerk#000000374|0|ges sleep quickly-- even pint|61.5952|-149.436|61.5952|-149.436|
-1347|41|O|173444.60|1997-06-20|5-LOW|Clerk#000000977|0|he furiously even foxes use carefully express req|61.5421|-149.419|61.5421|-149.419|
-1348|19|O|94135.77|1998-04-18|5-LOW|Clerk#000000206|0|tly. quickly even deposi|61.5783|-149.362|61.5783|-149.362|
-1349|64|O|46376.09|1997-10-26|1-URGENT|Clerk#000000543|0|yly! blithely special theodolites cajole. unusual, reg|61.7321|-150.12|61.7321|-150.12|
-1350|52|F|49305.98|1993-08-24|1-URGENT|Clerk#000000635|0|iously about the blithely special a|61.5691|-149.328|61.5691|-149.328|
-1351|106|O|24637.96|1998-04-20|1-URGENT|Clerk#000000012|0| cajole. regular, special re|61.6141|-149.457|61.6141|-149.457|
-1376|47|O|23984.88|1997-05-04|4-NOT SPECIFIED|Clerk#000000730|0|der furiously final, final frets. carefull|61.5819|-149.3|61.5819|-149.3|
-1377|20|O|108334.30|1998-04-24|4-NOT SPECIFIED|Clerk#000000625|0|lly across the blithely express accounts. ironic excuses promise carefully de|61.6431|-149.289|61.6431|-149.289|
-1378|20|O|118495.12|1996-03-09|4-NOT SPECIFIED|Clerk#000000705|0| furiously even tithes cajole slyly among the quick|61.6431|-149.292|61.6431|-149.292|
-1379|65|O|84627.76|1998-05-25|5-LOW|Clerk#000000861|0|y deposits are caref|61.6228|-149.313|61.6228|-149.313|
-1380|137|O|94969.41|1996-07-07|3-MEDIUM|Clerk#000000969|0|inal deposits wake slyly daringly even requests. bold, even foxe|61.2125|-149.894|61.2125|-149.894|
-1381|127|O|58212.22|1998-05-25|3-MEDIUM|Clerk#000000107|0|even requests breach after the bold, ironic instructions. slyly even|61.1879|-149.886|61.1879|-149.886|
-1382|133|F|173522.71|1993-08-17|5-LOW|Clerk#000000241|0|fully final packages sl|61.1594|-149.835|61.1594|-149.835|
-1383|121|F|34797.72|1993-04-27|2-HIGH|Clerk#000000785|0|ts. express requests sleep blithel|61.2123|-149.854|61.2123|-149.854|
-1408|55|O|183965.61|1997-12-26|4-NOT SPECIFIED|Clerk#000000942|0|t the quickly final asymptotes. unusual|61.1951|-149.945|61.1951|-149.945|
-1409|143|F|72440.52|1992-12-31|4-NOT SPECIFIED|Clerk#000000065|0|ructions. furiously unusual excuses are regular, unusual theodolites. fin|61.2138|-149.856|61.2138|-149.856|
-1410|113|O|114879.19|1997-04-12|5-LOW|Clerk#000000123|0|iously along the bravely regular dolphins. pinto beans cajole furiously sp|61.1255|-149.864|61.1255|-149.864|
-1411|95|F|164462.61|1994-12-21|2-HIGH|Clerk#000000566|0|s. furiously special excuses across the pending pinto beans haggle sp|61.2066|-149.808|61.2066|-149.808|
-1412|53|F|78676.54|1993-03-13|4-NOT SPECIFIED|Clerk#000000083|0|uffily daring theodolit|61.2138|-149.896|61.2138|-149.896|
-1413|91|O|75733.58|1997-06-14|3-MEDIUM|Clerk#000000342|0|, ironic instructions. carefully even packages dazzle|61.2161|-149.876|61.2161|-149.876|
-1414|77|O|38057.81|1995-08-16|1-URGENT|Clerk#000000883|0|ccounts. ironic foxes haggle car|61.1594|-149.888|61.1594|-149.888|
-1415|79|F|24654.79|1994-05-29|4-NOT SPECIFIED|Clerk#000000601|0|rays. blithely final ideas affix quickl|61.1806|-149.775|61.1806|-149.775|
-1440|98|O|50201.16|1995-08-10|5-LOW|Clerk#000000956|0| pending requests. closely s|61.1101|-149.857|61.1101|-149.857|
-1441|122|O|156477.94|1997-03-06|4-NOT SPECIFIED|Clerk#000000156|0|ter the excuses. ironic dependencies m|61.1541|-149.958|61.1541|-149.958|
-1442|112|F|7108.12|1994-07-05|4-NOT SPECIFIED|Clerk#000000935|0|nal pinto beans. slyly ironic ideas cajol|61.1268|-149.947|61.1268|-149.947|
-1443|44|O|44672.03|1996-12-16|5-LOW|Clerk#000000185|0|x blithely against the carefully final somas. even asymptotes are. quickly spe|61.0931|-149.785|61.0931|-149.785|
-1444|134|F|207907.60|1994-12-06|3-MEDIUM|Clerk#000000783|0|ove the bold accounts cajole fluffily about|61.1901|-149.892|61.1901|-149.892|
-1445|115|F|154653.32|1995-01-10|3-MEDIUM|Clerk#000000211|0|even packages wake fluffily |61.2183|-149.889|61.2183|-149.889|
-1446|41|O|27663.16|1998-02-16|5-LOW|Clerk#000000274|0|lly regular notornis above the requests sleep final accounts! |61.2164|-149.882|61.2164|-149.882|
-1447|91|F|108171.38|1992-10-15|2-HIGH|Clerk#000000880|0|inly against the blithely pending excuses. regular, pe|61.2161|-149.876|61.2161|-149.876|
-1472|149|O|65331.05|1996-10-06|5-LOW|Clerk#000000303|0|y special dolphins around the final dependencies wake quick|61.219|-149.792|61.219|-149.792|
-1473|94|O|80624.38|1997-03-17|3-MEDIUM|Clerk#000000960|0|furiously close accoun|61.2188|-149.892|61.2188|-149.892|
-1474|70|F|51697.18|1995-01-09|1-URGENT|Clerk#000000438|0|detect quickly above the carefully even |61.2143|-149.837|61.2143|-149.837|
-1475|5|O|185496.66|1997-11-12|2-HIGH|Clerk#000000972|0|cally final packages boost. blithely ironic packa|61.1608|-149.835|61.1608|-149.835|
-1476|145|O|18795.62|1996-06-27|2-HIGH|Clerk#000000673|0|ding accounts hinder alongside of the quickly pending requests. fluf|61.1886|-149.944|61.1886|-149.944|
-1477|76|O|231831.35|1997-08-24|5-LOW|Clerk#000000612|0|ly bold foxes. final ideas would cajo|61.1201|-149.89|61.1201|-149.89|
-1478|50|O|20791.50|1997-08-03|2-HIGH|Clerk#000000827|0|lessly. carefully express|61.1201|-149.89|61.1201|-149.89|
-1479|16|O|31471.04|1995-12-16|4-NOT SPECIFIED|Clerk#000000697|0|he furiously even foxes. thinly bold deposits|61.1585|-149.872|61.1585|-149.872|
-1504|2|F|89399.40|1992-08-28|3-MEDIUM|Clerk#000000381|0|, brave deposits. bold de|61.195|-149.892|61.195|-149.892|
-1505|37|F|55892.35|1992-08-21|2-HIGH|Clerk#000000544|0|s. slyly ironic packages cajole. carefully regular packages haggle |61.0895|-149.694|61.0895|-149.694|
-1506|148|F|195844.84|1992-09-21|3-MEDIUM|Clerk#000000620|0| dependencies. accounts affix blithely slowly unusual deposits. slyly regular |61.2201|-149.831|61.2201|-149.831|
-1507|121|F|96166.92|1993-10-14|3-MEDIUM|Clerk#000000305|0|stealthy, ironic de|61.1663|-149.867|61.1663|-149.867|
-1508|103|O|151282.65|1998-04-10|5-LOW|Clerk#000000117|0| after the furiously regular pinto beans hang slyly quickly ironi|61.2138|-149.906|61.2138|-149.906|
-1509|64|F|180455.98|1993-07-08|5-LOW|Clerk#000000770|0|the regular ideas. regul|61.2193|-149.902|61.2193|-149.902|
-1510|53|O|154590.05|1996-09-17|5-LOW|Clerk#000000128|0|ld carefully. furiously final asymptotes haggle furiously|61.1201|-149.89|61.1201|-149.89|
-1511|79|O|59651.38|1996-12-22|4-NOT SPECIFIED|Clerk#000000386|0|ts above the depend|61.1601|-149.984|61.1601|-149.984|
-1536|94|O|5184.26|1997-01-26|3-MEDIUM|Clerk#000000117|0|ges are! furiously final deposits cajole iron|61.1101|-149.857|61.1101|-149.857|
-1537|109|F|108317.51|1992-02-15|4-NOT SPECIFIED|Clerk#000000862|0|g to the even deposits. ironic, final packages |61.1101|-149.857|61.1101|-149.857|
-1538|29|O|179554.41|1995-06-18|4-NOT SPECIFIED|Clerk#000000258|0| instructions. regular theod|61.1101|-149.857|61.1101|-149.857|
-1539|112|F|39612.63|1995-03-10|5-LOW|Clerk#000000840|0|nstructions boost pa|61.1101|-149.857|61.1101|-149.857|
-1540|16|F|128014.15|1992-08-05|2-HIGH|Clerk#000000927|0|r ideas hinder blithe|61.1101|-149.857|61.1101|-149.857|
-1541|94|P|47286.32|1995-05-18|1-URGENT|Clerk#000000906|0|y. slyly ironic warhorses around the furiously regul|61.1101|-149.857|61.1101|-149.857|
-1542|143|F|132972.24|1993-09-15|3-MEDIUM|Clerk#000000435|0|t the furiously close deposits do was f|61.1101|-149.857|61.1101|-149.857|
-1543|52|O|139047.22|1997-02-20|1-URGENT|Clerk#000000398|0|unts. furiously pend|61.1101|-149.857|61.1101|-149.857|
-1568|17|O|76119.72|1997-01-30|4-NOT SPECIFIED|Clerk#000000554|0|d notornis. carefully |61.1101|-149.857|61.1101|-149.857|
-1569|104|O|87803.55|1998-04-02|5-LOW|Clerk#000000786|0|orbits. fluffily even decoys serve blithely. furiously furious realms nag acro|61.1101|-149.857|61.1101|-149.857|
-1570|124|O|35589.57|1998-03-16|1-URGENT|Clerk#000000745|0|pinto beans haggle furiousl|61.1101|-149.857|61.1101|-149.857|
-1571|103|F|151404.78|1992-12-05|2-HIGH|Clerk#000000565|0|ously furiously bold warthogs. slyly ironic instructions are quickly a|61.1101|-149.857|61.1101|-149.857|
-1572|11|O|47232.79|1996-02-24|2-HIGH|Clerk#000000994|0|fluffily ironic accounts haggle blithely final platelets! slyly regular foxes|61.1101|-149.857|61.1101|-149.857|
-1573|148|F|86918.57|1992-12-28|2-HIGH|Clerk#000000940|0|ess, ironic deposits use along the carefu|61.1101|-149.857|61.1101|-149.857|
-1574|134|O|179923.54|1996-12-12|3-MEDIUM|Clerk#000000809|0| ideas hinder after the carefully unusual |61.1101|-149.857|61.1101|-149.857|
-1575|145|O|197031.52|1995-09-13|3-MEDIUM|Clerk#000000497|0|. furiously regular dep|61.1101|-149.857|61.1101|-149.857|
-1600|94|F|130515.61|1993-03-03|3-MEDIUM|Clerk#000000627|0|tions cajole quietly above the regular, silent requests. slyly fin|61.1101|-149.857|61.1101|-149.857|
-1601|53|F|73962.95|1994-08-27|5-LOW|Clerk#000000469|0|ent deposits are ca|61.1101|-149.857|61.1101|-149.857|
-1602|1|F|4225.26|1993-08-05|5-LOW|Clerk#000000660|0|deposits. busily silent instructions haggle furiously. fin|61.1101|-149.857|61.1101|-149.857|
-1603|2|F|29305.47|1993-07-31|4-NOT SPECIFIED|Clerk#000000869|0|s. slyly silent deposits boo|61.1101|-149.857|61.1101|-149.857|
-1604|113|F|107139.29|1993-07-17|5-LOW|Clerk#000000512|0|lithely silent waters. blithely unusual packages alongside |61.1101|-149.857|61.1101|-149.857|
-1605|58|O|130687.64|1998-04-24|4-NOT SPECIFIED|Clerk#000000616|0|sleep furiously? ruthless, even pinto beans |61.1101|-149.857|61.1101|-149.857|
-1606|53|O|115877.40|1997-04-17|4-NOT SPECIFIED|Clerk#000000550|0|r requests. quickly even platelets breach before the ironically|61.1101|-149.857|61.1101|-149.857|
-1607|149|O|166335.03|1995-12-16|2-HIGH|Clerk#000000498|0| bold, pending foxes haggle. slyly silent |61.1101|-149.857|61.1101|-149.857|
-1632|67|O|183286.33|1997-01-08|3-MEDIUM|Clerk#000000351|0|onic requests are accounts. bold a|61.1101|-149.857|61.1101|-149.857|
-1633|16|O|52359.51|1995-10-14|2-HIGH|Clerk#000000666|0|y silent accounts sl|61.1101|-149.857|61.1101|-149.857|
-1634|70|O|145898.47|1996-09-10|1-URGENT|Clerk#000000360|0|arefully blithely ironic requests. slyly unusual instructions alongside|61.1101|-149.857|61.1101|-149.857|
-1635|4|O|70232.26|1997-02-13|3-MEDIUM|Clerk#000000958|0|s. slyly ironic requests affix slyly |61.1101|-149.857|61.1101|-149.857|
-1636|79|O|172021.87|1997-06-17|3-MEDIUM|Clerk#000000457|0|ding requests. slyly ironic courts wake quickl|61.1101|-149.857|61.1101|-149.857|
-1637|73|F|180912.15|1995-02-08|4-NOT SPECIFIED|Clerk#000000189|0| final accounts. blithely silent ideas cajole bravely. carefully express |61.1101|-149.857|61.1101|-149.857|
-1638|139|O|172436.30|1997-08-13|2-HIGH|Clerk#000000643|0|he fluffily regular asymp|61.1101|-149.857|61.1101|-149.857|
-1639|5|O|104166.56|1995-08-20|4-NOT SPECIFIED|Clerk#000000939|0|haggle furiously. final requests detect furious|61.1101|-149.857|61.1101|-149.857|
-1664|64|O|178060.22|1996-03-03|1-URGENT|Clerk#000000090|0|y quickly even asymptotes. furiously regular packages haggle quickly fin|61.1101|-149.857|61.1101|-149.857|
-1665|76|F|4819.91|1994-05-08|2-HIGH|Clerk#000000920|0|ly regular packages are fluffily even ideas. fluffily final|61.1101|-149.857|61.1101|-149.857|
-1666|95|O|128367.97|1995-10-18|1-URGENT|Clerk#000000849|0|ffily pending dependencies wake fluffily. pending, final accounts |61.1101|-149.857|61.1101|-149.857|
-1667|5|O|125030.37|1997-10-10|2-HIGH|Clerk#000000103|0|e accounts. slyly express accounts must are a|64.8459|-147.759|64.8459|-147.759|
-1668|142|O|137576.19|1997-07-12|4-NOT SPECIFIED|Clerk#000000148|0|eodolites. carefully dogged dolphins haggle q|64.8426|-147.725|64.8426|-147.725|
-1669|2|O|24362.39|1997-06-09|3-MEDIUM|Clerk#000000663|0|er ironic requests detect furiously blithely sp|64.9401|-147.402|64.9401|-147.402|
-1670|25|O|89999.72|1997-05-24|2-HIGH|Clerk#000000320|0|unusual dependencies. furiously special platelets main|64.9401|-147.402|64.9401|-147.402|
-1671|35|O|104391.11|1996-07-27|4-NOT SPECIFIED|Clerk#000000275|0|ly. slyly pending requests was above the |64.8331|-147.647|64.8331|-147.647|
-1696|4|O|102665.03|1998-01-08|4-NOT SPECIFIED|Clerk#000000041|0|bravely bold accounts above the quickly bold|64.8371|-147.716|64.8371|-147.716|
-1697|76|O|122621.31|1996-10-07|1-URGENT|Clerk#000000815|0|o x-ray blithely. pl|64.8574|-147.759|64.8574|-147.759|
-1698|40|O|141118.87|1997-04-23|2-HIGH|Clerk#000000432|0|slyly. carefully express deposit|64.836|-147.727|64.836|-147.727|
-1699|85|F|66408.29|1993-12-30|1-URGENT|Clerk#000000125|0|jole blithely. furiously un|64.8132|-147.76|64.8132|-147.76|
-1700|65|O|89143.36|1996-06-15|3-MEDIUM|Clerk#000000328|0|ely final dolphins wake sometimes above the quietly regular deposits. fur|64.8451|-147.96|64.8451|-147.96|
-1701|130|F|72835.95|1992-05-19|2-HIGH|Clerk#000000395|0|furiously. regular, close theodoli|64.8891|-147.851|64.8891|-147.851|
-1702|67|P|194119.31|1995-05-07|2-HIGH|Clerk#000000300|0|around the carefully final deposits cajole carefully according to the b|64.8151|-147.707|64.8151|-147.707|
-1703|134|F|121220.59|1993-01-28|3-MEDIUM|Clerk#000000463|0| pinto beans poach. bold courts boost. regular, express deposits at|64.8363|-147.803|64.8363|-147.803|
-1728|64|O|131604.34|1996-05-22|2-HIGH|Clerk#000000711|0|beans. slyly regular instructions sleep! slyly final packages|64.8298|-147.611|64.8298|-147.611|
-1729|133|F|12137.76|1992-05-19|2-HIGH|Clerk#000000158|0|pending foxes wake. accounts|64.8989|-147.701|64.8989|-147.701|
-1730|124|O|150886.49|1998-07-24|5-LOW|Clerk#000000794|0| fluffily pending deposits serve. furiously even requests wake furiou|64.8371|-147.716|64.8371|-147.716|
-1731|128|O|190490.78|1996-01-06|1-URGENT|Clerk#000000268|0|lithely regular, final instructions. ironic, express packages are above|64.8147|-147.706|64.8147|-147.706|
-1732|146|F|179854.51|1993-11-29|5-LOW|Clerk#000000903|0|inal requests integrate dolph|64.8451|-147.812|64.8451|-147.812|
-1733|148|O|165489.52|1996-05-12|2-HIGH|Clerk#000000789|0|e carefully according to the accounts. furiously pending instructions sleep|64.8386|-147.788|64.8386|-147.788|
-1734|7|F|44002.53|1994-06-11|2-HIGH|Clerk#000000722|0| final ideas haggle. blithely quick foxes sleep busily bold ideas. i|64.8372|-147.768|64.8372|-147.768|
-1735|22|F|98541.95|1992-12-27|1-URGENT|Clerk#000000458|0|ully idle requests wake qu|64.8151|-147.707|64.8151|-147.707|
-1760|115|O|82151.12|1996-05-17|5-LOW|Clerk#000000917|0| deposits. busily regular deposits wake blithely along the furiously even re|64.843|-147.722|64.843|-147.722|
-1761|106|F|211925.95|1993-12-24|2-HIGH|Clerk#000000817|0|efully slyly bold frets. packages boost b|64.8426|-147.725|64.8426|-147.725|
-1762|77|F|202227.17|1994-08-20|4-NOT SPECIFIED|Clerk#000000653|0|ly ironic packages. furi|64.8615|-147.723|64.8615|-147.723|
-1763|121|O|140685.01|1996-10-29|2-HIGH|Clerk#000000321|0|es. bold dependencies haggle furiously along |64.8694|-147.067|64.8694|-147.067|
-1764|29|F|47384.71|1992-03-25|1-URGENT|Clerk#000000182|0|. slyly final packages integrate carefully acro|64.8404|-147.724|64.8404|-147.724|
-1765|73|O|36551.43|1995-12-03|4-NOT SPECIFIED|Clerk#000000490|0| regular excuses wake slyly|64.9686|-147.577|64.9686|-147.577|
-1766|139|O|41032.81|1996-10-12|2-HIGH|Clerk#000000983|0|unusual deposits affix quickly beyond the carefully s|64.8497|-147.732|64.8497|-147.732|
-1767|25|P|136582.60|1995-03-14|2-HIGH|Clerk#000000327|0|eposits use carefully carefully regular platelets. quickly regular packages al|64.8861|-147.587|64.8861|-147.587|
-1792|49|F|107919.86|1993-11-09|5-LOW|Clerk#000000102|0|ructions haggle along the pending packages. carefully speci|64.8508|-147.703|64.8508|-147.703|
-1793|19|F|82504.56|1992-07-12|4-NOT SPECIFIED|Clerk#000000291|0|regular packages cajole. blithely special packages according to the final d|64.841|-147.72|64.841|-147.72|
-1794|140|O|179462.21|1997-09-28|1-URGENT|Clerk#000000686|0|ally silent pinto beans. regular package|64.8375|-147.721|64.8375|-147.721|
-1795|94|F|146849.33|1994-03-19|2-HIGH|Clerk#000000815|0| quickly final packages! blithely dogged accounts c|64.849|-147.813|64.849|-147.813|
-1796|47|F|33755.47|1992-11-21|2-HIGH|Clerk#000000245|0|eans use furiously around th|55.3801|-131.682|55.3801|-131.682|
-1797|125|O|51494.47|1996-05-07|3-MEDIUM|Clerk#000000508|0|quiet platelets haggle since the quickly ironic instructi|55.3801|-131.682|55.3801|-131.682|
-1798|52|O|46393.97|1997-07-28|1-URGENT|Clerk#000000741|0|al foxes are blithe|55.3603|-131.702|55.3603|-131.702|
-1799|61|F|46815.93|1994-03-07|4-NOT SPECIFIED|Clerk#000000339|0|ns sleep furiously final waters. blithely regular instructions h|55.7511|-132.865|55.7511|-132.865|
-1824|49|F|81351.53|1994-05-05|1-URGENT|Clerk#000000972|0|e blithely fluffily|55.7511|-132.865|55.7511|-132.865|
-1825|148|F|150582.77|1993-12-05|3-MEDIUM|Clerk#000000345|0|ironic, final accou|60.3311|-151.284|60.3311|-151.284|
-1826|82|F|124719.97|1992-04-16|4-NOT SPECIFIED|Clerk#000000718|0|the even asymptotes dazzle fluffily slyly regular asymptotes. final, unu|60.3311|-151.284|60.3311|-151.284|
-1827|106|O|210113.88|1996-06-22|4-NOT SPECIFIED|Clerk#000000369|0|luffily even requests haggle sly|60.3311|-151.284|60.3311|-151.284|
-1828|32|F|137369.50|1994-04-18|3-MEDIUM|Clerk#000000840|0|y quickly bold packag|60.4341|-151.283|60.4341|-151.283|
-1829|112|F|127532.20|1994-05-08|2-HIGH|Clerk#000000537|0| accounts wake above the furiously unusual requests. pending package|60.3311|-151.284|60.3311|-151.284|
-1830|133|F|85122.24|1995-02-23|1-URGENT|Clerk#000000045|0|according to the even,|60.3311|-151.284|60.3311|-151.284|
-1831|71|F|58032.77|1993-12-02|1-URGENT|Clerk#000000854|0| accounts. carefully even accounts boost furiously. regular ideas engage. |60.3311|-151.284|60.3311|-151.284|
-1856|106|F|189361.42|1992-03-20|4-NOT SPECIFIED|Clerk#000000952|0|. special pinto beans run acr|60.3311|-151.284|60.3311|-151.284|
-1857|133|F|102793.59|1993-01-13|2-HIGH|Clerk#000000083|0|hely final ideas slee|60.3311|-151.284|60.3311|-151.284|
-1858|143|O|30457.91|1997-12-13|1-URGENT|Clerk#000000389|0|thely. slyly final deposits sleep|60.4311|-151.286|60.4311|-151.286|
-1859|61|O|105094.09|1997-04-11|4-NOT SPECIFIED|Clerk#000000949|0| the foxes. bravely special excuses nag carefully special r|60.3311|-151.284|60.3311|-151.284|
-1860|10|O|9103.40|1996-04-04|3-MEDIUM|Clerk#000000556|0|osits. quickly bold deposits according to |60.3311|-151.284|60.3311|-151.284|
-1861|70|F|95063.41|1994-01-03|3-MEDIUM|Clerk#000000847|0|r the fluffily close sauternes. furio|60.3311|-151.284|60.3311|-151.284|
-1862|34|O|97981.06|1998-02-24|5-LOW|Clerk#000000348|0|ts snooze ironically abou|60.3311|-151.284|60.3311|-151.284|
-1863|74|F|96359.65|1993-09-23|4-NOT SPECIFIED|Clerk#000000658|0|old sentiments. careful, |60.3191|-151.296|60.3191|-151.296|
-1888|121|F|224724.11|1993-10-31|4-NOT SPECIFIED|Clerk#000000659|0|olites. pinto beans cajole. regular deposits affix. slyly regular|60.3311|-151.284|60.3311|-151.284|
-1889|25|O|96431.77|1997-03-16|1-URGENT|Clerk#000000854|0|p around the regular notornis. unusual deposits|60.3311|-151.284|60.3311|-151.284|
-1890|10|O|202364.58|1996-12-18|4-NOT SPECIFIED|Clerk#000000627|0|romise final, regular deposits. regular fox|60.3311|-151.284|60.3311|-151.284|
-1891|61|F|76848.96|1994-12-15|5-LOW|Clerk#000000495|0|unusual foxes sleep regular deposits. requests wake special pac|60.5563|-151.241|60.5563|-151.241|
-1892|25|F|133273.64|1994-03-26|5-LOW|Clerk#000000733|0|sts. slyly regular dependencies use slyly. ironic, spec|60.6331|-151.163|60.6331|-151.163|
-1893|125|O|116792.13|1997-10-30|2-HIGH|Clerk#000000111|0|olites. silent, special deposits eat slyly quickly express packages; hockey p|60.6331|-151.163|60.6331|-151.163|
-1894|76|F|44387.23|1992-03-30|1-URGENT|Clerk#000000626|0|e furiously. furiously even accounts are slyly final accounts. closely speci|60.6331|-151.163|60.6331|-151.163|
-1895|7|F|44429.81|1994-05-30|3-MEDIUM|Clerk#000000878|0|ress accounts. bold accounts cajole. slyly final pinto beans poach regul|60.6331|-151.163|60.6331|-151.163|
-1920|110|O|119605.91|1998-06-24|5-LOW|Clerk#000000018|0|hely; furiously regular excuses|60.5551|-151.245|60.5551|-151.245|
-1921|88|F|57584.12|1994-01-18|3-MEDIUM|Clerk#000000293|0|counts. slyly quiet requests along the ruthlessly regular accounts are |60.6331|-151.163|60.6331|-151.163|
-1922|56|O|11575.77|1996-07-13|3-MEDIUM|Clerk#000000984|0|side of the blithely final re|60.5506|-151.141|60.5506|-151.141|
-1923|136|O|171128.10|1997-07-07|1-URGENT|Clerk#000000471|0| express dolphins. |60.5681|-151.281|60.5681|-151.281|
-1924|76|O|169756.19|1996-09-07|4-NOT SPECIFIED|Clerk#000000823|0| of the ironic accounts. instructions near the final instr|60.5465|-151.147|60.5465|-151.147|
-1925|17|F|146382.71|1992-03-05|1-URGENT|Clerk#000000986|0|e slyly regular deposits. furiously |60.6331|-151.163|60.6331|-151.163|
-1926|94|O|100035.03|1996-01-31|2-HIGH|Clerk#000000568|0|cajole. even warhorses sleep carefully. |60.5578|-151.116|60.5578|-151.116|
-1927|140|O|23327.88|1995-09-30|3-MEDIUM|Clerk#000000616|0|riously special packages. permanent pearls wake furiously. even packages alo|61.6182|-149.385|61.6182|-149.385|
-1952|67|F|12896.25|1994-03-16|2-HIGH|Clerk#000000254|0| silent accounts boost |61.6141|-149.457|61.6141|-149.457|
-1953|149|F|57213.18|1993-11-30|3-MEDIUM|Clerk#000000891|0| fluffily along the quickly even packages. |61.5765|-149.407|61.5765|-149.407|
-1954|56|O|158853.63|1997-05-31|4-NOT SPECIFIED|Clerk#000000104|0| unusual excuses cajole according to the blithely regular theodolites.|61.6091|-149.77|61.6091|-149.77|
-1955|13|F|103085.13|1992-04-20|1-URGENT|Clerk#000000792|0|ly special ideas. sometimes final |61.5821|-149.438|61.5821|-149.438|
-1956|127|F|88704.26|1992-09-20|4-NOT SPECIFIED|Clerk#000000600|0|ironic ideas are silent ideas. furiously final deposits sleep slyly carefu|61.6183|-149.373|61.6183|-149.373|
-1957|31|O|77482.87|1998-07-21|2-HIGH|Clerk#000000639|0|nding excuses about the |61.6131|-149.403|61.6131|-149.403|
-1958|53|O|176294.34|1995-09-22|5-LOW|Clerk#000000343|0| haggle blithely. flu|61.6352|-149.265|61.6352|-149.265|
-1959|43|O|62277.18|1997-01-13|4-NOT SPECIFIED|Clerk#000000631|0| cajole about the blithely express requests. even excuses mold bl|61.5751|-149.645|61.5751|-149.645|
-1984|52|O|79230.47|1998-04-01|1-URGENT|Clerk#000000416|0| slyly special instructions. unusual foxes use packages. carefully regular req|61.6168|-149.374|61.6168|-149.374|
-1985|7|F|171522.54|1994-09-02|4-NOT SPECIFIED|Clerk#000000741|0|slyly slyly even pains. slyly reg|61.5939|-149.43|61.5939|-149.43|
-1986|149|F|34269.96|1994-05-05|2-HIGH|Clerk#000000609|0|across the theodolites. quick|61.5792|-149.495|61.5792|-149.495|
-1987|100|F|6406.29|1994-04-30|2-HIGH|Clerk#000000652|0|gular platelets alongside |61.6141|-149.457|61.6141|-149.457|
-1988|109|O|117132.72|1995-10-06|4-NOT SPECIFIED|Clerk#000000011|0|ly ironic dolphins serve quickly busy accounts. bu|61.5829|-149.448|61.5829|-149.448|
-1989|118|F|39263.28|1994-03-16|4-NOT SPECIFIED|Clerk#000000747|0|ely bold pinto beans ha|61.5938|-149.387|61.5938|-149.387|
-1990|119|F|48781.39|1994-12-16|2-HIGH|Clerk#000000114|0|e bold patterns. always regul|61.5849|-149.38|61.5849|-149.38|
-1991|19|F|139854.41|1992-09-07|4-NOT SPECIFIED|Clerk#000000854|0|ing accounts can haggle at the carefully final Tiresias-- pending, regular|61.5729|-149.389|61.5729|-149.389|
-2016|8|O|24347.36|1996-08-16|3-MEDIUM|Clerk#000000641|0|the carefully ironic foxes. requests nag bold, r|61.5823|-149.462|61.5823|-149.462|
-2017|101|O|70529.27|1998-05-13|3-MEDIUM|Clerk#000000427|0|nusual requests. blit|61.57|-149.331|61.57|-149.331|
-2018|19|P|25007.95|1995-04-05|4-NOT SPECIFIED|Clerk#000000920|0|gular accounts wake fur|61.5821|-149.438|61.5821|-149.438|
-2019|136|F|43789.14|1992-10-23|1-URGENT|Clerk#000000565|0| furiously bold packages. fluffily fi|61.6141|-149.457|61.6141|-149.457|
-2020|73|F|136162.13|1993-06-21|3-MEDIUM|Clerk#000000192|0|es. furiously regular packages above the furiously special theodolites are a|61.6115|-149.331|61.6115|-149.331|
-2021|70|O|27016.74|1995-07-15|1-URGENT|Clerk#000000155|0|ong the furiously regular requests. unusual deposits wake fluffily inside|61.6091|-149.77|61.6091|-149.77|
-2022|62|F|206742.11|1992-03-15|1-URGENT|Clerk#000000268|0| dependencies sleep fluffily even, ironic deposits. express, silen|61.6141|-149.457|61.6141|-149.457|
-2023|118|F|144123.37|1992-05-06|5-LOW|Clerk#000000137|0|ular courts engage according to the|61.5826|-149.427|61.5826|-149.427|
-2048|17|F|33401.77|1993-11-15|1-URGENT|Clerk#000000934|0|s cajole after the blithely final accounts. f|61.5976|-149.366|61.5976|-149.366|
-2049|31|O|153048.74|1995-12-07|2-HIGH|Clerk#000000859|0|ly regular requests thrash blithely about the fluffily even theodolites. r|61.5976|-149.366|61.5976|-149.366|
-2050|28|F|208517.98|1994-06-02|4-NOT SPECIFIED|Clerk#000000821|0|d accounts against the furiously regular packages use bli|61.5531|-149.651|61.5531|-149.651|
-2051|40|O|87988.34|1996-03-18|4-NOT SPECIFIED|Clerk#000000333|0|ctions sleep blithely. blithely regu|61.5531|-149.651|61.5531|-149.651|
-2052|91|F|141822.19|1992-04-13|2-HIGH|Clerk#000000767|0| requests sleep around the even, even courts. ironic theodolites affix furious|61.5883|-149.456|61.5883|-149.456|
-2053|142|F|125125.57|1995-02-07|1-URGENT|Clerk#000000717|0|ar requests: blithely sly accounts boost carefully across t|61.6249|-149.435|61.6249|-149.435|
-2054|41|F|144335.16|1992-06-08|4-NOT SPECIFIED|Clerk#000000103|0|l requests affix carefully about the furiously special|61.6141|-149.457|61.6141|-149.457|
-2055|97|F|57092.26|1993-09-04|1-URGENT|Clerk#000000067|0|. warhorses affix slyly blithely express instructions? fur|61.5709|-149.452|61.5709|-149.452|
-2080|95|F|45767.69|1993-06-18|5-LOW|Clerk#000000190|0|ironic, pending theodolites are carefully about the quickly regular theodolite|61.6651|-149.465|61.6651|-149.465|
-2081|121|O|145654.97|1997-07-05|2-HIGH|Clerk#000000136|0|ong the regular theo|61.5841|-149.441|61.5841|-149.441|
-2082|49|F|46753.63|1995-01-10|2-HIGH|Clerk#000000354|0|cial accounts. ironic, express dolphins nod slyly sometimes final reques|61.1571|-149.883|61.1571|-149.883|
-2083|101|F|31795.52|1993-07-14|3-MEDIUM|Clerk#000000361|0|al patterns. bold, final foxes nag bravely about the furiously express|61.2198|-149.733|61.2198|-149.733|
-2084|80|F|190652.53|1993-03-17|2-HIGH|Clerk#000000048|0|zle furiously final, careful packages. slyly ironic ideas amo|61.1863|-149.976|61.1863|-149.976|
-2085|49|F|45311.07|1993-11-21|3-MEDIUM|Clerk#000000818|0|ress, express ideas haggle|61.2161|-149.876|61.2161|-149.876|
-2086|142|F|188985.18|1994-10-19|1-URGENT|Clerk#000000046|0| permanently regular|61.2031|-149.749|61.2031|-149.749|
-2087|50|O|53581.41|1998-01-31|2-HIGH|Clerk#000000626|0|e always regular packages nod against the furiously spec|61.1644|-149.897|61.1644|-149.897|
-2112|64|O|17986.15|1997-02-05|2-HIGH|Clerk#000000351|0|against the slyly even id|61.1834|-149.866|61.1834|-149.866|
-2113|32|O|65678.21|1997-11-08|2-HIGH|Clerk#000000527|0|slyly regular instruct|61.1731|-149.889|61.1731|-149.889|
-2114|79|F|106446.02|1995-01-16|5-LOW|Clerk#000000751|0|r, unusual accounts haggle across the busy platelets. carefully |61.1089|-149.854|61.1089|-149.854|
-2115|106|O|134814.65|1998-05-23|4-NOT SPECIFIED|Clerk#000000101|0|odolites boost. carefully regular excuses cajole. quickly ironic pinto be|61.1951|-149.916|61.1951|-149.916|
-2116|23|F|60887.90|1994-08-26|1-URGENT|Clerk#000000197|0|efully after the asymptotes. furiously sp|61.2157|-149.821|61.2157|-149.821|
-2117|22|O|145713.03|1997-04-26|2-HIGH|Clerk#000000887|0|ely even dependencies. regular foxes use blithely.|61.1372|-149.954|61.1372|-149.954|
-2118|134|O|38974.67|1996-10-09|1-URGENT|Clerk#000000196|0|ial requests wake carefully special packages. f|61.1955|-149.737|61.1955|-149.737|
-2119|64|O|34632.57|1996-08-20|2-HIGH|Clerk#000000434|0|uickly pending escapades. fluffily ir|61.1444|-149.867|61.1444|-149.867|
-2144|136|F|119917.28|1994-03-29|3-MEDIUM|Clerk#000000546|0|t. carefully quick requests across the deposits wake regu|61.2178|-149.882|61.2178|-149.882|
-2145|134|F|18885.35|1992-10-03|1-URGENT|Clerk#000000886|0|sts would snooze blithely alongside of th|61.1824|-149.849|61.1824|-149.849|
-2146|118|F|179686.07|1992-09-14|4-NOT SPECIFIED|Clerk#000000476|0|ven packages. dependencies wake slyl|61.2161|-149.876|61.2161|-149.876|
-2147|100|F|91513.79|1992-09-06|4-NOT SPECIFIED|Clerk#000000424|0| haggle carefully furiously final foxes. pending escapades thrash. bold theod|61.2022|-149.84|61.2022|-149.84|
-2148|130|F|19612.03|1995-04-19|4-NOT SPECIFIED|Clerk#000000517|0|ross the furiously unusual theodolites. always expre|61.2099|-149.762|61.2099|-149.762|
-2149|101|F|105145.40|1993-03-13|5-LOW|Clerk#000000555|0|nusual accounts nag furiously special reques|61.1951|-149.84|61.1951|-149.84|
-2150|82|F|166961.06|1994-06-03|3-MEDIUM|Clerk#000000154|0|ect slyly against the even, final packages. quickly regular pinto beans wake c|61.1069|-149.859|61.1069|-149.859|
-2151|58|O|124608.69|1996-11-11|3-MEDIUM|Clerk#000000996|0|c requests. ironic platelets cajole across the quickly fluffy deposits.|61.1635|-149.881|61.1635|-149.881|
-2176|104|F|87248.17|1992-11-10|1-URGENT|Clerk#000000195|0|s haggle regularly accor|61.1201|-149.89|61.1201|-149.89|
-2177|136|O|183493.42|1997-01-20|3-MEDIUM|Clerk#000000161|0|ove the blithely unusual packages cajole carefully fluffily special request|61.1902|-149.908|61.1902|-149.908|
-2178|8|O|79594.68|1996-12-12|3-MEDIUM|Clerk#000000656|0|thely according to the instructions. furious|61.2104|-149.857|61.2104|-149.857|
-2179|41|O|77487.09|1996-09-07|2-HIGH|Clerk#000000935|0|ounts alongside of the furiously unusual braids cajol|61.1771|-149.97|61.1771|-149.97|
-2180|76|O|208481.57|1996-09-14|4-NOT SPECIFIED|Clerk#000000650|0|xpress, unusual pains. furiously ironic excu|61.1859|-149.976|61.1859|-149.976|
-2181|76|O|100954.64|1995-09-13|3-MEDIUM|Clerk#000000814|0|y against the ironic, even|61.2171|-149.9|61.2171|-149.9|
-2182|23|F|116003.11|1994-04-05|2-HIGH|Clerk#000000071|0|ccounts. quickly bold deposits across the excuses sl|61.1162|-149.755|61.1162|-149.755|
-2183|113|O|49841.12|1996-06-22|1-URGENT|Clerk#000000287|0| among the express, ironic packages. slyly ironic platelets integrat|61.1381|-149.844|61.1381|-149.844|
-2208|68|P|245388.06|1995-05-01|4-NOT SPECIFIED|Clerk#000000900|0|symptotes wake slyly blithely unusual packages.|61.1775|-149.941|61.1775|-149.941|
-2209|91|F|129086.93|1992-07-10|2-HIGH|Clerk#000000056|0|er above the slyly silent requests. furiously reg|61.1938|-149.878|61.1938|-149.878|
-2210|32|F|31689.46|1992-01-16|2-HIGH|Clerk#000000941|0| believe carefully quickly express pinto beans. deposi|61.1571|-149.883|61.1571|-149.883|
-2211|92|F|140031.23|1994-06-30|2-HIGH|Clerk#000000464|0|ffily bold courts e|61.1541|-149.958|61.1541|-149.958|
-2212|118|F|17231.05|1994-03-23|3-MEDIUM|Clerk#000000954|0|structions above the unusual requests use fur|61.135|-149.88|61.135|-149.88|
-2213|122|F|146136.10|1993-01-15|4-NOT SPECIFIED|Clerk#000000598|0|osits are carefully reg|61.1101|-149.857|61.1101|-149.857|
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt b/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt
deleted file mode 100644
index afbd373..0000000
--- a/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt
+++ /dev/null
@@ -1,193 +0,0 @@
-2214|115|O|150345.63|1998-05-05|3-MEDIUM|Clerk#000000253|0|packages. fluffily even accounts haggle blithely. carefully ironic depen|61.1101|-149.857|61.1101|-149.857|
-2215|40|O|108239.46|1996-06-16|4-NOT SPECIFIED|Clerk#000000817|0|le final, final foxes. quickly regular gifts are carefully deposit|61.1101|-149.857|61.1101|-149.857|
-2240|56|F|174090.30|1992-03-06|4-NOT SPECIFIED|Clerk#000000622|0|accounts against the slyly express foxes are after the slyly regular |61.1101|-149.857|61.1101|-149.857|
-2241|103|F|165219.08|1993-05-11|1-URGENT|Clerk#000000081|0|y about the silent excuses. furiously ironic instructions along the sil|61.1101|-149.857|61.1101|-149.857|
-2242|82|O|15082.82|1997-07-20|4-NOT SPECIFIED|Clerk#000000360|0| pending multipliers. carefully express asymptotes use quickl|61.1101|-149.857|61.1101|-149.857|
-2243|49|O|10451.97|1995-06-10|2-HIGH|Clerk#000000813|0|ously regular deposits integrate s|61.1101|-149.857|61.1101|-149.857|
-2244|127|F|21207.08|1993-01-09|1-URGENT|Clerk#000001000|0|ckages. ironic, ironic accounts haggle blithely express excuses. |61.1101|-149.857|61.1101|-149.857|
-2245|58|F|150585.73|1993-04-28|3-MEDIUM|Clerk#000000528|0|ake carefully. braids haggle slyly quickly b|61.1101|-149.857|61.1101|-149.857|
-2246|113|O|85755.84|1996-05-27|4-NOT SPECIFIED|Clerk#000000739|0| final gifts sleep |61.1101|-149.857|61.1101|-149.857|
-2247|95|F|13491.31|1992-08-02|4-NOT SPECIFIED|Clerk#000000947|0|furiously regular packages. final brai|61.1101|-149.857|61.1101|-149.857|
-2272|139|F|127934.71|1993-04-13|2-HIGH|Clerk#000000449|0|s. bold, ironic pinto beans wake. silently specia|61.1101|-149.857|61.1101|-149.857|
-2273|136|O|142291.79|1996-12-14|5-LOW|Clerk#000000155|0|uickly express foxes haggle quickly against|61.1101|-149.857|61.1101|-149.857|
-2274|104|F|58273.89|1993-09-04|4-NOT SPECIFIED|Clerk#000000258|0|nstructions try to hag|61.1101|-149.857|61.1101|-149.857|
-2275|149|F|37398.90|1992-10-22|4-NOT SPECIFIED|Clerk#000000206|0| furiously furious platelets. slyly final packa|61.1101|-149.857|61.1101|-149.857|
-2276|43|O|141159.63|1996-04-29|4-NOT SPECIFIED|Clerk#000000821|0|ecial requests. fox|61.1101|-149.857|61.1101|-149.857|
-2277|89|F|79270.23|1995-01-02|4-NOT SPECIFIED|Clerk#000000385|0|accounts cajole. even i|61.1101|-149.857|61.1101|-149.857|
-2278|142|O|101878.46|1998-04-25|3-MEDIUM|Clerk#000000186|0|r pinto beans integrate after the carefully even deposits. blit|61.1101|-149.857|61.1101|-149.857|
-2279|80|F|142322.33|1993-02-23|3-MEDIUM|Clerk#000000898|0|de of the quickly unusual instructio|61.2141|-149.864|61.2141|-149.864|
-2304|46|F|93769.28|1994-01-07|4-NOT SPECIFIED|Clerk#000000415|0|onic platelets. ironic packages haggle. packages nag doggedly according to|61.2171|-149.9|61.2171|-149.9|
-2305|43|F|122964.66|1993-01-26|2-HIGH|Clerk#000000440|0|ove the furiously even acco|61.2171|-149.9|61.2171|-149.9|
-2306|28|O|244704.23|1995-07-26|2-HIGH|Clerk#000000975|0| wake furiously requests. permanent requests affix. final packages caj|61.2171|-149.9|61.2171|-149.9|
-2307|106|F|59417.76|1993-06-29|5-LOW|Clerk#000000952|0|furiously even asymptotes? carefully regular accounts|61.2171|-149.9|61.2171|-149.9|
-2308|25|F|58546.02|1992-10-25|4-NOT SPECIFIED|Clerk#000000609|0|ts. slyly final depo|61.2171|-149.9|61.2171|-149.9|
-2309|100|O|146933.07|1995-09-04|5-LOW|Clerk#000000803|0|he carefully pending packages. fluffily stealthy foxes engage carefully|61.2171|-149.9|61.2171|-149.9|
-2310|31|O|82928.12|1996-09-20|5-LOW|Clerk#000000917|0|wake carefully. unusual instructions nag ironic, regular excuse|61.2171|-149.9|61.2171|-149.9|
-2311|73|P|153233.93|1995-05-02|2-HIGH|Clerk#000000761|0|ly pending asymptotes-- furiously bold excus|61.2171|-149.9|61.2171|-149.9|
-2336|142|O|22294.51|1996-01-07|4-NOT SPECIFIED|Clerk#000000902|0|c, final excuses sleep furiously among the even theodolites. f|61.2171|-149.9|61.2171|-149.9|
-2337|142|O|45704.96|1997-06-18|4-NOT SPECIFIED|Clerk#000000754|0| quickly. final accounts haggle. carefully final acco|61.2171|-149.9|61.2171|-149.9|
-2338|140|O|28155.92|1997-09-15|2-HIGH|Clerk#000000951|0|riously final dugouts. final, ironic packages wake express, ironic id|61.2171|-149.9|61.2171|-149.9|
-2339|109|F|63470.78|1993-12-15|5-LOW|Clerk#000000847|0| against the regular |61.2171|-149.9|61.2171|-149.9|
-2340|65|O|30778.78|1996-01-12|1-URGENT|Clerk#000000964|0|ter the deposits sleep according to the slyly regular packages. carefully |61.2171|-149.9|61.2171|-149.9|
-2341|82|F|55950.21|1993-05-30|5-LOW|Clerk#000000443|0|sts-- blithely bold dolphins through the deposits nag blithely carefully re|61.2171|-149.9|61.2171|-149.9|
-2342|37|O|104038.78|1996-06-09|1-URGENT|Clerk#000000615|0|oost carefully across the regular accounts. blithely final d|61.2171|-149.9|61.2171|-149.9|
-2343|73|O|85381.00|1995-08-21|3-MEDIUM|Clerk#000000170|0|fluffily over the slyly special deposits. quickl|64.8487|-147.704|64.8487|-147.704|
-2368|13|F|101240.96|1993-08-20|1-URGENT|Clerk#000000830|0|t the bold instructions. carefully unusual |64.8486|-147.705|64.8486|-147.705|
-2369|110|O|73517.91|1996-12-24|2-HIGH|Clerk#000000752|0|iously even requests are dogged, express |64.8087|-147.71|64.8087|-147.71|
-2370|142|F|73924.21|1994-01-17|1-URGENT|Clerk#000000231|0|lyly final packages. quickly final deposits haggl|64.8363|-147.758|64.8363|-147.758|
-2371|19|O|193857.67|1998-01-07|1-URGENT|Clerk#000000028|0|ckages haggle at th|64.8476|-147.704|64.8476|-147.704|
-2372|31|O|104927.66|1997-11-21|5-LOW|Clerk#000000342|0|s: deposits haggle along the final ideas. careful|64.8302|-147.744|64.8302|-147.744|
-2373|28|F|55211.04|1994-03-12|4-NOT SPECIFIED|Clerk#000000306|0| even, special courts grow quickly. pending,|64.8476|-147.812|64.8476|-147.812|
-2374|4|F|115219.88|1993-10-29|4-NOT SPECIFIED|Clerk#000000081|0| blithely regular packages. blithely unusua|64.8144|-147.756|64.8144|-147.756|
-2375|5|O|106612.48|1996-11-20|3-MEDIUM|Clerk#000000197|0|unusual, pending theodolites cajole carefully |64.8183|-147.778|64.8183|-147.778|
-2400|37|O|92798.66|1998-07-25|5-LOW|Clerk#000000782|0|nusual courts nag against the carefully unusual pinto b|64.8494|-147.818|64.8494|-147.818|
-2401|148|O|88448.24|1997-07-29|4-NOT SPECIFIED|Clerk#000000531|0|ully unusual instructions boost carefully silently regular requests. |64.849|-147.822|64.849|-147.822|
-2402|67|O|70403.62|1996-09-06|4-NOT SPECIFIED|Clerk#000000162|0|slyly final sheaves sleep slyly. q|64.8367|-147.716|64.8367|-147.716|
-2403|55|O|111020.79|1998-04-11|3-MEDIUM|Clerk#000000820|0|furiously regular deposits use. furiously unusual accounts wake along the |64.8127|-147.772|64.8127|-147.772|
-2404|77|O|109077.69|1997-03-13|4-NOT SPECIFIED|Clerk#000000409|0|deposits breach furiously. ironic foxes haggle carefully bold packag|64.8143|-147.751|64.8143|-147.751|
-2405|73|O|115929.14|1996-12-23|3-MEDIUM|Clerk#000000535|0|ular, regular asympto|64.842|-147.721|64.842|-147.721|
-2406|7|O|182516.77|1996-10-28|5-LOW|Clerk#000000561|0|blithely regular accounts u|64.8403|-147.714|64.8403|-147.714|
-2407|55|O|112843.52|1998-06-19|2-HIGH|Clerk#000000068|0|uests affix slyly among the slyly regular depos|64.8371|-147.881|64.8371|-147.881|
-2432|103|O|62661.93|1996-07-13|1-URGENT|Clerk#000000115|0|re. slyly even deposits wake bra|64.8151|-147.707|64.8151|-147.707|
-2433|31|F|147071.86|1994-08-22|4-NOT SPECIFIED|Clerk#000000324|0|ess patterns are slyly. packages haggle carefu|64.8151|-147.707|64.8151|-147.707|
-2434|25|O|123956.25|1997-04-27|3-MEDIUM|Clerk#000000190|0|s. quickly ironic dolphins impress final deposits. blithel|64.8541|-147.81|64.8541|-147.81|
-2435|73|F|122490.66|1993-02-21|5-LOW|Clerk#000000112|0|es are carefully along the carefully final instructions. pe|64.8878|-147.496|64.8878|-147.496|
-2436|125|O|73990.08|1995-09-11|4-NOT SPECIFIED|Clerk#000000549|0|arefully. blithely bold deposits affix special accounts. final foxes nag. spe|64.8299|-147.728|64.8299|-147.728|
-2437|85|F|143411.69|1993-04-21|4-NOT SPECIFIED|Clerk#000000578|0|. theodolites wake slyly-- ironic, pending platelets above the carefully exp|64.8132|-147.762|64.8132|-147.762|
-2438|13|F|214494.39|1993-07-15|2-HIGH|Clerk#000000744|0|the final, regular warhorses. regularly |64.8372|-147.713|64.8372|-147.713|
-2439|55|O|41811.12|1997-03-15|2-HIGH|Clerk#000000819|0|lithely after the car|64.7927|-148.036|64.7927|-148.036|
-2464|145|O|30495.65|1997-11-23|5-LOW|Clerk#000000633|0|le about the instructions. courts wake carefully even|64.8717|-147.819|64.8717|-147.819|
-2465|34|O|180737.75|1995-06-24|1-URGENT|Clerk#000000078|0|al pinto beans. final, bold packages wake quickly|64.8527|-147.686|64.8527|-147.686|
-2466|19|F|161625.50|1994-03-06|1-URGENT|Clerk#000000424|0|c pinto beans. express deposits wake quickly. even, final courts nag. package|64.8371|-147.811|64.8371|-147.811|
-2467|35|O|7231.91|1995-07-16|4-NOT SPECIFIED|Clerk#000000914|0|pades sleep furiously. sometimes regular packages again|64.846|-147.705|64.846|-147.705|
-2468|112|O|160627.01|1997-06-09|4-NOT SPECIFIED|Clerk#000000260|0|ickly regular packages. slyly ruthless requests snooze quickly blithe|64.9064|-147.726|64.9064|-147.726|
-2469|124|O|192074.23|1996-11-26|5-LOW|Clerk#000000730|0| sleep closely regular instructions. furiously ironic instructi|64.9347|-147.56|64.9347|-147.56|
-2470|58|O|104966.33|1997-04-19|3-MEDIUM|Clerk#000000452|0|to the furiously final packages? pa|64.8861|-147.677|64.8861|-147.677|
-2471|89|O|34936.31|1998-03-12|4-NOT SPECIFIED|Clerk#000000860|0|carefully blithely regular pac|64.8302|-147.744|64.8302|-147.744|
-2496|136|F|140390.60|1994-01-09|2-HIGH|Clerk#000000142|0|slyly. pending instructions sleep. quic|60.6673|-151.311|60.6673|-151.311|
-2497|47|F|171326.48|1992-08-27|1-URGENT|Clerk#000000977|0|ily ironic pinto beans. furiously final platelets alongside of t|60.6997|-151.38|60.6997|-151.38|
-2498|97|F|45514.27|1993-11-08|5-LOW|Clerk#000000373|0|g the slyly special pinto beans. |60.5658|-151.244|60.5658|-151.244|
-2499|121|O|147243.86|1995-09-24|1-URGENT|Clerk#000000277|0|r the quickly bold foxes. bold instructi|60.6331|-151.163|60.6331|-151.163|
-2500|133|F|131122.82|1992-08-15|2-HIGH|Clerk#000000447|0|integrate slyly pending deposits. furiously ironic accounts across the s|60.6331|-151.163|60.6331|-151.163|
-2501|67|O|79380.51|1997-05-25|5-LOW|Clerk#000000144|0|ickly special theodolite|60.6331|-151.163|60.6331|-151.163|
-2502|70|F|33470.40|1993-05-28|4-NOT SPECIFIED|Clerk#000000914|0|lyly: carefully pending ideas affix again|60.6201|-151.332|60.6201|-151.332|
-2503|7|F|183671.08|1993-06-20|3-MEDIUM|Clerk#000000294|0|ly even packages was. ironic, regular deposits unwind furiously across the p|60.5004|-151.276|60.5004|-151.276|
-2528|55|F|92069.62|1994-11-20|1-URGENT|Clerk#000000789|0|ular dependencies? regular frays kindle according to the blith|60.6331|-151.163|60.6331|-151.163|
-2529|136|O|4104.30|1996-08-20|2-HIGH|Clerk#000000511|0|posits across the silent instructions wake blithely across |60.6331|-151.163|60.6331|-151.163|
-2530|128|F|58853.11|1994-03-21|3-MEDIUM|Clerk#000000291|0|ular instructions about the quic|60.6901|-151.321|60.6901|-151.321|
-2531|44|O|143212.85|1996-05-06|4-NOT SPECIFIED|Clerk#000000095|0|even accounts. furiously ironic excuses sleep fluffily. carefully silen|60.6676|-151.29|60.6676|-151.29|
-2532|94|O|116093.49|1995-10-11|2-HIGH|Clerk#000000498|0|the blithely pending accounts. regular, regular excuses boost aro|60.6331|-151.163|60.6331|-151.163|
-2533|50|O|168495.03|1997-03-24|1-URGENT|Clerk#000000594|0|ecial instructions. spec|60.5632|-151.266|60.5632|-151.266|
-2534|76|O|202784.54|1996-07-17|3-MEDIUM|Clerk#000000332|0|packages cajole ironic requests. furiously regular|60.6331|-151.163|60.6331|-151.163|
-2535|121|F|67018.30|1993-05-25|5-LOW|Clerk#000000296|0|phins cajole beneath the fluffily express asymptotes. c|60.6331|-151.163|60.6331|-151.163|
-2560|131|F|153426.79|1992-09-05|1-URGENT|Clerk#000000538|0|atelets; quickly sly requests|60.6509|-151.342|60.6509|-151.342|
-2561|58|O|137473.58|1997-11-14|1-URGENT|Clerk#000000861|0|ual requests. unusual deposits cajole furiously pending, regular platelets. |60.5601|-151.107|60.5601|-151.107|
-2562|10|F|136360.37|1992-08-01|1-URGENT|Clerk#000000467|0|elets. pending dolphins promise slyly. bo|60.5123|-151.275|60.5123|-151.275|
-2563|62|F|168952.10|1993-11-19|4-NOT SPECIFIED|Clerk#000000150|0|sly even packages after the furio|60.6076|-151.325|60.6076|-151.325|
-2564|77|F|3967.47|1994-09-09|2-HIGH|Clerk#000000718|0|usly regular pinto beans. orbits wake carefully. slyly e|60.6331|-151.163|60.6331|-151.163|
-2565|56|O|204438.57|1998-02-28|3-MEDIUM|Clerk#000000032|0|x-ray blithely along|60.5175|-151.235|60.5175|-151.235|
-2566|86|F|89992.48|1992-10-10|3-MEDIUM|Clerk#000000414|0|ructions boost bold ideas. idly ironic accounts use according to th|60.5535|-151.108|60.5535|-151.108|
-2567|70|O|263411.29|1998-02-27|2-HIGH|Clerk#000000031|0|detect. furiously ironic requests|60.5614|-151.275|60.5614|-151.275|
-2592|101|F|8225.96|1993-03-05|4-NOT SPECIFIED|Clerk#000000524|0|ts nag fluffily. quickly stealthy theodolite|60.5647|-151.195|60.5647|-151.195|
-2593|92|F|134726.09|1993-09-04|2-HIGH|Clerk#000000468|0|r the carefully final|60.6331|-151.163|60.6331|-151.163|
-2594|79|F|94866.39|1992-12-17|1-URGENT|Clerk#000000550|0|ests. theodolites above the blithely even accounts detect furio|60.6331|-151.163|60.6331|-151.163|
-2595|74|O|173130.20|1995-12-14|4-NOT SPECIFIED|Clerk#000000222|0|arefully ironic requests nag carefully ideas. |60.6331|-151.163|60.6331|-151.163|
-2596|43|O|74940.13|1996-08-17|1-URGENT|Clerk#000000242|0|requests. ironic, bold theodolites wak|60.6331|-151.163|60.6331|-151.163|
-2597|104|F|21964.66|1993-02-04|2-HIGH|Clerk#000000757|0|iously ruthless exc|60.6331|-151.163|60.6331|-151.163|
-2598|112|O|84871.50|1996-03-05|3-MEDIUM|Clerk#000000391|0| ironic notornis according to the blithely final requests should |60.6678|-151.31|60.6678|-151.31|
-2599|149|O|62807.13|1996-11-07|2-HIGH|Clerk#000000722|0|ts. slyly regular theodolites wake sil|60.5003|-151.276|60.5003|-151.276|
-2624|52|O|27148.63|1996-11-28|5-LOW|Clerk#000000930|0|ic, regular packages|60.6331|-151.163|60.6331|-151.163|
-2625|40|F|39382.74|1992-10-14|4-NOT SPECIFIED|Clerk#000000386|0| final deposits. blithely ironic ideas |61.5855|-149.326|61.5855|-149.326|
-2626|139|O|84314.51|1995-09-08|4-NOT SPECIFIED|Clerk#000000289|0|gside of the carefully special packages are furiously after the slyly express |61.5979|-149.437|61.5979|-149.437|
-2627|149|F|26798.65|1992-03-24|3-MEDIUM|Clerk#000000181|0|s. silent, ruthless requests|61.6141|-149.457|61.6141|-149.457|
-2628|56|F|165655.99|1993-10-22|5-LOW|Clerk#000000836|0|ajole across the blithely careful accounts. blithely silent deposits sl|61.5799|-149.461|61.5799|-149.461|
-2629|139|O|96458.03|1998-04-06|5-LOW|Clerk#000000680|0|uches dazzle carefully even, express excuses. ac|61.5845|-149.337|61.5845|-149.337|
-2630|85|F|127132.51|1992-10-24|5-LOW|Clerk#000000712|0|inal theodolites. ironic instructions s|61.5351|-149.558|61.5351|-149.558|
-2631|37|F|63103.32|1993-09-24|5-LOW|Clerk#000000833|0| quickly unusual deposits doubt around |61.5811|-149.45|61.5811|-149.45|
-2656|77|F|105492.37|1993-05-04|1-URGENT|Clerk#000000307|0|elets. slyly final accou|61.5793|-149.442|61.5793|-149.442|
-2657|25|O|148176.06|1995-10-17|2-HIGH|Clerk#000000160|0| foxes-- slyly final dependencies around the slyly final theodo|61.5661|-149.313|61.5661|-149.313|
-2658|14|O|163834.46|1995-09-23|3-MEDIUM|Clerk#000000400|0|bout the slyly regular accounts. ironic, |61.6141|-149.457|61.6141|-149.457|
-2659|83|F|79785.52|1993-12-18|4-NOT SPECIFIED|Clerk#000000758|0|cross the pending requests maintain |61.5786|-149.332|61.5786|-149.332|
-2660|127|O|16922.51|1995-08-05|5-LOW|Clerk#000000480|0|ly finally regular deposits. ironic theodolites cajole|61.5811|-149.45|61.5811|-149.45|
-2661|74|O|106036.84|1997-01-04|3-MEDIUM|Clerk#000000217|0|al, regular pinto beans. silently final deposits should have t|61.5825|-149.429|61.5825|-149.429|
-2662|37|O|87689.88|1996-08-21|3-MEDIUM|Clerk#000000589|0|bold pinto beans above the slyly final accounts affix furiously deposits. pac|61.6141|-149.457|61.6141|-149.457|
-2663|95|O|35131.80|1995-09-06|1-URGENT|Clerk#000000950|0|ar requests. furiously final dolphins along the fluffily spe|61.5531|-149.651|61.5531|-149.651|
-2688|98|F|181077.36|1992-01-24|2-HIGH|Clerk#000000720|0|have to nag according to the pending theodolites. sly|61.5531|-149.651|61.5531|-149.651|
-2689|103|F|41552.78|1992-04-09|4-NOT SPECIFIED|Clerk#000000698|0|press pains wake. furiously express theodolites alongsid|61.5698|-149.62|61.5698|-149.62|
-2690|94|O|224674.27|1996-03-31|3-MEDIUM|Clerk#000000760|0|ravely even theodolites |61.6141|-149.457|61.6141|-149.457|
-2691|7|F|30137.17|1992-04-30|5-LOW|Clerk#000000439|0|es at the regular deposits sleep slyly by the fluffy requests. eve|61.5474|-149.458|61.5474|-149.458|
-2692|62|O|24265.24|1997-12-02|3-MEDIUM|Clerk#000000878|0|es. regular asymptotes cajole above t|61.5825|-149.429|61.5825|-149.429|
-2693|19|O|66158.13|1996-09-04|1-URGENT|Clerk#000000370|0|ndle never. blithely regular packages nag carefully enticing platelets. ca|61.5955|-149.423|61.5955|-149.423|
-2694|121|O|102807.59|1996-03-14|5-LOW|Clerk#000000722|0| requests. bold deposits above the theodol|61.5801|-149.461|61.5801|-149.461|
-2695|58|O|138584.20|1996-08-20|1-URGENT|Clerk#000000697|0|ven deposits around the quickly regular packa|61.5785|-149.415|61.5785|-149.415|
-2720|31|F|161307.05|1993-06-08|1-URGENT|Clerk#000000948|0|quickly. special asymptotes are fluffily ironi|61.6402|-149.34|61.6402|-149.34|
-2721|79|O|59180.25|1996-01-27|2-HIGH|Clerk#000000401|0| ideas eat even, unusual ideas. theodolites are carefully|61.583|-149.457|61.583|-149.457|
-2722|35|F|50328.84|1994-04-09|5-LOW|Clerk#000000638|0|rding to the carefully quick deposits. bli|61.5907|-149.295|61.5907|-149.295|
-2723|61|O|104759.25|1995-10-06|5-LOW|Clerk#000000836|0|nts must have to cajo|61.6141|-149.457|61.6141|-149.457|
-2724|137|F|116069.66|1994-09-14|2-HIGH|Clerk#000000217|0| sleep blithely. blithely idle |61.5933|-149.397|61.5933|-149.397|
-2725|89|F|75144.68|1994-05-21|4-NOT SPECIFIED|Clerk#000000835|0|ular deposits. spec|61.6091|-149.77|61.6091|-149.77|
-2726|7|F|47753.00|1992-11-27|5-LOW|Clerk#000000470|0| blithely even dinos sleep care|61.577|-149.411|61.577|-149.411|
-2727|74|O|3089.42|1998-04-19|4-NOT SPECIFIED|Clerk#000000879|0|sual theodolites cajole enticingly above the furiously fin|61.6078|-149.322|61.6078|-149.322|
-2752|59|F|187932.30|1993-11-19|2-HIGH|Clerk#000000648|0| carefully regular foxes are quickly quickl|61.6131|-149.397|61.6131|-149.397|
-2753|16|F|159720.39|1993-11-30|2-HIGH|Clerk#000000380|0|ending instructions. unusual deposits|61.6648|-149.372|61.6648|-149.372|
-2754|145|F|25985.52|1994-04-03|2-HIGH|Clerk#000000960|0|cies detect slyly. |61.5531|-149.651|61.5531|-149.651|
-2755|118|F|101202.18|1992-02-07|4-NOT SPECIFIED|Clerk#000000177|0|ously according to the sly foxes. blithely regular pinto bean|61.5811|-149.45|61.5811|-149.45|
-2756|118|F|142323.38|1994-04-18|1-URGENT|Clerk#000000537|0|arefully special warho|61.583|-149.457|61.583|-149.457|
-2757|76|O|89792.48|1995-07-20|2-HIGH|Clerk#000000216|0| regular requests subl|61.1955|-149.9|61.1955|-149.9|
-2758|43|O|36671.88|1998-07-12|5-LOW|Clerk#000000863|0|s cajole according to the carefully special |61.1844|-149.897|61.1844|-149.897|
-2759|116|F|89731.10|1993-11-25|4-NOT SPECIFIED|Clerk#000000071|0|ts. regular, pending pinto beans sleep ab|61.1901|-149.892|61.1901|-149.892|
-2784|95|O|106635.21|1998-01-07|1-URGENT|Clerk#000000540|0|g deposits alongside of the silent requests s|61.1444|-149.867|61.1444|-149.867|
-2785|148|O|132854.79|1995-07-21|2-HIGH|Clerk#000000098|0|iously pending packages sleep according to the blithely unusual foxe|61.1955|-149.9|61.1955|-149.9|
-2786|79|F|178254.66|1992-03-22|2-HIGH|Clerk#000000976|0|al platelets cajole blithely ironic requests. ironic re|61.1893|-149.887|61.1893|-149.887|
-2787|103|O|3726.14|1995-09-30|1-URGENT|Clerk#000000906|0|he ironic, regular |61.2174|-149.888|61.2174|-149.888|
-2788|124|F|17172.66|1994-09-22|1-URGENT|Clerk#000000641|0|nts wake across the fluffily bold accoun|61.2227|-149.842|61.2227|-149.842|
-2789|37|O|219123.27|1998-03-14|2-HIGH|Clerk#000000972|0|gular patterns boost. carefully even re|61.1263|-149.872|61.1263|-149.872|
-2790|25|F|177458.97|1994-08-19|2-HIGH|Clerk#000000679|0| the carefully express deposits sleep slyly |61.1138|-149.866|61.1138|-149.866|
-2791|121|F|156697.55|1994-10-10|2-HIGH|Clerk#000000662|0|as. slyly ironic accounts play furiously bl|61.2157|-149.821|61.2157|-149.821|
-2816|58|F|42225.53|1994-09-20|2-HIGH|Clerk#000000289|0|kages at the final deposits cajole furious foxes. quickly |61.2174|-149.888|61.2174|-149.888|
-2817|40|F|71453.85|1994-04-19|3-MEDIUM|Clerk#000000982|0|ic foxes haggle upon the daringly even pinto beans. slyly|61.1855|-149.868|61.1855|-149.868|
-2818|49|F|120086.84|1994-12-12|3-MEDIUM|Clerk#000000413|0|eep furiously special ideas. express |61.1951|-149.873|61.1951|-149.873|
-2819|103|F|66927.16|1994-05-05|1-URGENT|Clerk#000000769|0|ngside of the blithely ironic dolphins. furio|61.1444|-149.867|61.1444|-149.867|
-2820|19|F|143813.39|1994-05-20|3-MEDIUM|Clerk#000000807|0|equests are furiously. carefu|61.1883|-149.735|61.1883|-149.735|
-2821|118|F|36592.48|1993-08-09|3-MEDIUM|Clerk#000000323|0|ng requests. even instructions are quickly express, silent instructi|61.2161|-149.876|61.2161|-149.876|
-2822|79|F|40142.15|1993-07-26|2-HIGH|Clerk#000000510|0|furiously against the accounts. unusual accounts aft|61.2161|-149.876|61.2161|-149.876|
-2823|79|O|171894.45|1995-09-09|2-HIGH|Clerk#000000567|0|encies. carefully fluffy accounts m|61.1893|-149.888|61.1893|-149.888|
-2848|70|F|116258.53|1992-03-10|1-URGENT|Clerk#000000256|0|ly fluffy foxes sleep furiously across the slyly regu|61.2174|-149.888|61.2174|-149.888|
-2849|46|O|180054.29|1996-04-30|2-HIGH|Clerk#000000659|0|al packages are after the quickly bold requests. carefully special |61.1914|-149.886|61.1914|-149.886|
-2850|100|O|122969.79|1996-10-02|2-HIGH|Clerk#000000392|0|, regular deposits. furiously pending packages hinder carefully carefully u|61.1541|-149.958|61.1541|-149.958|
-2851|145|O|7859.36|1997-09-07|5-LOW|Clerk#000000566|0|Tiresias wake quickly quickly even|61.1259|-149.717|61.1259|-149.717|
-2852|91|F|99050.81|1993-01-16|1-URGENT|Clerk#000000740|0|ruthless deposits against the final instructions use quickly al|61.2193|-149.902|61.2193|-149.902|
-2853|94|F|103641.15|1994-05-05|2-HIGH|Clerk#000000878|0|the carefully even packages.|61.1879|-149.886|61.1879|-149.886|
-2854|139|F|153568.02|1994-06-27|1-URGENT|Clerk#000000010|0| furiously ironic tithes use furiously |61.1372|-149.912|61.1372|-149.912|
-2855|49|F|48419.58|1993-04-04|4-NOT SPECIFIED|Clerk#000000973|0| silent, regular packages sleep |61.1101|-149.857|61.1101|-149.857|
-2880|8|F|145761.99|1992-03-15|2-HIGH|Clerk#000000756|0|ves maintain doggedly spec|61.1791|-149.94|61.1791|-149.94|
-2881|100|F|45695.84|1992-05-10|5-LOW|Clerk#000000864|0|uriously. slyly express requests according to the silent dol|61.2031|-149.749|61.2031|-149.749|
-2882|121|O|172872.37|1995-08-22|2-HIGH|Clerk#000000891|0|pending deposits. carefully eve|61.1914|-149.877|61.1914|-149.877|
-2883|121|F|170360.27|1995-01-23|5-LOW|Clerk#000000180|0|uses. carefully ironic accounts lose fluffil|61.1944|-149.883|61.1944|-149.883|
-2884|92|O|71683.84|1997-10-12|3-MEDIUM|Clerk#000000780|0|efully express instructions sleep against|61.1923|-149.886|61.1923|-149.886|
-2885|7|F|146896.72|1992-09-19|4-NOT SPECIFIED|Clerk#000000280|0|ly sometimes special excuses. final requests are |61.2123|-149.854|61.2123|-149.854|
-2886|109|F|94527.23|1994-11-13|4-NOT SPECIFIED|Clerk#000000619|0|uctions. ironic packages sle|61.2161|-149.876|61.2161|-149.876|
-2887|109|O|28571.39|1997-05-26|5-LOW|Clerk#000000566|0|slyly even pinto beans. slyly bold epitaphs cajole blithely above t|61.2171|-149.9|61.2171|-149.9|
-2912|94|F|27727.52|1992-03-12|5-LOW|Clerk#000000186|0|jole blithely above the quickly regular packages. carefully regular pinto bean|61.1125|-149.861|61.1125|-149.861|
-2913|43|O|130702.19|1997-07-12|3-MEDIUM|Clerk#000000118|0|mptotes doubt furiously slyly regu|61.1419|-149.896|61.1419|-149.896|
-2914|109|F|60867.14|1993-03-03|3-MEDIUM|Clerk#000000543|0|he slyly regular theodolites are furiously sile|61.145|-149.878|61.145|-149.878|
-2915|94|F|96015.13|1994-03-31|5-LOW|Clerk#000000410|0|ld packages. bold deposits boost blithely. ironic, unusual theodoli|61.1044|-149.865|61.1044|-149.865|
-2916|8|O|20182.22|1995-12-27|2-HIGH|Clerk#000000681|0|ithely blithe deposits sleep beyond the|61.1444|-149.876|61.1444|-149.876|
-2917|91|O|100714.13|1997-12-09|4-NOT SPECIFIED|Clerk#000000061|0| special dugouts among the special deposi|61.1|-149.85|61.1|-149.85|
-2918|118|O|21760.09|1996-09-08|3-MEDIUM|Clerk#000000439|0|ular deposits across th|61.1105|-149.861|61.1105|-149.861|
-2919|53|F|137223.14|1993-12-10|2-HIGH|Clerk#000000209|0|es. pearls wake quietly slyly ironic instructions--|61.1286|-149.957|61.1286|-149.957|
-2944|14|O|146581.14|1997-09-24|4-NOT SPECIFIED|Clerk#000000740|0|deas. permanently special foxes haggle carefully ab|61.1201|-149.89|61.1201|-149.89|
-2945|29|O|223507.72|1996-01-03|2-HIGH|Clerk#000000499|0|ons are carefully toward the permanent, bold pinto beans. regu|61.112|-149.871|61.112|-149.871|
-2946|125|O|102226.59|1996-02-05|5-LOW|Clerk#000000329|0|g instructions about the regular accounts sleep carefully along the pen|61.1427|-149.864|61.1427|-149.864|
-2947|70|P|43360.95|1995-04-26|1-URGENT|Clerk#000000464|0|ronic accounts. accounts run furiously d|61.1212|-149.947|61.1212|-149.947|
-2948|44|F|100758.71|1994-08-23|5-LOW|Clerk#000000701|0| deposits according to the blithely pending |61.1228|-149.939|61.1228|-149.939|
-2949|137|F|94231.71|1994-04-12|2-HIGH|Clerk#000000184|0|y ironic accounts use. quickly blithe accou|61.1093|-149.871|61.1093|-149.871|
-2950|136|O|183620.33|1997-07-06|1-URGENT|Clerk#000000833|0| dolphins around the furiously |61.145|-149.878|61.145|-149.878|
-2951|74|O|125509.17|1996-02-06|2-HIGH|Clerk#000000680|0|gular deposits above the finally regular ideas integrate idly stealthil|61.1191|-149.871|61.1191|-149.871|
-2976|29|F|145768.47|1993-12-10|4-NOT SPECIFIED|Clerk#000000159|0|. furiously ironic asymptotes haggle ruthlessly silently regular r|61.1003|-149.856|61.1003|-149.856|
-2977|73|O|25170.88|1996-08-27|3-MEDIUM|Clerk#000000252|0|quickly special platelets are furio|61.1113|-149.872|61.1113|-149.872|
-2978|44|P|139542.14|1995-05-03|1-URGENT|Clerk#000000135|0|d. even platelets are. ironic dependencies cajole slow, e|61.1084|-149.861|61.1084|-149.861|
-2979|133|O|116789.98|1996-03-23|3-MEDIUM|Clerk#000000820|0|even, ironic foxes sleep along|61.144|-149.878|61.144|-149.878|
-2980|4|O|187514.11|1996-09-14|3-MEDIUM|Clerk#000000661|0|y quick pinto beans wake. slyly re|61.1426|-149.877|61.1426|-149.877|
-2981|49|O|37776.79|1998-07-29|5-LOW|Clerk#000000299|0|hely among the express foxes. blithely stealthy requests cajole boldly. regu|61.1173|-149.861|61.1173|-149.861|
-2982|85|F|55582.94|1995-03-19|2-HIGH|Clerk#000000402|0|lyly. express theodolites affix slyly after the slyly speci|61.1347|-149.914|61.1347|-149.914|
diff --git a/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-examples/hyracks-integration-tests/pom.xml
deleted file mode 100644
index b21da8b..0000000
--- a/hyracks-examples/hyracks-integration-tests/pom.xml
+++ /dev/null
@@ -1,102 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>edu.uci.ics.hyracks.examples</groupId>
-	<artifactId>hyracks-integration-tests</artifactId>
-	<parent>
-		<groupId>edu.uci.ics.hyracks</groupId>
-		<artifactId>hyracks-examples</artifactId>
-		<version>0.2.2-SNAPSHOT</version>
-	</parent>
-
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<version>2.0.2</version>
-				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-	<dependencies>
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<version>4.8.1</version>
-			<type>jar</type>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-dataflow-std</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-control-cc</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-control-nc</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-btree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-lsm-btree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-rtree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-data-std</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-test-support</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-	</dependencies>
-</project>
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
deleted file mode 100644
index 0af1193..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
+++ /dev/null
@@ -1,327 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import java.io.DataOutput;
-import java.io.File;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.test.support.TestIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-
-public abstract class AbstractBTreeOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    protected final IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    protected final IIndexLifecycleManagerProvider lcManagerProvider = new TestIndexLifecycleManagerProvider();
-    protected IIndexDataflowHelperFactory dataflowHelperFactory;
-
-    // field, type and key declarations for primary index
-    protected final int primaryFieldCount = 6;
-    protected final ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    protected final int primaryKeyFieldCount = 1;
-    protected final IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-    protected final int[] primaryBloomFilterKeyFields = new int[primaryKeyFieldCount];
-
-    protected final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-    // to be set by subclasses
-    protected String primaryFileName;
-    protected IFileSplitProvider primarySplitProvider;
-
-    // field, type and key declarations for secondary indexes
-    protected final int secondaryFieldCount = 2;
-    protected final ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-    protected final int secondaryKeyFieldCount = 2;
-    protected final IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
-    protected final int[] secondaryBloomFilterKeyFields = new int[secondaryKeyFieldCount];
-
-    protected final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-    protected String secondaryFileName;
-    protected IFileSplitProvider secondarySplitProvider;
-
-    protected ITreeIndexOperatorTestHelper testHelper;
-
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new BTreeOperatorTestHelper();
-    }
-
-    @Before
-    public void setup() throws Exception {
-        testHelper = createTestHelper();
-        dataflowHelperFactory = createDataFlowHelperFactory();
-        primaryFileName = testHelper.getPrimaryIndexName();
-        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, new FileReference(
-                new File(primaryFileName))) });
-        secondaryFileName = testHelper.getSecondaryIndexName();
-        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                new FileReference(new File(secondaryFileName))) });
-
-        // field, type and key declarations for primary index
-        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        primaryBloomFilterKeyFields[0] = 0;
-
-        // field, type and key declarations for secondary indexes
-        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        secondaryBloomFilterKeyFields[0] = 0;
-        secondaryBloomFilterKeyFields[1] = 1;
-    }
-
-    protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory();
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, dataflowHelperFactory, localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-
-    protected void loadPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/orders-part1.tbl"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, fieldPermutation, 0.7f, true, 1000L, dataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
-
-        spec.addRoot(primaryBtreeBulkLoad);
-        runTest(spec);
-    }
-
-    public void createSecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
-        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryBloomFilterKeyFields, dataflowHelperFactory,
-                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
-        spec.addRoot(secondaryCreateOp);
-        runTest(spec);
-    }
-
-    protected void loadSecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build dummy tuple containing nothing
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = null; // - infinity
-        int[] highKeyFields = null; // + infinity
-
-        // scan primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        // sort based on secondary keys
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 3, 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, primaryRecDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        // load secondary index
-        int[] fieldPermutation = { 3, 0 };
-        TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryBloomFilterKeyFields, fieldPermutation, 0.7f, true, 1000L,
-                dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, sorter, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, secondaryBtreeBulkLoad, 0);
-
-        spec.addRoot(secondaryBtreeBulkLoad);
-        runTest(spec);
-    }
-
-    protected void insertPipeline(boolean useUpsert) throws Exception {
-        IndexOperation pipelineOperation = useUpsert ? IndexOperation.UPSERT : IndexOperation.INSERT;
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        // insert into primary index
-        int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
-        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, ordersDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, primaryBloomFilterKeyFields, primaryFieldPermutation, pipelineOperation,
-                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeInsertOp, NC1_ID);
-
-        // first secondary index
-        int[] fieldPermutationB = { 4, 0 };
-        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, ordersDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryBloomFilterKeyFields, fieldPermutationB, pipelineOperation,
-                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryInsertOp, NC1_ID);
-
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, nullSink, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, primaryBtreeInsertOp, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeInsertOp, 0, secondaryInsertOp, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsertOp, 0, nullSink, 0);
-
-        spec.addRoot(nullSink);
-        runTest(spec);
-    }
-
-    protected void destroyPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        IndexDropOperatorDescriptor primaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, primarySplitProvider, dataflowHelperFactory);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryDropOp, NC1_ID);
-        spec.addRoot(primaryDropOp);
-        runTest(spec);
-    }
-
-    protected void destroySecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        IndexDropOperatorDescriptor secondaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, secondarySplitProvider, dataflowHelperFactory);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryDropOp, NC1_ID);
-        spec.addRoot(secondaryDropOp);
-        runTest(spec);
-    }
-
-    @After
-    public abstract void cleanup() throws Exception;
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
deleted file mode 100644
index 337bfda..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.tests.am.common.TreeOperatorTestHelper;
-
-public class BTreeOperatorTestHelper extends TreeOperatorTestHelper {
-
-    public IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return new BTreeDataflowHelperFactory();
-    }
-
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
deleted file mode 100644
index 7e1c42a..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-
-public class BTreePrimaryIndexScanOperatorTest extends AbstractBTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-    }
-
-    @Test
-    public void scanPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build dummy tuple containing nothing
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = null; // - infinity
-        int[] highKeyFields = null; // + infinity
-
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java
deleted file mode 100644
index 64bc657..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-
-public class BTreePrimaryIndexSearchOperatorTest extends AbstractBTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-    }
-
-    @Test
-    public void searchPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple containing low and high search key
-        // high key and low key
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        // low key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos);
-        tb.addFieldEndOffset();
-        // high key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = { 0 };
-        int[] highKeyFields = { 1 };
-
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java
deleted file mode 100644
index c8d95ce..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-
-public class BTreePrimaryIndexStatsOperatorTest extends AbstractBTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-    }
-
-    @Test
-    public void showPrimaryIndexStats() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java
deleted file mode 100644
index 5a600a6..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-
-public class BTreeSecondaryIndexInsertOperatorTest extends AbstractBTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        createSecondaryIndex();
-        loadSecondaryIndex();
-        insertPipeline(false);
-    }
-
-    @Test
-    public void searchUpdatedSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple containing search keys (only use the first key as search
-        // key)
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        // low key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
-        tb.addFieldEndOffset();
-        // high key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] secondaryLowKeyFields = { 0 };
-        int[] secondaryHighKeyFields = { 1 };
-
-        // search secondary index
-        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
-                secondaryRecDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryBloomFilterKeyFields, secondaryLowKeyFields,
-                secondaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
-
-        // second field from the tuples coming from secondary index
-        int[] primaryLowKeyFields = { 1 };
-        // second field from the tuples coming from secondary index
-        int[] primaryHighKeyFields = { 1 };
-
-        // search primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, primaryLowKeyFields, primaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-        destroySecondaryIndex();
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java
deleted file mode 100644
index e3005cf..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-
-public class BTreeSecondaryIndexSearchOperatorTest extends AbstractBTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        createSecondaryIndex();
-        loadSecondaryIndex();
-    }
-
-    @Test
-    public void searchSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple containing search keys (only use the first key as search
-        // key)
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        // low key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
-        tb.addFieldEndOffset();
-        // high key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] secondaryLowKeyFields = { 0 };
-        int[] secondaryHighKeyFields = { 1 };
-
-        // search secondary index
-        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
-                secondaryRecDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryBloomFilterKeyFields, secondaryLowKeyFields,
-                secondaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
-
-        int[] primaryLowKeyFields = { 1 }; // second field from the tuples
-        // coming from secondary index
-        int[] primaryHighKeyFields = { 1 }; // second field from the tuples
-        // coming from secondary index
-
-        // search primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, primaryLowKeyFields, primaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-        destroySecondaryIndex();
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java
deleted file mode 100644
index 758e926..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package edu.uci.ics.hyracks.tests.am.btree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-
-public class BTreeSecondaryIndexUpsertOperatorTest extends AbstractBTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        createSecondaryIndex();
-        loadSecondaryIndex();
-        insertPipeline(true);
-    }
-
-    @Test
-    public void searchUpdatedSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple containing search keys (only use the first key as search
-        // key)
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        // low key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
-        tb.addFieldEndOffset();
-        // high key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] secondaryLowKeyFields = { 0 };
-        int[] secondaryHighKeyFields = { 1 };
-
-        // search secondary index
-        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
-                secondaryRecDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryBloomFilterKeyFields, secondaryLowKeyFields,
-                secondaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
-
-        // second field from the tuples coming from secondary index
-        int[] primaryLowKeyFields = { 1 };
-        // second field from the tuples coming from secondary index
-        int[] primaryHighKeyFields = { 1 };
-
-        // search primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, primaryLowKeyFields, primaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-        destroySecondaryIndex();
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java
deleted file mode 100644
index f0c4519..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.common;
-
-public interface ITreeIndexOperatorTestHelper {
-    public String getPrimaryIndexName();
-
-    public String getSecondaryIndexName();
-
-    public void cleanup(String primaryFileName, String secondaryFileName);
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java
deleted file mode 100644
index 80b5285..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.common;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.Date;
-
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-
-public class LSMTreeOperatorTestHelper extends TreeOperatorTestHelper {
-
-    protected final IOManager ioManager;
-
-    public LSMTreeOperatorTestHelper(IOManager ioManager) {
-        this.ioManager = ioManager;
-    }
-
-    public String getPrimaryIndexName() {
-        return "primary" + simpleDateFormat.format(new Date());
-    }
-
-    public String getSecondaryIndexName() {
-        return "secondary" + simpleDateFormat.format(new Date());
-    }
-
-    @Override
-    public void cleanup(String primaryFileName, String secondaryFileName) {
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File primaryDir = new File(dev.getPath(), primaryFileName);
-            cleanupDir(primaryDir);
-            File secondaryDir = new File(dev.getPath(), secondaryFileName);
-            cleanupDir(secondaryDir);
-        }
-    }
-
-    private void cleanupDir(File dir) {
-        if (!dir.exists()) {
-            return;
-        }
-        FilenameFilter filter = new FilenameFilter() {
-            public boolean accept(File dir, String name) {
-                return !name.startsWith(".");
-            }
-        };
-        String[] files = dir.list(filter);
-        if (files != null) {
-            for (String fileName : files) {
-                File file = new File(dir.getPath() + File.separator + fileName);
-                file.delete();
-            }
-        }
-        dir.delete();
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java
deleted file mode 100644
index 935724b..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.common;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-public class TreeOperatorTestHelper implements ITreeIndexOperatorTestHelper {
-
-    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final String sep = System.getProperty("file.separator");
-    protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
-    protected static int DEFAULT_MEM_NUM_PAGES = 1000;
-
-    public String getPrimaryIndexName() {
-        return System.getProperty("java.io.tmpdir") + sep + "primary" + simpleDateFormat.format(new Date());
-    }
-
-    public String getSecondaryIndexName() {
-        return System.getProperty("java.io.tmpdir") + sep + "secondary" + simpleDateFormat.format(new Date());
-    }
-
-    @Override
-    public void cleanup(String primaryFileName, String secondaryFileName) {
-        File primary = new File(primaryFileName);
-        if (primary.exists()) {
-            primary.deleteOnExit();
-        }
-        File secondary = new File(secondaryFileName);
-        if (secondary.exists()) {
-            secondary.deleteOnExit();
-        }
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/AbstractfWordInvertedIndexTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/AbstractfWordInvertedIndexTest.java
deleted file mode 100644
index 808afac..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/AbstractfWordInvertedIndexTest.java
+++ /dev/null
@@ -1,346 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.invertedindex;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.ITokenFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8WordTokenFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.test.support.TestIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractfWordInvertedIndexTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    protected static final int MERGE_THRESHOLD = 3;
-
-    protected IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    protected IIndexLifecycleManagerProvider lcManagerProvider = new TestIndexLifecycleManagerProvider();
-    protected IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
-    protected IIndexDataflowHelperFactory invertedIndexDataflowHelperFactory;
-
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String sep = System.getProperty("file.separator");
-    protected final String dateString = simpleDateFormat.format(new Date());
-    protected final String primaryFileName = System.getProperty("java.io.tmpdir") + sep + "primaryBtree" + dateString;
-    protected final String btreeFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexBtree" + dateString;
-
-    protected IFileSplitProvider primaryFileSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-    protected IFileSplitProvider btreeFileSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(btreeFileName))) });
-
-    // Primary BTree index.
-    protected int primaryFieldCount = 2;
-    protected ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    protected int primaryKeyFieldCount = 1;
-    protected IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-    protected RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-    // Inverted index BTree dictionary.
-    protected ITypeTraits[] tokenTypeTraits;
-    protected IBinaryComparatorFactory[] tokenComparatorFactories;
-
-    // Inverted index stuff.
-    protected int invListElementFieldCount = 1;
-    protected ITypeTraits[] invListsTypeTraits = new ITypeTraits[invListElementFieldCount];
-    protected IBinaryComparatorFactory[] invListsComparatorFactories = new IBinaryComparatorFactory[invListElementFieldCount];
-    protected RecordDescriptor invListsRecDesc = new RecordDescriptor(
-            new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
-    protected RecordDescriptor tokenizerRecDesc;
-
-    // Tokenizer stuff.
-    protected ITokenFactory tokenFactory = new UTF8WordTokenFactory();
-    protected IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
-            tokenFactory);
-
-    // Sorting stuff.
-    IBinaryComparatorFactory[] sortComparatorFactories;
-
-    @Before
-    public void setup() throws Exception {
-        prepare();
-
-        // Field declarations and comparators for primary BTree index.
-        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // Field declarations and comparators for inverted lists.
-        invListsTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        invListsComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        printPrimaryIndex();
-        createInvertedIndex();
-        loadInvertedIndex();
-    }
-
-    protected abstract void prepare();
-
-    protected abstract boolean addNumTokensKey();
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories, null,
-                btreeDataflowHelperFactory, localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-
-    public void createInvertedIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        ILocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
-        LSMInvertedIndexCreateOperatorDescriptor invIndexCreateOp = new LSMInvertedIndexCreateOperatorDescriptor(spec,
-                storageManager, btreeFileSplitProvider, lcManagerProvider, tokenTypeTraits, tokenComparatorFactories,
-                invListsTypeTraits, invListsComparatorFactories, tokenizerFactory, invertedIndexDataflowHelperFactory,
-                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexCreateOp, NC1_ID);
-        spec.addRoot(invIndexCreateOp);
-        runTest(spec);
-    }
-
-    @Test
-    public void testConjunctiveSearcher() throws Exception {
-        IInvertedIndexSearchModifierFactory conjunctiveSearchModifierFactory = new ConjunctiveSearchModifierFactory();
-        searchInvertedIndex("of", conjunctiveSearchModifierFactory);
-        searchInvertedIndex("3d", conjunctiveSearchModifierFactory);
-        searchInvertedIndex("of the human", conjunctiveSearchModifierFactory);
-    }
-
-    private IOperatorDescriptor createFileScanOp(JobSpecification spec) {
-        FileSplit[] dblpTitleFileSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/cleanednumbereddblptitles.txt"))) };
-        IFileSplitProvider dblpTitleSplitProvider = new ConstantFileSplitProvider(dblpTitleFileSplits);
-        RecordDescriptor dblpTitleRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-        FileScanOperatorDescriptor dblpTitleScanner = new FileScanOperatorDescriptor(spec, dblpTitleSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), dblpTitleRecDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dblpTitleScanner, NC1_ID);
-        return dblpTitleScanner;
-    }
-
-    private IOperatorDescriptor createPrimaryBulkLoadOp(JobSpecification spec) {
-        int[] fieldPermutation = { 0, 1 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, primaryFileSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, null, fieldPermutation, 0.7f, true, 1000L, btreeDataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
-        return primaryBtreeBulkLoad;
-    }
-
-    private IOperatorDescriptor createScanKeyProviderOp(JobSpecification spec) throws HyracksDataException {
-        // build dummy tuple containing nothing
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-        tb.addFieldEndOffset();
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-        return keyProviderOp;
-    }
-
-    private IOperatorDescriptor createPrimaryScanOp(JobSpecification spec) throws HyracksDataException {
-        int[] lowKeyFields = null; // - infinity
-        int[] highKeyFields = null; // + infinity
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primaryFileSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, null, lowKeyFields, highKeyFields, true, true, btreeDataflowHelperFactory,
-                false, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-        return primaryBtreeSearchOp;
-    }
-
-    private void loadPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        // Assuming that the data is pre-sorted on the key. No need to sort
-        // before bulk load.
-        IOperatorDescriptor fileScanOp = createFileScanOp(spec);
-        IOperatorDescriptor primaryBulkLoad = createPrimaryBulkLoadOp(spec);
-        spec.connect(new OneToOneConnectorDescriptor(spec), fileScanOp, 0, primaryBulkLoad, 0);
-        spec.addRoot(primaryBulkLoad);
-        runTest(spec);
-    }
-
-    private void printPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
-        IOperatorDescriptor primaryScanOp = createPrimaryScanOp(spec);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, printer, 0);
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    private IOperatorDescriptor createExternalSortOp(JobSpecification spec, int[] sortFields,
-            RecordDescriptor outputRecDesc) {
-        ExternalSortOperatorDescriptor externalSortOp = new ExternalSortOperatorDescriptor(spec, 1000, sortFields,
-                sortComparatorFactories, outputRecDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, externalSortOp, NC1_ID);
-        return externalSortOp;
-    }
-
-    private IOperatorDescriptor createBinaryTokenizerOp(JobSpecification spec, int docField, int[] keyFields) {
-        BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
-                tokenizerRecDesc, tokenizerFactory, docField, keyFields, addNumTokensKey());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
-        return binaryTokenizer;
-    }
-
-    private IOperatorDescriptor createInvertedIndexBulkLoadOp(JobSpecification spec, int[] fieldPermutation) {
-        LSMInvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = new LSMInvertedIndexBulkLoadOperatorDescriptor(
-                spec, fieldPermutation, true, 1000L, storageManager, btreeFileSplitProvider, lcManagerProvider,
-                tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
-                tokenizerFactory, invertedIndexDataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexBulkLoadOp, NC1_ID);
-        return invIndexBulkLoadOp;
-    }
-
-    public void loadInvertedIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
-        IOperatorDescriptor primaryScanOp = createPrimaryScanOp(spec);
-        int docField = 1;
-        int[] keyFields = { 0 };
-        IOperatorDescriptor binaryTokenizerOp = createBinaryTokenizerOp(spec, docField, keyFields);
-        int[] sortFields = new int[sortComparatorFactories.length];
-        int[] fieldPermutation = new int[sortComparatorFactories.length];
-        for (int i = 0; i < sortFields.length; i++) {
-            sortFields[i] = i;
-            fieldPermutation[i] = i;
-        }
-        IOperatorDescriptor externalSortOp = createExternalSortOp(spec, sortFields, tokenizerRecDesc);
-        IOperatorDescriptor invIndexBulkLoadOp = createInvertedIndexBulkLoadOp(spec, fieldPermutation);
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, binaryTokenizerOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), binaryTokenizerOp, 0, externalSortOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), externalSortOp, 0, invIndexBulkLoadOp, 0);
-        spec.addRoot(invIndexBulkLoadOp);
-        runTest(spec);
-    }
-
-    private IOperatorDescriptor createQueryProviderOp(JobSpecification spec, String queryString)
-            throws HyracksDataException {
-        // Build tuple with exactly one field, which is the query,
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
-        DataOutput dos = tb.getDataOutput();
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString, dos);
-        tb.addFieldEndOffset();
-        ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
-        ConstantTupleSourceOperatorDescriptor queryProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                queryRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, queryProviderOp, NC1_ID);
-        return queryProviderOp;
-    }
-
-    private IOperatorDescriptor createInvertedIndexSearchOp(JobSpecification spec,
-            IInvertedIndexSearchModifierFactory searchModifierFactory) {
-        LSMInvertedIndexSearchOperatorDescriptor invIndexSearchOp = new LSMInvertedIndexSearchOperatorDescriptor(spec,
-                0, storageManager, btreeFileSplitProvider, lcManagerProvider, tokenTypeTraits,
-                tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
-                invertedIndexDataflowHelperFactory, tokenizerFactory, searchModifierFactory, invListsRecDesc, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexSearchOp, NC1_ID);
-        return invIndexSearchOp;
-    }
-
-    public void searchInvertedIndex(String queryString, IInvertedIndexSearchModifierFactory searchModifierFactory)
-            throws Exception {
-        JobSpecification spec = new JobSpecification();
-        IOperatorDescriptor queryProviderOp = createQueryProviderOp(spec, queryString);
-        IOperatorDescriptor invIndexSearchOp = createInvertedIndexSearchOp(spec, searchModifierFactory);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        spec.connect(new OneToOneConnectorDescriptor(spec), queryProviderOp, 0, invIndexSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), invIndexSearchOp, 0, printer, 0);
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/BinaryTokenizerOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/BinaryTokenizerOperatorTest.java
deleted file mode 100644
index 47480da..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/BinaryTokenizerOperatorTest.java
+++ /dev/null
@@ -1,89 +0,0 @@
-package edu.uci.ics.hyracks.tests.am.invertedindex;
-
-import java.io.File;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.ITokenFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8WordTokenFactory;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-
-public class BinaryTokenizerOperatorTest extends AbstractIntegrationTest {
-
-    @Test
-    public void tokenizerTest() throws Exception {
-        test(false);
-    }
-
-    @Test
-    public void tokenizerWithNumTokensTest() throws Exception {
-        test(true);
-    }
-
-    private void test(boolean addNumTokensKey) throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] dblpTitleFileSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/cleanednumbereddblptitles.txt"))) };
-        IFileSplitProvider dblpTitleSplitProvider = new ConstantFileSplitProvider(dblpTitleFileSplits);
-        RecordDescriptor dblpTitleRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor dblpTitleScanner = new FileScanOperatorDescriptor(spec, dblpTitleSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), dblpTitleRecDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dblpTitleScanner, NC1_ID);
-
-        RecordDescriptor tokenizerRecDesc;
-        if (!addNumTokensKey) {
-            tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                    UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-        } else {
-            tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                    UTF8StringSerializerDeserializer.INSTANCE, ShortSerializerDeserializer.INSTANCE,
-                    IntegerSerializerDeserializer.INSTANCE });
-        }
-
-        ITokenFactory tokenFactory = new UTF8WordTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
-                tokenFactory);
-        int[] keyFields = { 0 };
-        BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
-                tokenizerRecDesc, tokenizerFactory, 1, keyFields, addNumTokensKey);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), dblpTitleScanner, 0, binaryTokenizer, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), binaryTokenizer, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java
deleted file mode 100644
index 62d4362..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.invertedindex;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
-
-public class PartitionedWordInvertedIndexTest extends AbstractfWordInvertedIndexTest {
-
-    @Override
-    protected void prepare() {
-        // Field declarations and comparators for tokens.
-        tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS, ShortPointable.TYPE_TRAITS };
-        tokenComparatorFactories = new IBinaryComparatorFactory[] {
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                PointableBinaryComparatorFactory.of(ShortPointable.FACTORY) };
-
-        tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, ShortSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE });
-
-        sortComparatorFactories = new IBinaryComparatorFactory[] {
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                PointableBinaryComparatorFactory.of(ShortPointable.FACTORY),
-                PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
-
-        invertedIndexDataflowHelperFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
-                new ConstantMergePolicyProvider(MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
-                SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE,
-                DEFAULT_MEM_NUM_PAGES);
-    }
-
-    @Override
-    protected boolean addNumTokensKey() {
-        return true;
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java
deleted file mode 100644
index c35c6c9..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.invertedindex;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
-
-public class WordInvertedIndexTest extends AbstractfWordInvertedIndexTest {
-
-    @Override
-    protected void prepare() {
-        // Field declarations and comparators for tokens.
-        tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
-        tokenComparatorFactories = new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                .of(UTF8StringPointable.FACTORY) };
-
-        tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-
-        sortComparatorFactories = new IBinaryComparatorFactory[] {
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
-
-        invertedIndexDataflowHelperFactory = new LSMInvertedIndexDataflowHelperFactory(new ConstantMergePolicyProvider(
-                MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
-                SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
-    }
-
-    @Override
-    protected boolean addNumTokensKey() {
-        return false;
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java
deleted file mode 100644
index 912ab0e..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.btree;
-
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.tests.am.common.LSMTreeOperatorTestHelper;
-
-public class LSMBTreeOperatorTestHelper extends LSMTreeOperatorTestHelper {
-
-    private static final int MERGE_THRESHOLD = 3;
-
-    public LSMBTreeOperatorTestHelper(IOManager ioManager) {
-        super(ioManager);
-    }
-
-    public IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return new LSMBTreeDataflowHelperFactory(new ConstantMergePolicyProvider(MERGE_THRESHOLD),
-                ThreadCountingOperationTrackerFactory.INSTANCE, SynchronousSchedulerProvider.INSTANCE,
-                NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
-    }
-
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java
deleted file mode 100644
index d751399..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.btree;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.btree.BTreePrimaryIndexScanOperatorTest;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-
-public class LSMBTreePrimaryIndexScanOperatorTest extends BTreePrimaryIndexScanOperatorTest {
-
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java
deleted file mode 100644
index bdcf3f6..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.btree;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.btree.BTreePrimaryIndexSearchOperatorTest;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-
-public class LSMBTreePrimaryIndexSearchOperatorTest extends BTreePrimaryIndexSearchOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java
deleted file mode 100644
index 5ba7279..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.btree;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.btree.BTreeSecondaryIndexInsertOperatorTest;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-
-public class LSMBTreeSecondaryIndexInsertOperatorTest extends BTreeSecondaryIndexInsertOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java
deleted file mode 100644
index c1b1cd8..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.btree;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.btree.BTreeSecondaryIndexSearchOperatorTest;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-
-public class LSMBTreeSecondaryIndexSearchOperatorTest extends BTreeSecondaryIndexSearchOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java
deleted file mode 100644
index 84b34b7..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.tests.am.common.LSMTreeOperatorTestHelper;
-
-public class LSMRTreeOperatorTestHelper extends LSMTreeOperatorTestHelper {
-
-    private static final int MERGE_THRESHOLD = 3;
-
-    public LSMRTreeOperatorTestHelper(IOManager ioManager) {
-        super(ioManager);
-    }
-
-    public IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return new LSMRTreeDataflowHelperFactory(valueProviderFactories, rtreePolicyType, btreeComparatorFactories,
-                new ConstantMergePolicyProvider(MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
-                SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, linearizerCmpFactory,
-                DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
-    }
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java
deleted file mode 100644
index 99eb6c3..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexInsertOperatorTest;
-
-public class LSMRTreeSecondaryIndexInsertOperatorTest extends RTreeSecondaryIndexInsertOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMRTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return ((LSMRTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
-                rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java
deleted file mode 100644
index e1e6b04..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexSearchOperatorTest;
-
-public class LSMRTreeSecondaryIndexSearchOperatorTest extends RTreeSecondaryIndexSearchOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMRTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return ((LSMRTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
-                rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
deleted file mode 100644
index bb31dcd..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeWithAntiMatterTuplesDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.tests.am.common.LSMTreeOperatorTestHelper;
-
-public class LSMRTreeWithAntiMatterTuplesOperatorTestHelper extends LSMTreeOperatorTestHelper {
-
-    private static final int MERGE_THRESHOLD = 3;
-
-    public LSMRTreeWithAntiMatterTuplesOperatorTestHelper(IOManager ioManager) {
-        super(ioManager);
-    }
-
-    public IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return new LSMRTreeWithAntiMatterTuplesDataflowHelperFactory(valueProviderFactories, rtreePolicyType,
-                btreeComparatorFactories, new ConstantMergePolicyProvider(MERGE_THRESHOLD),
-                ThreadCountingOperationTrackerFactory.INSTANCE, SynchronousSchedulerProvider.INSTANCE,
-                NoOpIOOperationCallback.INSTANCE, linearizerCmpFactory);
-    }
-
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java
deleted file mode 100644
index bbf4dd3..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexInsertOperatorTest;
-
-public class LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest extends RTreeSecondaryIndexInsertOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMRTreeWithAntiMatterTuplesOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return ((LSMRTreeWithAntiMatterTuplesOperatorTestHelper) testHelper).createDataFlowHelperFactory(
-                secondaryValueProviderFactories, rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java
deleted file mode 100644
index b615636..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexSearchOperatorTest;
-
-public class LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest extends RTreeSecondaryIndexSearchOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new LSMRTreeWithAntiMatterTuplesOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return ((LSMRTreeWithAntiMatterTuplesOperatorTestHelper) testHelper).createDataFlowHelperFactory(
-                secondaryValueProviderFactories, rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
deleted file mode 100644
index c4a63df..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
+++ /dev/null
@@ -1,369 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.rtree;
-
-import java.io.DataOutput;
-import java.io.File;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.test.support.TestIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-
-public abstract class AbstractRTreeOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    protected final IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    protected final IIndexLifecycleManagerProvider lcManagerProvider = new TestIndexLifecycleManagerProvider();
-    protected IIndexDataflowHelperFactory rtreeDataflowHelperFactory;
-    protected IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-    // field, type and key declarations for primary index
-    protected final int primaryFieldCount = 10;
-    protected final ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    protected final int primaryKeyFieldCount = 1;
-    protected final IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-
-    protected final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE });
-
-    // to be set by subclasses
-    protected String primaryFileName;
-    protected IFileSplitProvider primarySplitProvider;
-
-    // field, type and key declarations for secondary indexes
-    protected final int secondaryFieldCount = 5;
-    protected final ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-    protected final int secondaryKeyFieldCount = 4;
-    protected final IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
-
-    protected final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE });
-
-    // This is only used for the LSMRTree. We need a comparator Factories for
-    // the BTree component of the LSMRTree.
-    protected final int btreeKeyFieldCount = 5;
-    protected final IBinaryComparatorFactory[] btreeComparatorFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
-
-    protected String secondaryFileName;
-    protected IFileSplitProvider secondarySplitProvider;
-
-    protected ITreeIndexOperatorTestHelper testHelper;
-
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
-        return new RTreeOperatorTestHelper();
-    }
-
-    @Before
-    public void setup() throws Exception {
-        testHelper = createTestHelper();
-
-        primaryFileName = testHelper.getPrimaryIndexName();
-        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, new FileReference(
-                new File(primaryFileName))) });
-        secondaryFileName = testHelper.getSecondaryIndexName();
-        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                new FileReference(new File(secondaryFileName))) });
-
-        // field, type and key declarations for primary index
-        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[6] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[7] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[8] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[9] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // field, type and key declarations for secondary indexes
-        secondaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        secondaryComparatorFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        secondaryComparatorFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-
-        // This only used for LSMRTree
-        btreeComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeComparatorFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeComparatorFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeComparatorFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeComparatorFactories[4] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        IPrimitiveValueProviderFactory[] secondaryValueProviderFactories = RTreeUtils
-                .createPrimitiveValueProviderFactories(secondaryComparatorFactories.length, DoublePointable.FACTORY);
-
-        rtreeDataflowHelperFactory = createDataFlowHelperFactory(secondaryValueProviderFactories,
-                RTreePolicyType.RSTARTREE, btreeComparatorFactories,
-                LSMRTreeUtils.proposeBestLinearizer(secondaryTypeTraits, secondaryComparatorFactories.length));
-
-    }
-
-    protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory)
-            throws TreeIndexException;
-
-    protected void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories, null,
-                btreeDataflowHelperFactory, localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-
-    protected void loadPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/orders-with-locations-part1.txt"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7, 9, 10, 11, 12 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                null, fieldPermutation, 0.7f, false, 1000L, btreeDataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBulkLoad, 0);
-
-        spec.addRoot(primaryBulkLoad);
-        runTest(spec);
-    }
-
-    protected void createSecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
-        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, null, rtreeDataflowHelperFactory, localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
-        spec.addRoot(secondaryCreateOp);
-        runTest(spec);
-    }
-
-    protected void loadSecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build dummy tuple containing nothing
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = null; // - infinity
-        int[] highKeyFields = null; // + infinity
-
-        // scan primary index
-        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                null, lowKeyFields, highKeyFields, true, true, btreeDataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
-
-        // load secondary index
-        int[] fieldPermutation = { 6, 7, 8, 9, 0 };
-        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, null, fieldPermutation, 0.7f, false, 1000L, rtreeDataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primarySearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, secondaryBulkLoad, 0);
-
-        spec.addRoot(secondaryBulkLoad);
-        runTest(spec);
-    }
-
-    protected void insertPipeline() throws Exception {
-
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/orders-with-locations-part2.txt"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        // insert into primary index
-        int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7, 9, 10, 11, 12 };
-        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, ordersDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, null, primaryFieldPermutation, IndexOperation.INSERT,
-                btreeDataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryInsertOp, NC1_ID);
-
-        // secondary index
-        int[] secondaryFieldPermutation = { 9, 10, 11, 12, 0 };
-        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, ordersDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOperation.INSERT,
-                rtreeDataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryInsertOp, NC1_ID);
-
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, nullSink, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, primaryInsertOp, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryInsertOp, 0, secondaryInsertOp, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsertOp, 0, nullSink, 0);
-
-        spec.addRoot(nullSink);
-        runTest(spec);
-    }
-
-    protected void destroyPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        IndexDropOperatorDescriptor primaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, primarySplitProvider, btreeDataflowHelperFactory);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryDropOp, NC1_ID);
-        spec.addRoot(primaryDropOp);
-        runTest(spec);
-    }
-
-    protected void destroySecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        IndexDropOperatorDescriptor secondaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, secondarySplitProvider, rtreeDataflowHelperFactory);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryDropOp, NC1_ID);
-        spec.addRoot(secondaryDropOp);
-        runTest(spec);
-    }
-
-    @After
-    public abstract void cleanup() throws Exception;
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeOperatorTestHelper.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeOperatorTestHelper.java
deleted file mode 100644
index 85abcb1..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeOperatorTestHelper.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.tests.am.common.TreeOperatorTestHelper;
-
-public class RTreeOperatorTestHelper extends TreeOperatorTestHelper {
-
-    public IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories) {
-        return new RTreeDataflowHelperFactory(valueProviderFactories, rtreePolicyType);
-    }
-
-}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
deleted file mode 100644
index 8e68bcf..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.rtree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class RTreeSecondaryIndexInsertOperatorTest extends AbstractRTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        createSecondaryIndex();
-        loadSecondaryIndex();
-        insertPipeline();
-    }
-
-    @Test
-    public void searchUpdatedSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] keyFields = { 0, 1, 2, 3 };
-
-        RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
-
-        // fifth field from the tuples coming from secondary index
-        int[] primaryLowKeyFields = { 4 };
-        // fifth field from the tuples coming from secondary index
-        int[] primaryHighKeyFields = { 4 };
-
-        // search primary index
-        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                null, primaryLowKeyFields, primaryHighKeyFields, true, true, btreeDataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory)
-            throws TreeIndexException {
-        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
-                rtreePolicyType, null);
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-        destroySecondaryIndex();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java
deleted file mode 100644
index 90bb918..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.rtree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class RTreeSecondaryIndexScanOperatorTest extends AbstractRTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        createSecondaryIndex();
-        loadSecondaryIndex();
-    }
-
-    @Test
-    public void scanPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build dummy tuple
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] keyFields = null;
-
-        RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
-                rtreePolicyType, null);
-    }
-    
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-        destroySecondaryIndex();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
deleted file mode 100644
index 5da122c..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.rtree;
-
-import java.io.DataOutput;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class RTreeSecondaryIndexSearchOperatorTest extends AbstractRTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        createSecondaryIndex();
-        loadSecondaryIndex();
-    }
-
-    @Test
-    public void searchSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] keyFields = { 0, 1, 2, 3 };
-
-        RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
-
-        // fifth field from the tuples coming from secondary index
-        int[] primaryLowKeyFields = { 4 };
-        // fifth field from the tuples coming from secondary index
-        int[] primaryHighKeyFields = { 4 };
-
-        // search primary index
-        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                null, primaryLowKeyFields, primaryHighKeyFields, true, true, btreeDataflowHelperFactory, false,
-                NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory)
-            throws TreeIndexException {
-        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
-                rtreePolicyType, null);
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-        destroySecondaryIndex();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java
deleted file mode 100644
index 83be1d9..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.am.rtree;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class RTreeSecondaryIndexStatsOperatorTest extends AbstractRTreeOperatorTest {
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-        createPrimaryIndex();
-        loadPrimaryIndex();
-        createSecondaryIndex();
-        loadSecondaryIndex();
-    }
-
-    @Test
-    public void showPrimaryIndexStats() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        TreeIndexStatsOperatorDescriptor secondaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
-                lcManagerProvider, secondarySplitProvider, secondaryTypeTraits, secondaryComparatorFactories, null,
-                rtreeDataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryStatsOp, NC1_ID);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryStatsOp, 0, printer, 0);
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
-            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
-            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
-        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
-                rtreePolicyType, null);
-    }
-
-    @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
-        destroySecondaryIndex();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
deleted file mode 100644
index 0e0e899..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.tests.integration;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.io.FileUtils;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
-
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.job.JobFlag;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
-import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
-import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
-import edu.uci.ics.hyracks.control.nc.NodeControllerService;
-
-public abstract class AbstractIntegrationTest {
-    private static final Logger LOGGER = Logger.getLogger(AbstractIntegrationTest.class.getName());
-
-    public static final String NC1_ID = "nc1";
-    public static final String NC2_ID = "nc2";
-
-    private static ClusterControllerService cc;
-    private static NodeControllerService nc1;
-    private static NodeControllerService nc2;
-    private static IHyracksClientConnection hcc;
-
-    private final List<File> outputFiles;
-    
-    protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
-    protected static int DEFAULT_MEM_NUM_PAGES = 1000;
-
-    @Rule
-    public TemporaryFolder outputFolder = new TemporaryFolder();
-
-    public AbstractIntegrationTest() {
-        outputFiles = new ArrayList<File>();
-    }
-
-    @BeforeClass
-    public static void init() throws Exception {
-        CCConfig ccConfig = new CCConfig();
-        ccConfig.clientNetIpAddress = "127.0.0.1";
-        ccConfig.clientNetPort = 39000;
-        ccConfig.clusterNetIpAddress = "127.0.0.1";
-        ccConfig.clusterNetPort = 39001;
-        ccConfig.profileDumpPeriod = 10000;
-        File outDir = new File("target/ClusterController");
-        outDir.mkdirs();
-        File ccRoot = File.createTempFile(AbstractIntegrationTest.class.getName(), ".data", outDir);
-        ccRoot.delete();
-        ccRoot.mkdir();
-        ccConfig.ccRoot = ccRoot.getAbsolutePath();
-        cc = new ClusterControllerService(ccConfig);
-        cc.start();
-
-        NCConfig ncConfig1 = new NCConfig();
-        ncConfig1.ccHost = "localhost";
-        ncConfig1.ccPort = 39001;
-        ncConfig1.clusterNetIPAddress = "127.0.0.1";
-        ncConfig1.dataIPAddress = "127.0.0.1";
-        ncConfig1.nodeId = NC1_ID;
-        nc1 = new NodeControllerService(ncConfig1);
-        nc1.start();
-
-        NCConfig ncConfig2 = new NCConfig();
-        ncConfig2.ccHost = "localhost";
-        ncConfig2.ccPort = 39001;
-        ncConfig2.clusterNetIPAddress = "127.0.0.1";
-        ncConfig2.dataIPAddress = "127.0.0.1";
-        ncConfig2.nodeId = NC2_ID;
-        nc2 = new NodeControllerService(ncConfig2);
-        nc2.start();
-
-        hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
-        hcc.createApplication("test", null);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
-        }
-    }
-
-    @AfterClass
-    public static void deinit() throws Exception {
-        nc2.stop();
-        nc1.stop();
-        cc.stop();
-    }
-
-    protected void runTest(JobSpecification spec) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(spec.toJSON().toString(2));
-        }
-        JobId jobId = hcc.startJob("test", spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(jobId.toString());
-        }
-        hcc.waitForCompletion(jobId);
-        dumpOutputFiles();
-    }
-
-    private void dumpOutputFiles() {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            for (File f : outputFiles) {
-                if (f.exists() && f.isFile()) {
-                    try {
-                        LOGGER.info("Reading file: " + f.getAbsolutePath() + " in test: " + getClass().getName());
-                        String data = FileUtils.readFileToString(f);
-                        LOGGER.info(data);
-                    } catch (IOException e) {
-                        LOGGER.info("Error reading file: " + f.getAbsolutePath());
-                        LOGGER.info(e.getMessage());
-                    }
-                }
-            }
-        }
-    }
-
-    protected File createTempFile() throws IOException {
-        File tempFile = File.createTempFile(getClass().getName(), ".tmp", outputFolder.getRoot());
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Output file: " + tempFile.getAbsolutePath());
-        }
-        outputFiles.add(tempFile);
-        return tempFile;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
deleted file mode 100644
index 61d4696..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ /dev/null
@@ -1,1029 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.tests.integration;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.io.IOException;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.MToNReplicatingConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.GraceHashJoinOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.InMemoryHashJoinOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.MaterializingOperatorDescriptor;
-
-public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
-    private static class NoopNullWriterFactory implements INullWriterFactory {
-
-        private static final long serialVersionUID = 1L;
-        public static final NoopNullWriterFactory INSTANCE = new NoopNullWriterFactory();
-
-        private NoopNullWriterFactory() {
-        }
-
-        @Override
-        public INullWriter createNullWriter() {
-            return new INullWriter() {
-                @Override
-                public void writeNull(DataOutput out) throws HyracksDataException {
-                    try {
-                        out.writeShort(0);
-                    } catch (IOException e) {
-                        throw new HyracksDataException(e);
-                    }
-                }
-            };
-        }
-    }
-
-    /*
-     * TPCH Customer table: CREATE TABLE CUSTOMER ( C_CUSTKEY INTEGER NOT NULL,
-     * C_NAME VARCHAR(25) NOT NULL, C_ADDRESS VARCHAR(40) NOT NULL, C_NATIONKEY
-     * INTEGER NOT NULL, C_PHONE CHAR(15) NOT NULL, C_ACCTBAL DECIMAL(15,2) NOT
-     * NULL, C_MKTSEGMENT CHAR(10) NOT NULL, C_COMMENT VARCHAR(117) NOT NULL );
-     * TPCH Orders table: CREATE TABLE ORDERS ( O_ORDERKEY INTEGER NOT NULL,
-     * O_CUSTKEY INTEGER NOT NULL, O_ORDERSTATUS CHAR(1) NOT NULL, O_TOTALPRICE
-     * DECIMAL(15,2) NOT NULL, O_ORDERDATE DATE NOT NULL, O_ORDERPRIORITY
-     * CHAR(15) NOT NULL, O_CLERK CHAR(15) NOT NULL, O_SHIPPRIORITY INTEGER NOT
-     * NULL, O_COMMENT VARCHAR(79) NOT NULL );
-     */
-
-    @Test
-    public void customerOrderCIDJoin() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/customer.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
-                "data/tpch0.001/orders.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
-
-        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
-                spec,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc, 128);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDGraceJoin() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/customer.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
-                "data/tpch0.001/orders.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
-
-        GraceHashJoinOperatorDescriptor join = new GraceHashJoinOperatorDescriptor(
-                spec,
-                4,
-                10,
-                200,
-                1.2,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDHybridHashJoin() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/customer.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
-                "data/tpch0.001/orders.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
-
-        HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
-                spec,
-                5,
-                20,
-                200,
-                1.2,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDInMemoryHashLeftOuterJoin() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/customer.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
-                "data/tpch0.001/orders.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
-
-        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
-        for (int j = 0; j < nullWriterFactories.length; j++) {
-            nullWriterFactories[j] = NoopNullWriterFactory.INSTANCE;
-        }
-
-        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
-                spec,
-                new int[] { 0 },
-                new int[] { 1 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc, true, nullWriterFactories, 128);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
-
-        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 0);
-
-        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDGraceHashLeftOuterJoin() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/customer.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
-                "data/tpch0.001/orders.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
-
-        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
-        for (int j = 0; j < nullWriterFactories.length; j++) {
-            nullWriterFactories[j] = NoopNullWriterFactory.INSTANCE;
-        }
-
-        GraceHashJoinOperatorDescriptor join = new GraceHashJoinOperatorDescriptor(
-                spec,
-                5,
-                20,
-                200,
-                1.2,
-                new int[] { 0 },
-                new int[] { 1 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc, true, nullWriterFactories);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
-
-        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 0);
-
-        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDHybridHashLeftOuterJoin() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/customer.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
-                "data/tpch0.001/orders.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
-
-        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
-        for (int j = 0; j < nullWriterFactories.length; j++) {
-            nullWriterFactories[j] = NoopNullWriterFactory.INSTANCE;
-        }
-
-        HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
-                spec,
-                5,
-                20,
-                200,
-                1.2,
-                new int[] { 0 },
-                new int[] { 1 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc, true, nullWriterFactories);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
-
-        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 0);
-
-        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDJoinMulti() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
-
-        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
-                spec,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc, 128);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDGraceJoinMulti() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
-
-        GraceHashJoinOperatorDescriptor join = new GraceHashJoinOperatorDescriptor(
-                spec,
-                3,
-                20,
-                100,
-                1.2,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDHybridHashJoinMulti() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
-
-        HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
-                spec,
-                3,
-                20,
-                100,
-                1.2,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDJoinAutoExpand() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
-
-        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
-                spec,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc, 128);
-        PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDJoinMultiMaterialized() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
-
-        MaterializingOperatorDescriptor ordMat = new MaterializingOperatorDescriptor(spec, ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordMat, NC1_ID, NC2_ID);
-
-        MaterializingOperatorDescriptor custMat = new MaterializingOperatorDescriptor(spec, custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custMat, NC1_ID, NC2_ID);
-
-        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
-                spec,
-                new int[] { 1 },
-                new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                custOrderJoinDesc, 128);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordPartConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(ordPartConn, ordScanner, 0, ordMat, 0);
-
-        IConnectorDescriptor custPartConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(custPartConn, custScanner, 0, custMat, 0);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordMat, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custMat, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
deleted file mode 100644
index 1e60372..0000000
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
+++ /dev/null
@@ -1,337 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.tests.integration;
-
-import java.io.File;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.MToNReplicatingConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
-
-public class TPCHCustomerOrderNestedLoopJoinTest extends AbstractIntegrationTest {
-    private static class JoinComparatorFactory implements ITuplePairComparatorFactory {
-        private static final long serialVersionUID = 1L;
-
-        private final IBinaryComparatorFactory bFactory;
-        private final int pos0;
-        private final int pos1;
-
-        public JoinComparatorFactory(IBinaryComparatorFactory bFactory, int pos0, int pos1) {
-            this.bFactory = bFactory;
-            this.pos0 = pos0;
-            this.pos1 = pos1;
-        }
-
-        @Override
-        public ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
-            return new JoinComparator(bFactory.createBinaryComparator(), pos0, pos1);
-        }
-    }
-
-    private static class JoinComparator implements ITuplePairComparator {
-
-        private final IBinaryComparator bComparator;
-        private final int field0;
-        private final int field1;
-
-        public JoinComparator(IBinaryComparator bComparator, int field0, int field1) {
-            this.bComparator = bComparator;
-            this.field0 = field0;
-            this.field1 = field1;
-        }
-
-        @Override
-        public int compare(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1) {
-            int tStart0 = accessor0.getTupleStartOffset(tIndex0);
-            int fStartOffset0 = accessor0.getFieldSlotsLength() + tStart0;
-
-            int tStart1 = accessor1.getTupleStartOffset(tIndex1);
-            int fStartOffset1 = accessor1.getFieldSlotsLength() + tStart1;
-
-            int fStart0 = accessor0.getFieldStartOffset(tIndex0, field0);
-            int fEnd0 = accessor0.getFieldEndOffset(tIndex0, field0);
-            int fLen0 = fEnd0 - fStart0;
-
-            int fStart1 = accessor1.getFieldStartOffset(tIndex1, field1);
-            int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
-            int fLen1 = fEnd1 - fStart1;
-
-            int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
-                    .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
-            if (c != 0) {
-                return c;
-            }
-            return 0;
-        }
-    }
-
-    /*
-     * TPCH Customer table: CREATE TABLE CUSTOMER ( C_CUSTKEY INTEGER NOT NULL,
-     * C_NAME VARCHAR(25) NOT NULL, C_ADDRESS VARCHAR(40) NOT NULL, C_NATIONKEY
-     * INTEGER NOT NULL, C_PHONE CHAR(15) NOT NULL, C_ACCTBAL DECIMAL(15,2) NOT
-     * NULL, C_MKTSEGMENT CHAR(10) NOT NULL, C_COMMENT VARCHAR(117) NOT NULL );
-     * TPCH Orders table: CREATE TABLE ORDERS ( O_ORDERKEY INTEGER NOT NULL,
-     * O_CUSTKEY INTEGER NOT NULL, O_ORDERSTATUS CHAR(1) NOT NULL, O_TOTALPRICE
-     * DECIMAL(15,2) NOT NULL, O_ORDERDATE DATE NOT NULL, O_ORDERPRIORITY
-     * CHAR(15) NOT NULL, O_CLERK CHAR(15) NOT NULL, O_SHIPPRIORITY INTEGER NOT
-     * NULL, O_COMMENT VARCHAR(79) NOT NULL );
-     */
-    @Test
-    public void customerOrderCIDJoin() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/customer.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/orders.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
-
-        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 4);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDJoinMulti() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
-
-        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @Test
-    public void customerOrderCIDJoinAutoExpand() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
-                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
-
-        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 6);
-        PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
-
-        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(custJoinConn, custScanner, 0, join, 1);
-
-        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
-        spec.connect(joinPrinterConn, join, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-examples/pom.xml b/hyracks-examples/pom.xml
deleted file mode 100644
index ceae3d0..0000000
--- a/hyracks-examples/pom.xml
+++ /dev/null
@@ -1,21 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-examples</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>tpch-example</module>
-    <module>text-example</module>
-    <module>btree-example</module>
-    <module>hyracks-integration-tests</module>
-    <module>hadoop-compat-example</module>
-  </modules>
-</project>
diff --git a/hyracks-examples/text-example/pom.xml b/hyracks-examples/text-example/pom.xml
deleted file mode 100644
index d5b00a2..0000000
--- a/hyracks-examples/text-example/pom.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples</groupId>
-  <artifactId>text-example</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-examples</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>texthelper</module>
-    <module>textclient</module>
-    <module>textapp</module>
-  </modules>
-</project>
diff --git a/hyracks-examples/text-example/textapp/pom.xml b/hyracks-examples/text-example/textapp/pom.xml
deleted file mode 100644
index cedc2af..0000000
--- a/hyracks-examples/text-example/textapp/pom.xml
+++ /dev/null
@@ -1,186 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
-  <artifactId>textapp</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>text-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-      	<groupId>edu.uci.ics.hyracks</groupId>
-      	<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
-      	<version>0.2.2-SNAPSHOT</version>
-        <configuration>
-          <hyracksServerHome>${basedir}/../../../hyracks-server/target/hyracks-server-${project.version}-binary-assembly</hyracksServerHome>
-          <hyracksCLIHome>${basedir}/../../../hyracks-cli/target/hyracks-cli-${project.version}-binary-assembly</hyracksCLIHome>
-          <jvmOptions>${jvm.extraargs}</jvmOptions>
-        </configuration>
-        <executions>
-          <execution>
-            <id>hyracks-cc-start</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>start-cc</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>hyracks-nc1-start</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>start-nc</goal>
-            </goals>
-            <configuration>
-              <nodeId>NC1</nodeId>
-              <dataIpAddress>127.0.0.1</dataIpAddress>
-              <ccHost>localhost</ccHost>
-            </configuration>
-          </execution>
-          <execution>
-            <id>hyracks-nc2-start</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>start-nc</goal>
-            </goals>
-            <configuration>
-              <nodeId>NC2</nodeId>
-              <dataIpAddress>127.0.0.1</dataIpAddress>
-              <ccHost>localhost</ccHost>
-            </configuration>
-          </execution>
-          <execution>
-            <id>deploy-app</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>deploy-app</goal>
-            </goals>
-            <configuration>
-              <ccHost>localhost</ccHost>
-              <appName>text</appName>
-              <harFile>${project.build.directory}/textapp-${project.version}-app-assembly.zip</harFile>
-            </configuration>
-          </execution>
-          <execution>
-            <id>stop-services</id>
-            <phase>post-integration-test</phase>
-            <goals>
-              <goal>stop-services</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-      	<groupId>org.apache.maven.plugins</groupId>
-      	<artifactId>maven-compiler-plugin</artifactId>
-      	<version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-      	<groupId>org.apache.maven.plugins</groupId>
-      	<artifactId>maven-failsafe-plugin</artifactId>
-      	<version>2.8.1</version>
-      	<executions>
-      	  <execution>
-      	    <id>it</id>
-      	    <phase>integration-test</phase>
-      	    <goals>
-      	      <goal>integration-test</goal>
-      	    </goals>
-      	  </execution>
-      	</executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
-  		<artifactId>texthelper</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
-  		<artifactId>textclient</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.2</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-examples/text-example/textclient/pom.xml b/hyracks-examples/text-example/textclient/pom.xml
deleted file mode 100644
index b96381f..0000000
--- a/hyracks-examples/text-example/textclient/pom.xml
+++ /dev/null
@@ -1,98 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
-  <artifactId>textclient</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>text-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
-  		<artifactId>texthelper</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-          <id>textclient</id>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.examples.text.client.WordCountMain</mainClass>
-                  <name>textclient</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-          <execution>
-          	<id>groupclient</id>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.examples.text.client.ExternalGroupClient</mainClass>
-                  <name>groupclient</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-examples/text-example/texthelper/pom.xml b/hyracks-examples/text-example/texthelper/pom.xml
deleted file mode 100644
index 8dc953e..0000000
--- a/hyracks-examples/text-example/texthelper/pom.xml
+++ /dev/null
@@ -1,44 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
-  <artifactId>texthelper</artifactId>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>text-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-data-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  	</dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-examples/tpch-example/pom.xml b/hyracks-examples/tpch-example/pom.xml
deleted file mode 100644
index f34cc08..0000000
--- a/hyracks-examples/tpch-example/pom.xml
+++ /dev/null
@@ -1,18 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples</groupId>
-  <artifactId>tpch-example</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-examples</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>tpchclient</module>
-    <module>tpchapp</module>
-  </modules>
-</project>
diff --git a/hyracks-examples/tpch-example/tpchapp/pom.xml b/hyracks-examples/tpch-example/tpchapp/pom.xml
deleted file mode 100644
index 80f2e09..0000000
--- a/hyracks-examples/tpch-example/tpchapp/pom.xml
+++ /dev/null
@@ -1,90 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.tpch</groupId>
-  <artifactId>tpchapp</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>tpch-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-    <dependency>
-        <groupId>edu.uci.ics.hyracks</groupId>
-        <artifactId>hyracks-dataflow-std</artifactId>
-        <version>0.2.2-SNAPSHOT</version>
-        <scope>compile</scope>
-    </dependency>
-    <dependency>
-    	<groupId>edu.uci.ics.hyracks</groupId>
-    	<artifactId>hyracks-data-std</artifactId>
-    	<version>0.2.2-SNAPSHOT</version>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks-examples/tpch-example/tpchclient/pom.xml
deleted file mode 100644
index 6b6a0b9..0000000
--- a/hyracks-examples/tpch-example/tpchclient/pom.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.tpch</groupId>
-  <artifactId>tpchclient</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>tpch-example</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-data-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  	</dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.examples.tpch.client.Main</mainClass>
-                  <name>tpchclient</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java b/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
deleted file mode 100644
index 01ccdef..0000000
--- a/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
+++ /dev/null
@@ -1,359 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.examples.tpch.client;
-
-import java.io.File;
-import java.util.EnumSet;
-
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobFlag;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.FrameFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory;
-import edu.uci.ics.hyracks.dataflow.std.group.aggregators.CountFieldAggregatorFactory;
-import edu.uci.ics.hyracks.dataflow.std.group.aggregators.MultiFieldsAggregatorFactory;
-import edu.uci.ics.hyracks.dataflow.std.group.hash.HashGroupOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.GraceHashJoinOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.InMemoryHashJoinOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
-
-public class Main {
-    private static class Options {
-        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
-        public String host;
-
-        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)", required = false)
-        public int port = 1098;
-
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
-        @Option(name = "-infile-customer-splits", usage = "Comma separated list of file-splits for the CUSTOMER input. A file-split is <node-name>:<path>", required = true)
-        public String inFileCustomerSplits;
-
-        @Option(name = "-infile-order-splits", usage = "Comma separated list of file-splits for the ORDER input. A file-split is <node-name>:<path>", required = true)
-        public String inFileOrderSplits;
-
-        @Option(name = "-outfile-splits", usage = "Comma separated list of file-splits for the output", required = true)
-        public String outFileSplits;
-
-        @Option(name = "-num-join-partitions", usage = "Number of Join partitions to use (default: 1)", required = false)
-        public int numJoinPartitions = 1;
-
-        @Option(name = "-profile", usage = "Enable/Disable profiling. (default: enabled)")
-        public boolean profile = true;
-
-        @Option(name = "-table-size", usage = "Table size for in-memory hash join", required = false)
-        public int tableSize = 8191;
-
-        @Option(name = "-algo", usage = "Join types", required = true)
-        public String algo;
-
-        // For grace/hybrid hash join only
-        @Option(name = "-mem-size", usage = "Memory size for hash join", required = true)
-        public int memSize;
-
-        @Option(name = "-input-size", usage = "Input size of the grace/hybrid hash join", required = false)
-        public int graceInputSize = 10;
-
-        @Option(name = "-records-per-frame", usage = "Records per frame for grace/hybrid hash join", required = false)
-        public int graceRecordsPerFrame = 200;
-
-        @Option(name = "-grace-factor", usage = "Factor of the grace/hybrid hash join", required = false)
-        public double graceFactor = 1.2;
-
-        // Whether group-by is processed after the join
-        @Option(name = "-has-groupby", usage = "Whether to have group-by operation after join (default: disabled)", required = false)
-        public boolean hasGroupBy = false;
-    }
-
-    public static void main(String[] args) throws Exception {
-        Options options = new Options();
-        CmdLineParser parser = new CmdLineParser(options);
-        parser.parseArgument(args);
-
-        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
-
-        JobSpecification job = createJob(parseFileSplits(options.inFileCustomerSplits),
-                parseFileSplits(options.inFileOrderSplits), parseFileSplits(options.outFileSplits),
-                options.numJoinPartitions, options.algo, options.graceInputSize, options.graceRecordsPerFrame,
-                options.graceFactor, options.memSize, options.tableSize, options.hasGroupBy);
-
-        long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job,
-                options.profile ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
-        hcc.waitForCompletion(jobId);
-        long end = System.currentTimeMillis();
-        System.err.println(start + " " + end + " " + (end - start));
-    }
-
-    private static FileSplit[] parseFileSplits(String fileSplits) {
-        String[] splits = fileSplits.split(",");
-        FileSplit[] fSplits = new FileSplit[splits.length];
-        for (int i = 0; i < splits.length; ++i) {
-            String s = splits[i].trim();
-            int idx = s.indexOf(':');
-            if (idx < 0) {
-                throw new IllegalArgumentException("File split " + s + " not well formed");
-            }
-            fSplits[i] = new FileSplit(s.substring(0, idx), new FileReference(new File(s.substring(idx + 1))));
-        }
-        return fSplits;
-    }
-
-    private static JobSpecification createJob(FileSplit[] customerSplits, FileSplit[] orderSplits,
-            FileSplit[] resultSplits, int numJoinPartitions, String algo, int graceInputSize, int graceRecordsPerFrame,
-            double graceFactor, int memSize, int tableSize, boolean hasGroupBy) throws HyracksDataException {
-        JobSpecification spec = new JobSpecification();
-
-        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(customerSplits);
-        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(orderSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        createPartitionConstraint(spec, ordScanner, orderSplits);
-
-        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
-        createPartitionConstraint(spec, custScanner, customerSplits);
-
-        IOperatorDescriptor join;
-
-        if ("nestedloop".equalsIgnoreCase(algo)) {
-            join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                    PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1), custOrderJoinDesc, memSize);
-
-        } else if ("gracehash".equalsIgnoreCase(algo)) {
-            join = new GraceHashJoinOperatorDescriptor(
-                    spec,
-                    memSize,
-                    graceInputSize,
-                    graceRecordsPerFrame,
-                    graceFactor,
-                    new int[] { 0 },
-                    new int[] { 1 },
-                    new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                            .of(UTF8StringPointable.FACTORY) },
-                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                    custOrderJoinDesc);
-
-        } else if ("hybridhash".equalsIgnoreCase(algo)) {
-            join = new HybridHashJoinOperatorDescriptor(
-                    spec,
-                    memSize,
-                    graceInputSize,
-                    graceRecordsPerFrame,
-                    graceFactor,
-                    new int[] { 0 },
-                    new int[] { 1 },
-                    new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                            .of(UTF8StringPointable.FACTORY) },
-                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                    custOrderJoinDesc);
-
-        } else {
-            join = new InMemoryHashJoinOperatorDescriptor(
-                    spec,
-                    new int[] { 0 },
-                    new int[] { 1 },
-                    new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                            .of(UTF8StringPointable.FACTORY) },
-                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                    custOrderJoinDesc, 6000000);
-        }
-
-        PartitionConstraintHelper.addPartitionCountConstraint(spec, join, numJoinPartitions);
-
-        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 1 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
-
-        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
-                new FieldHashPartitionComputerFactory(new int[] { 0 },
-                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                .of(UTF8StringPointable.FACTORY) }));
-        spec.connect(custJoinConn, custScanner, 0, join, 0);
-
-        IOperatorDescriptor endingOp = join;
-
-        if (hasGroupBy) {
-
-            RecordDescriptor groupResultDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                    UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-
-            HashGroupOperatorDescriptor gby = new HashGroupOperatorDescriptor(
-                    spec,
-                    new int[] { 6 },
-                    new FieldHashPartitionComputerFactory(new int[] { 6 },
-                            new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                    .of(UTF8StringPointable.FACTORY) }),
-                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                    new MultiFieldsAggregatorFactory(
-                            new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
-                    groupResultDesc, 16);
-            createPartitionConstraint(spec, gby, resultSplits);
-
-            IConnectorDescriptor joinGroupConn = new MToNPartitioningConnectorDescriptor(spec,
-                    new FieldHashPartitionComputerFactory(new int[] { 6 },
-                            new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                    .of(UTF8StringPointable.FACTORY) }));
-            spec.connect(joinGroupConn, join, 0, gby, 0);
-
-            endingOp = gby;
-        }
-
-        IFileSplitProvider outSplitProvider = new ConstantFileSplitProvider(resultSplits);
-        FrameFileWriterOperatorDescriptor writer = new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
-        createPartitionConstraint(spec, writer, resultSplits);
-
-        IConnectorDescriptor endingPrinterConn = new OneToOneConnectorDescriptor(spec);
-        spec.connect(endingPrinterConn, endingOp, 0, writer, 0);
-
-        spec.addRoot(writer);
-        return spec;
-    }
-
-    private static void createPartitionConstraint(JobSpecification spec, IOperatorDescriptor op, FileSplit[] splits) {
-        String[] parts = new String[splits.length];
-        for (int i = 0; i < splits.length; ++i) {
-            parts[i] = splits[i].getNodeName();
-        }
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, op, parts);
-    }
-
-    static class JoinComparatorFactory implements ITuplePairComparatorFactory {
-        private static final long serialVersionUID = 1L;
-
-        private final IBinaryComparatorFactory bFactory;
-        private final int pos0;
-        private final int pos1;
-
-        public JoinComparatorFactory(IBinaryComparatorFactory bFactory, int pos0, int pos1) {
-            this.bFactory = bFactory;
-            this.pos0 = pos0;
-            this.pos1 = pos1;
-        }
-
-        @Override
-        public ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
-            return new JoinComparator(bFactory.createBinaryComparator(), pos0, pos1);
-        }
-    }
-
-    static class JoinComparator implements ITuplePairComparator {
-
-        private final IBinaryComparator bComparator;
-        private final int field0;
-        private final int field1;
-
-        public JoinComparator(IBinaryComparator bComparator, int field0, int field1) {
-            this.bComparator = bComparator;
-            this.field0 = field0;
-            this.field1 = field1;
-        }
-
-        @Override
-        public int compare(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1) {
-            int tStart0 = accessor0.getTupleStartOffset(tIndex0);
-            int fStartOffset0 = accessor0.getFieldSlotsLength() + tStart0;
-
-            int tStart1 = accessor1.getTupleStartOffset(tIndex1);
-            int fStartOffset1 = accessor1.getFieldSlotsLength() + tStart1;
-
-            int fStart0 = accessor0.getFieldStartOffset(tIndex0, field0);
-            int fEnd0 = accessor0.getFieldEndOffset(tIndex0, field0);
-            int fLen0 = fEnd0 - fStart0;
-
-            int fStart1 = accessor1.getFieldStartOffset(tIndex1, field1);
-            int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
-            int fLen1 = fEnd1 - fStart1;
-
-            int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
-                    .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
-            if (c != 0) {
-                return c;
-            }
-            return 0;
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-hadoop-compat/pom.xml b/hyracks-hadoop-compat/pom.xml
deleted file mode 100644
index 6649357..0000000
--- a/hyracks-hadoop-compat/pom.xml
+++ /dev/null
@@ -1,87 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-hadoop-compat</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.hadoop.compat.driver.CompatibilityLayer</mainClass>
-                  <name>hadoop-compat</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>org.apache.hadoop</groupId>
-  		<artifactId>hadoop-core</artifactId>
-  		<version>0.20.2</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-    <dependency>
-        <groupId>edu.uci.ics.dcache</groupId>
-        <artifactId>dcache-client</artifactId>
-        <version>0.0.1</version>
-        <scope>compile</scope>
-    </dependency>
-    <dependency>
-    	<groupId>edu.uci.ics.hyracks</groupId>
-    	<artifactId>hyracks-dataflow-hadoop</artifactId>
-    	<version>0.2.2-SNAPSHOT</version>
-    	<type>jar</type>
-    	<scope>compile</scope>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-ipc/pom.xml b/hyracks-ipc/pom.xml
deleted file mode 100644
index e87969b..0000000
--- a/hyracks-ipc/pom.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-ipc</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>junit</groupId>
-  	<artifactId>junit</artifactId>
-  	<version>4.8.1</version>
-  	<scope>test</scope>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml b/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
deleted file mode 100644
index bed8f0b..0000000
--- a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
-  <packaging>maven-plugin</packaging>
-  <name>Hyracks VirtualCluster Maven Plugin</name>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-maven-plugins</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/hyracks-maven-plugins/pom.xml b/hyracks-maven-plugins/pom.xml
deleted file mode 100644
index 11b9f63..0000000
--- a/hyracks-maven-plugins/pom.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-maven-plugins</artifactId>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <dependencies>
-  	<dependency>
-  		<groupId>org.apache.maven</groupId>
-  		<artifactId>maven-plugin-api</artifactId>
-  		<version>2.2.1</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-
-  <modules>
-    <module>hyracks-virtualcluster-maven-plugin</module>
-  </modules>
-</project>
diff --git a/hyracks-net/pom.xml b/hyracks-net/pom.xml
deleted file mode 100644
index b8134e9..0000000
--- a/hyracks-net/pom.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-net</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>junit</groupId>
-  	<artifactId>junit</artifactId>
-  	<version>4.8.1</version>
-  	<scope>test</scope>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java b/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java
deleted file mode 100644
index dabc8a4..0000000
--- a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java
+++ /dev/null
@@ -1,234 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.net.protocols.muxdemux;
-
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.net.exceptions.NetException;
-
-public class ChannelSet {
-    private static final Logger LOGGER = Logger.getLogger(ChannelSet.class.getName());
-
-    private static final int INITIAL_SIZE = 16;
-
-    private final MultiplexedConnection mConn;
-
-    private ChannelControlBlock[] ccbArray;
-
-    private final BitSet allocationBitmap;
-
-    private final BitSet pendingChannelWriteBitmap;
-
-    private final BitSet pendingChannelCreditsBitmap;
-
-    private final BitSet pendingChannelSynBitmap;
-
-    private final BitSet pendingEOSAckBitmap;
-
-    private int openChannelCount;
-
-    private final IEventCounter pendingWriteEventsCounter;
-
-    ChannelSet(MultiplexedConnection mConn, IEventCounter pendingWriteEventsCounter) {
-        this.mConn = mConn;
-        ccbArray = new ChannelControlBlock[INITIAL_SIZE];
-        allocationBitmap = new BitSet();
-        pendingChannelWriteBitmap = new BitSet();
-        pendingChannelCreditsBitmap = new BitSet();
-        pendingChannelSynBitmap = new BitSet();
-        pendingEOSAckBitmap = new BitSet();
-        this.pendingWriteEventsCounter = pendingWriteEventsCounter;
-        openChannelCount = 0;
-    }
-
-    ChannelControlBlock allocateChannel() throws NetException {
-        synchronized (mConn) {
-            int idx = allocationBitmap.nextClearBit(0);
-            if (idx < 0 || idx >= ccbArray.length) {
-                cleanupClosedChannels();
-                idx = allocationBitmap.nextClearBit(0);
-                if (idx < 0 || idx == ccbArray.length) {
-                    idx = ccbArray.length;
-                }
-            }
-            return createChannel(idx);
-        }
-    }
-
-    private void cleanupClosedChannels() {
-        for (int i = 0; i < ccbArray.length; ++i) {
-            ChannelControlBlock ccb = ccbArray[i];
-            if (ccb != null) {
-                if (ccb.completelyClosed()) {
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("Cleaning free channel: " + ccb);
-                    }
-                    freeChannel(ccb);
-                }
-            }
-        }
-    }
-
-    ChannelControlBlock registerChannel(int channelId) throws NetException {
-        synchronized (mConn) {
-            return createChannel(channelId);
-        }
-    }
-
-    private void freeChannel(ChannelControlBlock channel) {
-        int idx = channel.getChannelId();
-        ccbArray[idx] = null;
-        allocationBitmap.clear(idx);
-        pendingChannelWriteBitmap.clear(idx);
-        pendingChannelCreditsBitmap.clear(idx);
-        pendingChannelSynBitmap.clear(idx);
-        pendingEOSAckBitmap.clear(idx);
-        --openChannelCount;
-    }
-
-    ChannelControlBlock getCCB(int channelId) {
-        return ccbArray[channelId];
-    }
-
-    BitSet getPendingChannelWriteBitmap() {
-        return pendingChannelWriteBitmap;
-    }
-
-    BitSet getPendingChannelCreditsBitmap() {
-        return pendingChannelCreditsBitmap;
-    }
-
-    BitSet getPendingChannelSynBitmap() {
-        return pendingChannelSynBitmap;
-    }
-
-    BitSet getPendingEOSAckBitmap() {
-        return pendingEOSAckBitmap;
-    }
-
-    int getOpenChannelCount() {
-        return openChannelCount;
-    }
-
-    void initiateChannelSyn(int channelId) {
-        synchronized (mConn) {
-            assert !pendingChannelSynBitmap.get(channelId);
-            pendingChannelSynBitmap.set(channelId);
-            pendingWriteEventsCounter.increment();
-        }
-    }
-
-    void addPendingCredits(int channelId, int delta) {
-        if (delta <= 0) {
-            return;
-        }
-        synchronized (mConn) {
-            ChannelControlBlock ccb = ccbArray[channelId];
-            if (ccb != null) {
-                if (ccb.getRemoteEOS()) {
-                    return;
-                }
-                int oldCredits = ccb.getReadCredits();
-                ccb.setReadCredits(oldCredits + delta);
-                if (oldCredits == 0) {
-                    assert !pendingChannelCreditsBitmap.get(channelId);
-                    pendingChannelCreditsBitmap.set(channelId);
-                    pendingWriteEventsCounter.increment();
-                }
-            }
-        }
-    }
-
-    void unmarkPendingCredits(int channelId) {
-        synchronized (mConn) {
-            if (pendingChannelCreditsBitmap.get(channelId)) {
-                pendingChannelCreditsBitmap.clear(channelId);
-                pendingWriteEventsCounter.decrement();
-            }
-        }
-    }
-
-    void markPendingWrite(int channelId) {
-        synchronized (mConn) {
-            assert !pendingChannelWriteBitmap.get(channelId);
-            pendingChannelWriteBitmap.set(channelId);
-            pendingWriteEventsCounter.increment();
-        }
-    }
-
-    void unmarkPendingWrite(int channelId) {
-        synchronized (mConn) {
-            assert pendingChannelWriteBitmap.get(channelId);
-            pendingChannelWriteBitmap.clear(channelId);
-            pendingWriteEventsCounter.decrement();
-        }
-    }
-
-    void markEOSAck(int channelId) {
-        synchronized (mConn) {
-            if (!pendingEOSAckBitmap.get(channelId)) {
-                pendingEOSAckBitmap.set(channelId);
-                pendingWriteEventsCounter.increment();
-            }
-        }
-    }
-
-    void notifyIOError() {
-        synchronized (mConn) {
-            for (int i = 0; i < ccbArray.length; ++i) {
-                ChannelControlBlock ccb = ccbArray[i];
-                if (ccb != null && !ccb.getRemoteEOS()) {
-                    ccb.reportRemoteError(-1);
-                    markEOSAck(i);
-                    unmarkPendingCredits(i);
-                }
-            }
-        }
-    }
-
-    private ChannelControlBlock createChannel(int idx) throws NetException {
-        if (idx > MuxDemuxCommand.MAX_CHANNEL_ID) {
-            throw new NetException("Channel Id > " + MuxDemuxCommand.MAX_CHANNEL_ID + " being opened");
-        }
-        if (idx >= ccbArray.length) {
-            expand(idx);
-        }
-        if (ccbArray[idx] != null) {
-            assert ccbArray[idx].completelyClosed() : ccbArray[idx].toString();
-            if (ccbArray[idx].completelyClosed()) {
-                if (LOGGER.isLoggable(Level.FINE)) {
-                    LOGGER.fine("Cleaning free channel: " + ccbArray[idx]);
-                }
-                freeChannel(ccbArray[idx]);
-            }
-        }
-        assert idx < ccbArray.length;
-        assert !allocationBitmap.get(idx);
-        ChannelControlBlock channel = new ChannelControlBlock(this, idx);
-        ccbArray[idx] = channel;
-        allocationBitmap.set(idx);
-        ++openChannelCount;
-        return channel;
-    }
-
-    private void expand(int idx) {
-        while (idx >= ccbArray.length) {
-            ccbArray = Arrays.copyOf(ccbArray, ccbArray.length * 2);
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-server/pom.xml b/hyracks-server/pom.xml
deleted file mode 100644
index d777a91..0000000
--- a/hyracks-server/pom.xml
+++ /dev/null
@@ -1,86 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-server</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
-                  <name>hyrackscc</name>
-                </program>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
-                  <name>hyracksnc</name>
-                </program>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.server.drivers.VirtualClusterDriver</mainClass>
-                  <name>hyracks-virtual-cluster</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-control-cc</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-control-nc</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-bloomfilter/pom.xml b/hyracks-storage-am-bloomfilter/pom.xml
deleted file mode 100644
index dab96f9..0000000
--- a/hyracks-storage-am-bloomfilter/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-bloomfilter</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>  	  		
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
deleted file mode 100644
index 9c9a7be..0000000
--- a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter.impls;
-
-/**
- * This class has been taken from cassandra source code with minor modifications.
- */
-
-/**
- * The following calculations are taken from:
- * http://www.cs.wisc.edu/~cao/papers/summary-cache/node8.html
- * "Bloom Filters - the math"
- * This class's static methods are meant to facilitate the use of the Bloom
- * Filter class by helping to choose correct values of 'bits per element' and
- * 'number of hash functions, k'.
- */
-public class BloomCalculations {
-
-    private static final int minBuckets = 2;
-    private static final int minK = 1;
-
-    /**
-     * In the following table, the row 'i' shows false positive rates if i buckets
-     * per element are used. Column 'j' shows false positive rates if j hash
-     * functions are used. The first row is 'i=0', the first column is 'j=0'.
-     * Each cell (i,j) the false positive rate determined by using i buckets per
-     * element and j hash functions.
-     */
-    static final double[][] probs = new double[][] {
-            { 1.0 }, // dummy row representing 0 buckets per element
-            { 1.0, 1.0 }, // dummy row representing 1 buckets per element
-            { 1.0, 0.393, 0.400 },
-            { 1.0, 0.283, 0.237, 0.253 },
-            { 1.0, 0.221, 0.155, 0.147, 0.160 },
-            { 1.0, 0.181, 0.109, 0.092, 0.092, 0.101 }, // 5
-            { 1.0, 0.154, 0.0804, 0.0609, 0.0561, 0.0578, 0.0638 },
-            { 1.0, 0.133, 0.0618, 0.0423, 0.0359, 0.0347, 0.0364 },
-            { 1.0, 0.118, 0.0489, 0.0306, 0.024, 0.0217, 0.0216, 0.0229 },
-            { 1.0, 0.105, 0.0397, 0.0228, 0.0166, 0.0141, 0.0133, 0.0135, 0.0145 },
-            { 1.0, 0.0952, 0.0329, 0.0174, 0.0118, 0.00943, 0.00844, 0.00819, 0.00846 }, // 10
-            { 1.0, 0.0869, 0.0276, 0.0136, 0.00864, 0.0065, 0.00552, 0.00513, 0.00509 },
-            { 1.0, 0.08, 0.0236, 0.0108, 0.00646, 0.00459, 0.00371, 0.00329, 0.00314 },
-            { 1.0, 0.074, 0.0203, 0.00875, 0.00492, 0.00332, 0.00255, 0.00217, 0.00199, 0.00194 },
-            { 1.0, 0.0689, 0.0177, 0.00718, 0.00381, 0.00244, 0.00179, 0.00146, 0.00129, 0.00121, 0.0012 },
-            { 1.0, 0.0645, 0.0156, 0.00596, 0.003, 0.00183, 0.00128, 0.001, 0.000852, 0.000775, 0.000744 }, // 15
-            { 1.0, 0.0606, 0.0138, 0.005, 0.00239, 0.00139, 0.000935, 0.000702, 0.000574, 0.000505, 0.00047, 0.000459 },
-            { 1.0, 0.0571, 0.0123, 0.00423, 0.00193, 0.00107, 0.000692, 0.000499, 0.000394, 0.000335, 0.000302,
-                    0.000287, 0.000284 },
-            { 1.0, 0.054, 0.0111, 0.00362, 0.00158, 0.000839, 0.000519, 0.00036, 0.000275, 0.000226, 0.000198,
-                    0.000183, 0.000176 },
-            { 1.0, 0.0513, 0.00998, 0.00312, 0.0013, 0.000663, 0.000394, 0.000264, 0.000194, 0.000155, 0.000132,
-                    0.000118, 0.000111, 0.000109 },
-            { 1.0, 0.0488, 0.00906, 0.0027, 0.00108, 0.00053, 0.000303, 0.000196, 0.00014, 0.000108, 8.89e-05,
-                    7.77e-05, 7.12e-05, 6.79e-05, 6.71e-05 } // 20
-    }; // the first column is a dummy column representing K=0.
-
-    /**
-     * The optimal number of hashes for a given number of bits per element.
-     * These values are automatically calculated from the data above.
-     */
-    private static final int[] optKPerBuckets = new int[probs.length];
-
-    static {
-        for (int i = 0; i < probs.length; i++) {
-            double min = Double.MAX_VALUE;
-            double[] prob = probs[i];
-            for (int j = 0; j < prob.length; j++) {
-                if (prob[j] < min) {
-                    min = prob[j];
-                    optKPerBuckets[i] = Math.max(minK, j);
-                }
-            }
-        }
-    }
-
-    /**
-     * Given the number of buckets that can be used per element, return a
-     * specification that minimizes the false positive rate.
-     * 
-     * @param bucketsPerElement
-     *            The number of buckets per element for the filter.
-     * @return A spec that minimizes the false positive rate.
-     */
-    public static BloomFilterSpecification computeBloomSpec(int bucketsPerElement) {
-        assert bucketsPerElement >= 1;
-        assert bucketsPerElement <= probs.length - 1;
-        return new BloomFilterSpecification(optKPerBuckets[bucketsPerElement], bucketsPerElement);
-    }
-
-    /**
-     * Given a maximum tolerable false positive probability, compute a Bloom
-     * specification which will give less than the specified false positive rate,
-     * but minimize the number of buckets per element and the number of hash
-     * functions used. Because bandwidth (and therefore total bitvector size)
-     * is considered more expensive than computing power, preference is given
-     * to minimizing buckets per element rather than number of hash functions.
-     * 
-     * @param maxBucketsPerElement
-     *            The maximum number of buckets available for the filter.
-     * @param maxFalsePosProb
-     *            The maximum tolerable false positive rate.
-     * @return A Bloom Specification which would result in a false positive rate
-     *         less than specified by the function call
-     * @throws UnsupportedOperationException
-     *             if a filter satisfying the parameters cannot be met
-     */
-    public static BloomFilterSpecification computeBloomSpec(int maxBucketsPerElement, double maxFalsePosProb) {
-        assert maxBucketsPerElement >= 1;
-        assert maxBucketsPerElement <= probs.length - 1;
-        int maxK = probs[maxBucketsPerElement].length - 1;
-
-        // Handle the trivial cases
-        if (maxFalsePosProb >= probs[minBuckets][minK]) {
-            return new BloomFilterSpecification(2, optKPerBuckets[2]);
-        }
-        if (maxFalsePosProb < probs[maxBucketsPerElement][maxK]) {
-            throw new UnsupportedOperationException(String.format("Unable to satisfy %s with %s buckets per element",
-                    maxFalsePosProb, maxBucketsPerElement));
-        }
-
-        // First find the minimal required number of buckets:
-        int bucketsPerElement = 2;
-        int K = optKPerBuckets[2];
-        while (probs[bucketsPerElement][K] > maxFalsePosProb) {
-            bucketsPerElement++;
-            K = optKPerBuckets[bucketsPerElement];
-        }
-        // Now that the number of buckets is sufficient, see if we can relax K
-        // without losing too much precision.
-        while (probs[bucketsPerElement][K - 1] <= maxFalsePosProb) {
-            K--;
-        }
-
-        return new BloomFilterSpecification(K, bucketsPerElement);
-    }
-
-    /**
-     * Calculates the maximum number of buckets per element that this implementation
-     * can support. Crucially, it will lower the bucket count if necessary to meet
-     * BitSet's size restrictions.
-     */
-    public static int maxBucketsPerElement(long numElements) {
-        numElements = Math.max(1, numElements);
-        double v = Long.MAX_VALUE / (double) numElements;
-        if (v < 1.0) {
-            throw new UnsupportedOperationException("Cannot compute probabilities for " + numElements + " elements.");
-        }
-        return Math.min(BloomCalculations.probs.length - 1, (int) v);
-    }
-}
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilter.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
deleted file mode 100644
index 0e796b0..0000000
--- a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter.impls;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class BloomFilter {
-
-    private final static int METADATA_PAGE_ID = 0;
-    private final static int NUM_PAGES_OFFSET = 0; // 0
-    private final static int NUM_HASHES_USED_OFFSET = NUM_PAGES_OFFSET + 4; // 4
-    private final static int NUM_ELEMENTS_OFFSET = NUM_HASHES_USED_OFFSET + 4; // 8
-    private final static int NUM_BITS_OFFSET = NUM_ELEMENTS_OFFSET + 8; // 12
-
-    private final IBufferCache bufferCache;
-    private final IFileMapProvider fileMapProvider;
-    private final FileReference file;
-    private final int[] keyFields;
-    private int fileId = -1;
-    private boolean isActivated = false;
-
-    private int numPages;
-    private int numHashes;
-    private long numElements;
-    private long numBits;
-    private int numBitsPerPage;
-
-    private final ArrayList<ICachedPage> bloomFilterPages = new ArrayList<ICachedPage>();
-    private final static long SEED = 0L;
-
-    public BloomFilter(IBufferCache bufferCache, IFileMapProvider fileMapProvider, FileReference file, int[] keyFields)
-            throws HyracksDataException {
-        this.bufferCache = bufferCache;
-        this.fileMapProvider = fileMapProvider;
-        this.file = file;
-        this.keyFields = keyFields;
-        numBitsPerPage = bufferCache.getPageSize() * Byte.SIZE;
-    }
-
-    public int getFileId() {
-        return fileId;
-    }
-
-    public FileReference getFileReference() {
-        return file;
-    }
-
-    public int getNumPages() throws HyracksDataException {
-        if (!isActivated) {
-            throw new HyracksDataException("The bloom filter is not activated.");
-        }
-        return numPages;
-    }
-
-    public long getNumElements() throws HyracksDataException {
-        if (!isActivated) {
-            throw new HyracksDataException("The bloom filter is not activated.");
-        }
-        return numElements;
-    }
-
-    public boolean contains(ITupleReference tuple, long[] hashes) {
-        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, SEED, hashes);
-        for (int i = 0; i < numHashes; ++i) {
-            long hash = Math.abs((hashes[0] + (long) i * hashes[1]) % numBits);
-
-            ByteBuffer buffer = bloomFilterPages.get((int) (hash / numBitsPerPage)).getBuffer();
-            int byteIndex = (int) (hash % numBitsPerPage) >> 3; // divide by 8
-            byte b = buffer.get(byteIndex);
-            int bitIndex = (int) (hash % numBitsPerPage) & 0x07; // mod 8
-
-            if (!((b & (1L << bitIndex)) != 0)) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    private void prepareFile() throws HyracksDataException {
-        boolean fileIsMapped = false;
-        synchronized (fileMapProvider) {
-            fileIsMapped = fileMapProvider.isMapped(file);
-            if (!fileIsMapped) {
-                bufferCache.createFile(file);
-            }
-            fileId = fileMapProvider.lookupFileId(file);
-            try {
-                // Also creates the file if it doesn't exist yet.
-                bufferCache.openFile(fileId);
-            } catch (HyracksDataException e) {
-                // Revert state of buffer cache since file failed to open.
-                if (!fileIsMapped) {
-                    bufferCache.deleteFile(fileId, false);
-                }
-                throw e;
-            }
-        }
-    }
-
-    public synchronized void create() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to create the bloom filter since it is activated.");
-        }
-        prepareFile();
-        ICachedPage metaPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, METADATA_PAGE_ID), true);
-        metaPage.acquireWriteLatch();
-        metaPage.getBuffer().putInt(NUM_PAGES_OFFSET, 0);
-        metaPage.getBuffer().putInt(NUM_HASHES_USED_OFFSET, 0);
-        metaPage.getBuffer().putLong(NUM_ELEMENTS_OFFSET, 0L);
-        metaPage.getBuffer().putLong(NUM_BITS_OFFSET, 0L);
-        metaPage.releaseWriteLatch();
-        bufferCache.unpin(metaPage);
-        bufferCache.closeFile(fileId);
-    }
-
-    public synchronized void activate() throws HyracksDataException {
-        if (isActivated) {
-            return;
-        }
-
-        prepareFile();
-        readBloomFilterMetaData();
-
-        int currentPageId = 1;
-        while (currentPageId <= numPages) {
-            ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
-            bloomFilterPages.add(page);
-            ++currentPageId;
-        }
-        isActivated = true;
-    }
-
-    private void readBloomFilterMetaData() throws HyracksDataException {
-        ICachedPage metaPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, METADATA_PAGE_ID), false);
-        metaPage.acquireReadLatch();
-        numPages = metaPage.getBuffer().getInt(NUM_PAGES_OFFSET);
-        numHashes = metaPage.getBuffer().getInt(NUM_HASHES_USED_OFFSET);
-        numElements = metaPage.getBuffer().getLong(NUM_ELEMENTS_OFFSET);
-        numBits = metaPage.getBuffer().getLong(NUM_BITS_OFFSET);
-        metaPage.releaseReadLatch();
-        bufferCache.unpin(metaPage);
-    }
-
-    public synchronized void deactivate() throws HyracksDataException {
-        if (!isActivated) {
-            return;
-        }
-
-        for (int i = 0; i < numPages; ++i) {
-            bufferCache.unpin(bloomFilterPages.get(i));
-        }
-        bloomFilterPages.clear();
-        bufferCache.closeFile(fileId);
-        isActivated = false;
-    }
-
-    public synchronized void destroy() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to destroy the bloom filter since it is activated.");
-        }
-
-        file.delete();
-        if (fileId == -1) {
-            return;
-        }
-        bufferCache.deleteFile(fileId, false);
-        fileId = -1;
-    }
-
-    public IIndexBulkLoader createBuilder(long numElements, int numHashes, int numBitsPerElement)
-            throws HyracksDataException {
-        return new BloomFilterBuilder(numElements, numHashes, numBitsPerElement);
-    }
-
-    public class BloomFilterBuilder implements IIndexBulkLoader {
-        private final long[] hashes = new long[2];
-
-        private final long numElements;
-        private final int numHashes;
-        private final long numBits;
-        private final int numPages;
-
-        public BloomFilterBuilder(long numElements, int numHashes, int numBitsPerElement) throws HyracksDataException {
-            if (!isActivated) {
-                throw new HyracksDataException("Failed to create the bloom filter builder since it is not activated.");
-            }
-
-            this.numElements = numElements;
-            this.numHashes = numHashes;
-            numBits = numElements * numBitsPerElement;
-            long tmp = (long) Math.ceil(numBits / (double) numBitsPerPage);
-            if (tmp > Integer.MAX_VALUE) {
-                throw new HyracksDataException("Cannot create a bloom filter with his huge number of pages.");
-            }
-            numPages = (int) tmp;
-            if (numElements > 0) {
-                persistBloomFilterMetaData();
-                readBloomFilterMetaData();
-                int currentPageId = 1;
-                while (currentPageId <= numPages) {
-                    ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
-                    page.acquireWriteLatch();
-                    bloomFilterPages.add(page);
-                    ++currentPageId;
-                }
-            }
-        }
-
-        private void persistBloomFilterMetaData() throws HyracksDataException {
-            ICachedPage metaPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, METADATA_PAGE_ID), false);
-            metaPage.acquireWriteLatch();
-            metaPage.getBuffer().putInt(NUM_PAGES_OFFSET, numPages);
-            metaPage.getBuffer().putInt(NUM_HASHES_USED_OFFSET, numHashes);
-            metaPage.getBuffer().putLong(NUM_ELEMENTS_OFFSET, numElements);
-            metaPage.getBuffer().putLong(NUM_BITS_OFFSET, numBits);
-            metaPage.releaseWriteLatch();
-            bufferCache.unpin(metaPage);
-        }
-
-        @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
-            MurmurHash128Bit.hash3_x64_128(tuple, keyFields, SEED, hashes);
-            for (int i = 0; i < numHashes; ++i) {
-                long hash = Math.abs((hashes[0] + (long) i * hashes[1]) % numBits);
-
-                ByteBuffer buffer = bloomFilterPages.get((int) (hash / numBitsPerPage)).getBuffer();
-                int byteIndex = (int) (hash % numBitsPerPage) >> 3; // divide by 8
-                byte b = buffer.get(byteIndex);
-                int bitIndex = (int) (hash % numBitsPerPage) & 0x07; // mod 8
-                b = (byte) (b | (1 << bitIndex));
-
-                buffer.put(byteIndex, b);
-            }
-        }
-
-        @Override
-        public void end() throws HyracksDataException, IndexException {
-            for (int i = 0; i < numPages; ++i) {
-                ICachedPage page = bloomFilterPages.get(i);
-                page.releaseWriteLatch();
-            }
-        }
-
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilterFactory.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilterFactory.java
deleted file mode 100644
index d430e54..0000000
--- a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilterFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class BloomFilterFactory {
-    private final IBufferCache bufferCache;
-    private final IFileMapProvider fileMapProvider;
-    private final int[] bloomFilterKeyFields;
-
-    public BloomFilterFactory(IBufferCache bufferCache, IFileMapProvider fileMapProvider, int[] bloomFilterKeyFields) {
-        this.bufferCache = bufferCache;
-        this.fileMapProvider = fileMapProvider;
-        this.bloomFilterKeyFields = bloomFilterKeyFields;
-    }
-
-    public BloomFilter createBloomFiltertInstance(FileReference file) throws HyracksDataException {
-        return new BloomFilter(bufferCache, fileMapProvider, file, bloomFilterKeyFields);
-    }
-
-    public int[] getBloomFilterKeyFields() {
-        return bloomFilterKeyFields;
-    }
-}
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilterSpecification.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilterSpecification.java
deleted file mode 100644
index a1e5517..0000000
--- a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/BloomFilterSpecification.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter.impls;
-
-public final class BloomFilterSpecification {
-    private final int numBucketsPerElement;
-    private final int numHashes;
-
-    public BloomFilterSpecification(int numBucketsPerElement, int numHashes) {
-        this.numBucketsPerElement = numBucketsPerElement;
-        this.numHashes = numHashes;
-    }
-
-    public int getNumBucketsPerElements() {
-        return numBucketsPerElement;
-    }
-
-    public int getNumHashes() {
-        return numHashes;
-    }
-}
diff --git a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/MurmurHash128Bit.java b/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/MurmurHash128Bit.java
deleted file mode 100644
index 0bc0a7f..0000000
--- a/hyracks-storage-am-bloomfilter/src/main/java/edu/uci/ics/hyracks/storage/am/bloomfilter/impls/MurmurHash128Bit.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-/**
- * The idea of this class is borrowed from http://murmurhash.googlepages.com/ and cassandra source code.
- * We changed the hash function to operate on ITupleReference instead of a byte array.
- **/
-public class MurmurHash128Bit {
-
-    private final static int DUMMY_FIELD = 0;
-
-    public static long rotl64(long v, int n) {
-        return ((v << n) | (v >>> (64 - n)));
-    }
-
-    public static long fmix(long k) {
-        k ^= k >>> 33;
-        k *= 0xff51afd7ed558ccdL;
-        k ^= k >>> 33;
-        k *= 0xc4ceb9fe1a85ec53L;
-        k ^= k >>> 33;
-
-        return k;
-    }
-
-    public static void hash3_x64_128(ITupleReference tuple, int[] keyFields, long seed, long[] hashes) {
-        int length = 0;
-        for (int i = 0; i < keyFields.length; ++i) {
-            length += tuple.getFieldLength(keyFields[i]);
-        }
-        final int nblocks = length >> 4; // Process as 128-bit blocks.
-
-        long h1 = seed;
-        long h2 = seed;
-
-        long c1 = 0x87c37b91114253d5L;
-        long c2 = 0x4cf5ad432745937fL;
-
-        //----------
-        // body
-
-        int currentFieldIndex = 0;
-        int bytePos = 0;
-        for (int i = 0; i < nblocks; ++i) {
-
-            long k1 = 0L;
-            for (int j = 0; j < 8; ++j) {
-                k1 += (((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos] & 0xff) << (j << 3));
-                ++bytePos;
-                if (tuple.getFieldLength(keyFields[currentFieldIndex]) == bytePos) {
-                    ++currentFieldIndex;
-                    bytePos = 0;
-                }
-            }
-            long k2 = 0L;
-            for (int j = 0; j < 8; ++j) {
-                k2 += (((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos] & 0xff) << (j << 3));
-                ++bytePos;
-                if (tuple.getFieldLength(keyFields[currentFieldIndex]) == bytePos) {
-                    ++currentFieldIndex;
-                    bytePos = 0;
-                }
-            }
-
-            k1 *= c1;
-            k1 = rotl64(k1, 31);
-            k1 *= c2;
-            h1 ^= k1;
-
-            h1 = rotl64(h1, 27);
-            h1 += h2;
-            h1 = h1 * 5 + 0x52dce729;
-
-            k2 *= c2;
-            k2 = rotl64(k2, 33);
-            k2 *= c1;
-            h2 ^= k2;
-
-            h2 = rotl64(h2, 31);
-            h2 += h1;
-            h2 = h2 * 5 + 0x38495ab5;
-        }
-
-        //----------
-        // tail
-
-        long k1 = 0L;
-        long k2 = 0L;
-
-        currentFieldIndex = keyFields.length - 1;
-        bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-        switch (length & 15) {
-            case 15:
-                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 48;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 14:
-                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 40;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 13:
-                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 32;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 12:
-                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 24;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 11:
-                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 16;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 10:
-                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 8;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 9:
-                k2 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]);
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-                k2 *= c2;
-                k2 = rotl64(k2, 33);
-                k2 *= c1;
-                h2 ^= k2;
-
-            case 8:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 56;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 7:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 48;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 6:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 40;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 5:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 32;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 4:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 24;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 3:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 16;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 2:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]) << 8;
-                --bytePos;
-                if (bytePos == -1) {
-                    --currentFieldIndex;
-                    bytePos = tuple.getFieldLength(keyFields[currentFieldIndex]) - 1;
-                }
-            case 1:
-                k1 ^= ((long) tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex])
-                        + bytePos]);
-                k1 *= c1;
-                k1 = rotl64(k1, 31);
-                k1 *= c2;
-                h1 ^= k1;
-        };
-
-        //----------
-        // finalization
-
-        h1 ^= length;
-        h2 ^= length;
-
-        h1 += h2;
-        h2 += h1;
-
-        h1 = fmix(h1);
-        h2 = fmix(h2);
-
-        h1 += h2;
-        h2 += h1;
-
-        hashes[0] = h1;
-        hashes[1] = h2;
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/pom.xml b/hyracks-storage-am-btree/pom.xml
deleted file mode 100644
index f673be8..0000000
--- a/hyracks-storage-am-btree/pom.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-btree</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-        <dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>  	  		
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
deleted file mode 100644
index ee3fd90..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-
-public interface IBTreeFrame extends ITreeIndexFrame {
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
-
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException;
-
-    public void insertSorted(ITupleReference tuple);
-
-    public void setSmFlag(boolean smFlag);
-
-    public boolean getSmFlag();
-
-    public void validate(PageValidationInfo pvi) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
deleted file mode 100644
index ffdcc5c..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-
-public interface IBTreeInteriorFrame extends IBTreeFrame {
-    public int getChildPageId(RangePredicate pred) throws HyracksDataException;
-
-    public int getLeftmostChildPageId();
-
-    public int getRightmostChildPageId();
-
-    public void setRightmostChildPageId(int pageId);
-
-    public void deleteGreatest();
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
deleted file mode 100644
index bbb67bd..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface IBTreeLeafFrame extends IBTreeFrame {
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
-            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) throws HyracksDataException;
-
-    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException;
-
-    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
-
-    /**
-     * @param searchTuple the tuple to match 
-     * @param targetTupleIndex the index of the tuple to check
-     * @return the tuple at targetTupleIndex if its keys match searchTuple's keys, otherwise null 
-     */
-    public ITupleReference getMatchingKeyTuple(ITupleReference searchTuple, int targetTupleIndex);
-
-    public void setNextLeaf(int nextPage);
-
-    public int getNextLeaf();
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
deleted file mode 100644
index 6ec5eef..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-/**
- *  A slot consists of two fields. The first field is 1 byte and it indicates the slot number of 
- *  a prefix tuple that is called the first field prefixSlotOff. The second field is 3 bytes and 
- *  it points to the start offset of a tuple that is called the second field tupleOff.
- *  
- *  We distinguish between two slot types:
- *      1) prefix slots that point to prefix tuples (a frame is assumed to have a field numPrefixTuples)
- *      2) tuple slots that point to data tuples (a frame is assumed to have a field numTuples)
- *      
- *  A tuple slot contains a tuple pointer and a pointer to a prefix slot (prefix slot number).
- *  
- *  INSERT procedure:
- *      - A tuple insertion may use an existing prefix tuple
- *      - A tuple insertion may never create a new prefix tuple
- *  
- *  Modifying the prefix slots would be extremely expensive because potentially all tuples slots 
- *  would have to change their prefix slot pointers. All prefixes are recomputed during a reorg 
- *  or compaction.
- */
-public interface IPrefixSlotManager extends ISlotManager {
-    // TODO: Clean up interface after extending ISlotManager.
-
-    public int decodeFirstSlotField(int slot);
-
-    public int decodeSecondSlotField(int slot);
-
-    public int encodeSlotFields(int firstField, int secondField);
-
-    public int findSlot(ITupleReference searchKey, ITreeIndexTupleReference frameTuple,
-            ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
-            FindTupleNoExactMatchPolicy matchPolicy);
-
-    public int insertSlot(int slot, int tupleOff);
-
-    /** 
-     * @return the prefix slot number or FieldPrefixSlotManager.TUPLE_UNCOMPRESSED if none found
-     */
-    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple);
-
-    public int getTupleSlotStartOff();
-
-    public int getTupleSlotEndOff();
-
-    public int getPrefixSlotStartOff();
-
-    public int getPrefixSlotEndOff();
-
-    public int getTupleSlotOff(int tupleIndex);
-
-    public int getPrefixSlotOff(int tupleIndex);
-
-    public int getSlotSize();
-
-    public void setSlot(int offset, int value);
-
-    // functions for testing
-    public void setPrefixSlot(int tupleIndex, int slot);
-
-    public void setMultiComparator(MultiComparator cmp);
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ITupleAcceptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ITupleAcceptor.java
deleted file mode 100644
index 5f4b30a..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ITupleAcceptor.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public interface ITupleAcceptor {
-    public boolean accept(ITupleReference tuple);
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
deleted file mode 100644
index 7e27113..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
+++ /dev/null
@@ -1,454 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.compressors;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-
-public class FieldPrefixCompressor implements ITreeIndexFrameCompressor {
-
-    // minimum ratio of uncompressed tuples to total tuple to consider re-compression
-    private final float ratioThreshold;
-
-    // minimum number of tuple matching field prefixes to consider compressing them
-    private final int occurrenceThreshold;
-
-    private final ITypeTraits[] typeTraits;
-
-    public FieldPrefixCompressor(ITypeTraits[] typeTraits, float ratioThreshold, int occurrenceThreshold) {
-        this.typeTraits = typeTraits;
-        this.ratioThreshold = ratioThreshold;
-        this.occurrenceThreshold = occurrenceThreshold;
-    }
-
-    @Override
-    public boolean compress(ITreeIndexFrame indexFrame, MultiComparator cmp) throws Exception {
-        BTreeFieldPrefixNSMLeafFrame frame = (BTreeFieldPrefixNSMLeafFrame) indexFrame;
-        int tupleCount = frame.getTupleCount();
-        if (tupleCount <= 0) {
-            frame.setPrefixTupleCount(0);
-            frame.setFreeSpaceOff(frame.getOrigFreeSpaceOff());
-            frame.setTotalFreeSpace(frame.getOrigTotalFreeSpace());
-            return false;
-        }
-
-        if (cmp.getKeyFieldCount() == 1) {
-            return false;
-        }
-
-        int uncompressedTupleCount = frame.getUncompressedTupleCount();
-        float ratio = (float) uncompressedTupleCount / (float) tupleCount;
-        if (ratio < ratioThreshold)
-            return false;
-
-        IBinaryComparator[] cmps = cmp.getComparators();
-        int fieldCount = typeTraits.length;
-
-        ByteBuffer buf = frame.getBuffer();
-        byte[] pageArray = buf.array();
-        IPrefixSlotManager slotManager = (IPrefixSlotManager) frame.getSlotManager();
-
-        // perform analysis pass
-        ArrayList<KeyPartition> keyPartitions = getKeyPartitions(frame, cmp, occurrenceThreshold);
-        if (keyPartitions.size() == 0)
-            return false;
-
-        // for each keyPartition, determine the best prefix length for
-        // compression, and count how many prefix tuple we would need in total
-        int totalSlotsNeeded = 0;
-        int totalPrefixBytes = 0;
-        for (KeyPartition kp : keyPartitions) {
-
-            for (int j = 0; j < kp.pmi.length; j++) {
-                int benefitMinusCost = kp.pmi[j].spaceBenefit - kp.pmi[j].spaceCost;
-                if (benefitMinusCost > kp.maxBenefitMinusCost) {
-                    kp.maxBenefitMinusCost = benefitMinusCost;
-                    kp.maxPmiIndex = j;
-                }
-            }
-
-            // ignore keyPartitions with no benefit and don't count bytes and slots needed
-            if (kp.maxBenefitMinusCost <= 0)
-                continue;
-
-            totalPrefixBytes += kp.pmi[kp.maxPmiIndex].prefixBytes;
-            totalSlotsNeeded += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
-        }
-
-        // we use a greedy heuristic to solve this "knapsack"-like problem
-        // (every keyPartition has a space savings and a number of slots
-        // required, but the number of slots are constrained by MAX_PREFIX_SLOTS)
-        // we sort the keyPartitions by maxBenefitMinusCost / prefixSlotsNeeded
-        // and later choose the top MAX_PREFIX_SLOTS
-        int[] newPrefixSlots;
-        if (totalSlotsNeeded > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
-            // order keyPartitions by the heuristic function
-            SortByHeuristic heuristicComparator = new SortByHeuristic();
-            Collections.sort(keyPartitions, heuristicComparator);
-            int slotsUsed = 0;
-            int numberKeyPartitions = -1;
-            for (int i = 0; i < keyPartitions.size(); i++) {
-                KeyPartition kp = keyPartitions.get(i);
-                slotsUsed += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
-                if (slotsUsed > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
-                    numberKeyPartitions = i + 1;
-                    slotsUsed -= kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
-                    break;
-                }
-            }
-            newPrefixSlots = new int[slotsUsed];
-
-            // remove irrelevant keyPartitions and adjust total prefix bytes
-            while (keyPartitions.size() >= numberKeyPartitions) {
-                int lastIndex = keyPartitions.size() - 1;
-                KeyPartition kp = keyPartitions.get(lastIndex);
-                if (kp.maxBenefitMinusCost > 0)
-                    totalPrefixBytes -= kp.pmi[kp.maxPmiIndex].prefixBytes;
-                keyPartitions.remove(lastIndex);
-            }
-
-            // re-order keyPartitions by prefix (corresponding to original order)
-            SortByOriginalRank originalRankComparator = new SortByOriginalRank();
-            Collections.sort(keyPartitions, originalRankComparator);
-        } else {
-            newPrefixSlots = new int[totalSlotsNeeded];
-        }
-
-        int[] newTupleSlots = new int[tupleCount];
-
-        // WARNING: our hope is that compression is infrequent
-        // here we allocate a big chunk of memory to temporary hold the new, re-compressed tuple
-        // in general it is very hard to avoid this step
-        int prefixFreeSpace = frame.getOrigFreeSpaceOff();
-        int tupleFreeSpace = prefixFreeSpace + totalPrefixBytes;
-        byte[] buffer = new byte[buf.capacity()];
-        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
-
-        // perform compression, and reorg
-        // we assume that the keyPartitions are sorted by the prefixes 
-        // (i.e., in the logical target order)
-        int kpIndex = 0;
-        int tupleIndex = 0;
-        int prefixTupleIndex = 0;
-        uncompressedTupleCount = 0;
-
-        TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
-        FieldPrefixTupleReference tupleToWrite = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
-        tupleToWrite.setFieldCount(fieldCount);
-
-        while (tupleIndex < tupleCount) {
-            if (kpIndex < keyPartitions.size()) {
-
-                // beginning of keyPartition found, compress entire keyPartition
-                if (tupleIndex == keyPartitions.get(kpIndex).firstTupleIndex) {
-
-                    // number of fields we decided to use for compression of this keyPartition
-                    int fieldCountToCompress = keyPartitions.get(kpIndex).maxPmiIndex + 1;
-                    int segmentStart = keyPartitions.get(kpIndex).firstTupleIndex;
-                    int tuplesInSegment = 1;
-
-                    FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(
-                            tupleWriter.createTupleReference());
-                    prevTuple.setFieldCount(fieldCount);
-
-                    FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
-                    tuple.setFieldCount(fieldCount);
-
-                    for (int i = tupleIndex + 1; i <= keyPartitions.get(kpIndex).lastTupleIndex; i++) {
-                        prevTuple.resetByTupleIndex(frame, i - 1);
-                        tuple.resetByTupleIndex(frame, i);
-
-                        // check if tuples match in fieldCountToCompress of their first fields
-                        int prefixFieldsMatch = 0;
-                        for (int j = 0; j < fieldCountToCompress; j++) {
-                            if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j),
-                                    pageArray, tuple.getFieldStart(j), tuple.getFieldLength(j)) == 0)
-                                prefixFieldsMatch++;
-                            else
-                                break;
-                        }
-
-                        // the two tuples must match in exactly the number of fields we decided 
-                        // to compress for this keyPartition
-                        int processSegments = 0;
-                        if (prefixFieldsMatch == fieldCountToCompress)
-                            tuplesInSegment++;
-                        else
-                            processSegments++;
-
-                        if (i == keyPartitions.get(kpIndex).lastTupleIndex)
-                            processSegments++;
-
-                        for (int r = 0; r < processSegments; r++) {
-                            // compress current segment and then start new segment
-                            if (tuplesInSegment < occurrenceThreshold || fieldCountToCompress <= 0) {
-                                // segment does not have at least occurrenceThreshold tuples, so 
-                                // write tuples uncompressed
-                                for (int j = 0; j < tuplesInSegment; j++) {
-                                    int slotNum = segmentStart + j;
-                                    tupleToWrite.resetByTupleIndex(frame, slotNum);
-                                    newTupleSlots[tupleCount - 1 - slotNum] = slotManager.encodeSlotFields(
-                                            FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
-                                    tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
-                                }
-                                uncompressedTupleCount += tuplesInSegment;
-                            } else {
-                                // segment has enough tuples: compress segment, extract prefix, 
-                                // write prefix tuple to buffer, and set prefix slot
-                                newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] = slotManager
-                                        .encodeSlotFields(fieldCountToCompress, prefixFreeSpace);
-                                prefixFreeSpace += tupleWriter.writeTupleFields(prevTuple, 0, fieldCountToCompress,
-                                        byteBuffer.array(), prefixFreeSpace);
-
-                                // truncate tuples, write them to buffer, and set tuple slots
-                                for (int j = 0; j < tuplesInSegment; j++) {
-                                    int currTupleIndex = segmentStart + j;
-                                    tupleToWrite.resetByTupleIndex(frame, currTupleIndex);
-                                    newTupleSlots[tupleCount - 1 - currTupleIndex] = slotManager.encodeSlotFields(
-                                            prefixTupleIndex, tupleFreeSpace);
-                                    tupleFreeSpace += tupleWriter.writeTupleFields(tupleToWrite, fieldCountToCompress,
-                                            fieldCount - fieldCountToCompress, byteBuffer.array(), tupleFreeSpace);
-                                }
-
-                                prefixTupleIndex++;
-                            }
-
-                            // begin new segment
-                            segmentStart = i;
-                            tuplesInSegment = 1;
-                        }
-                    }
-
-                    tupleIndex = keyPartitions.get(kpIndex).lastTupleIndex;
-                    kpIndex++;
-                } else {
-                    // just write the tuple uncompressed
-                    tupleToWrite.resetByTupleIndex(frame, tupleIndex);
-                    newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
-                            FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
-                    tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
-                    uncompressedTupleCount++;
-                }
-            } else {
-                // just write the tuple uncompressed
-                tupleToWrite.resetByTupleIndex(frame, tupleIndex);
-                newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
-                        FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
-                tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
-                uncompressedTupleCount++;
-            }
-            tupleIndex++;
-        }
-
-        // sanity check to see if we have written exactly as many prefix bytes as computed before
-        if (prefixFreeSpace != frame.getOrigFreeSpaceOff() + totalPrefixBytes) {
-            throw new Exception("ERROR: Number of prefix bytes written don't match computed number");
-        }
-
-        // in some rare instances our procedure could even increase the space requirement which is very dangerous
-        // this can happen to to the greedy solution of the knapsack-like problem
-        // therefore, we check if the new space exceeds the page size to avoid the only danger of 
-        // an increasing space
-        int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize() + newPrefixSlots.length
-                * slotManager.getSlotSize();
-        if (totalSpace > buf.capacity())
-            // just leave the page as is
-            return false;
-
-        // copy new tuple and new slots into original page
-        int freeSpaceAfterInit = frame.getOrigFreeSpaceOff();
-        System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace - freeSpaceAfterInit);
-
-        // copy prefix slots
-        int slotOffRunner = buf.capacity() - slotManager.getSlotSize();
-        for (int i = 0; i < newPrefixSlots.length; i++) {
-            buf.putInt(slotOffRunner, newPrefixSlots[newPrefixSlots.length - 1 - i]);
-            slotOffRunner -= slotManager.getSlotSize();
-        }
-
-        // copy tuple slots
-        for (int i = 0; i < newTupleSlots.length; i++) {
-            buf.putInt(slotOffRunner, newTupleSlots[newTupleSlots.length - 1 - i]);
-            slotOffRunner -= slotManager.getSlotSize();
-        }
-
-        // update space fields, TODO: we need to update more fields
-        frame.setFreeSpaceOff(tupleFreeSpace);
-        frame.setPrefixTupleCount(newPrefixSlots.length);
-        frame.setUncompressedTupleCount(uncompressedTupleCount);
-        int totalFreeSpace = buf.capacity() - tupleFreeSpace
-                - ((newTupleSlots.length + newPrefixSlots.length) * slotManager.getSlotSize());
-        frame.setTotalFreeSpace(totalFreeSpace);
-
-        return true;
-    }
-
-    // we perform an analysis pass over the tuples to determine the costs and
-    // benefits of different compression options
-    // a "keypartition" is a range of tuples that has an identical first field
-    // for each keypartition we chose a prefix length to use for compression
-    // i.e., all tuples in a keypartition will be compressed based on the same
-    // prefix length (number of fields)
-    // the prefix length may be different for different keypartitions
-    // the occurrenceThreshold determines the minimum number of tuples that must
-    // share a common prefix in order for us to consider compressing them
-    private ArrayList<KeyPartition> getKeyPartitions(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp,
-            int occurrenceThreshold) {
-        IBinaryComparator[] cmps = cmp.getComparators();
-        int fieldCount = typeTraits.length;
-
-        int maxCmps = cmps.length - 1;
-        ByteBuffer buf = frame.getBuffer();
-        byte[] pageArray = buf.array();
-        IPrefixSlotManager slotManager = (IPrefixSlotManager) frame.getSlotManager();
-
-        ArrayList<KeyPartition> keyPartitions = new ArrayList<KeyPartition>();
-        KeyPartition kp = new KeyPartition(maxCmps);
-        keyPartitions.add(kp);
-
-        TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
-
-        FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
-        prevTuple.setFieldCount(fieldCount);
-
-        FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
-        tuple.setFieldCount(fieldCount);
-
-        kp.firstTupleIndex = 0;
-        int tupleCount = frame.getTupleCount();
-        for (int i = 1; i < tupleCount; i++) {
-            prevTuple.resetByTupleIndex(frame, i - 1);
-            tuple.resetByTupleIndex(frame, i);
-
-            int prefixFieldsMatch = 0;
-            for (int j = 0; j < maxCmps; j++) {
-
-                if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j), pageArray,
-                        tuple.getFieldStart(j), prevTuple.getFieldLength(j)) == 0) {
-                    prefixFieldsMatch++;
-                    kp.pmi[j].matches++;
-
-                    int prefixBytes = tupleWriter.bytesRequired(tuple, 0, prefixFieldsMatch);
-                    int spaceBenefit = tupleWriter.bytesRequired(tuple)
-                            - tupleWriter.bytesRequired(tuple, prefixFieldsMatch, tuple.getFieldCount()
-                                    - prefixFieldsMatch);
-
-                    if (kp.pmi[j].matches == occurrenceThreshold) {
-                        // if we compress this prefix, we pay the cost of storing it once, plus 
-                        // the size for one prefix slot
-                        kp.pmi[j].prefixBytes += prefixBytes;
-                        kp.pmi[j].spaceCost += prefixBytes + slotManager.getSlotSize();
-                        kp.pmi[j].prefixSlotsNeeded++;
-                        kp.pmi[j].spaceBenefit += occurrenceThreshold * spaceBenefit;
-                    } else if (kp.pmi[j].matches > occurrenceThreshold) {
-                        // we are beyond the occurrence threshold, every additional tuple with a 
-                        // matching prefix increases the benefit
-                        kp.pmi[j].spaceBenefit += spaceBenefit;
-                    }
-                } else {
-                    kp.pmi[j].matches = 1;
-                    break;
-                }
-            }
-
-            // this means not even the first field matched, so we start to consider a new "key partition"
-            if (maxCmps > 0 && prefixFieldsMatch == 0) {
-                kp.lastTupleIndex = i - 1;
-
-                // remove keyPartitions that don't have enough tuples
-                if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
-                    keyPartitions.remove(keyPartitions.size() - 1);
-
-                kp = new KeyPartition(maxCmps);
-                keyPartitions.add(kp);
-                kp.firstTupleIndex = i;
-            }
-        }
-        kp.lastTupleIndex = tupleCount - 1;
-        // remove keyPartitions that don't have enough tuples
-        if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
-            keyPartitions.remove(keyPartitions.size() - 1);
-
-        return keyPartitions;
-    }
-
-    private class PrefixMatchInfo {
-        public int matches = 1;
-        public int spaceCost = 0;
-        public int spaceBenefit = 0;
-        public int prefixSlotsNeeded = 0;
-        public int prefixBytes = 0;
-    }
-
-    private class KeyPartition {
-        public int firstTupleIndex;
-        public int lastTupleIndex;
-        public PrefixMatchInfo[] pmi;
-
-        public int maxBenefitMinusCost = 0;
-        public int maxPmiIndex = -1;
-
-        // number of fields used for compression for this kp of current page
-        public KeyPartition(int numKeyFields) {
-            pmi = new PrefixMatchInfo[numKeyFields];
-            for (int i = 0; i < numKeyFields; i++) {
-                pmi[i] = new PrefixMatchInfo();
-            }
-        }
-    }
-
-    private class SortByHeuristic implements Comparator<KeyPartition> {
-        @Override
-        public int compare(KeyPartition a, KeyPartition b) {
-            if (a.maxPmiIndex < 0) {
-                if (b.maxPmiIndex < 0)
-                    return 0;
-                return 1;
-            } else if (b.maxPmiIndex < 0)
-                return -1;
-
-            // non-negative maxPmiIndex, meaning a non-zero benefit exists
-            float thisHeuristicVal = (float) a.maxBenefitMinusCost / (float) a.pmi[a.maxPmiIndex].prefixSlotsNeeded;
-            float otherHeuristicVal = (float) b.maxBenefitMinusCost / (float) b.pmi[b.maxPmiIndex].prefixSlotsNeeded;
-            if (thisHeuristicVal < otherHeuristicVal)
-                return 1;
-            else if (thisHeuristicVal > otherHeuristicVal)
-                return -1;
-            else
-                return 0;
-        }
-    }
-
-    private class SortByOriginalRank implements Comparator<KeyPartition> {
-        @Override
-        public int compare(KeyPartition a, KeyPartition b) {
-            return a.firstTupleIndex - b.firstTupleIndex;
-        }
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelper.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelper.java
deleted file mode 100644
index 982f0ed..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelper.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
-
-public class BTreeDataflowHelper extends TreeIndexDataflowHelper {
-    public BTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition) {
-        super(opDesc, ctx, partition);
-    }
-
-    @Override
-    public ITreeIndex createIndexInstance() throws HyracksDataException {
-        AbstractTreeIndexOperatorDescriptor treeOpDesc = (AbstractTreeIndexOperatorDescriptor) opDesc;
-        try {
-            return BTreeUtils.createBTree(opDesc.getStorageManager().getBufferCache(ctx), opDesc.getStorageManager()
-                    .getFileMapProvider(ctx), treeOpDesc.getTreeIndexTypeTraits(), treeOpDesc
-                    .getTreeIndexComparatorFactories(), BTreeLeafFrameType.REGULAR_NSM, file);
-        } catch (BTreeException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
deleted file mode 100644
index c56308b..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class BTreeSearchOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    protected final int[] lowKeyFields;
-    protected final int[] highKeyFields;
-    protected final boolean lowKeyInclusive;
-    protected final boolean highKeyInclusive;
-
-    public BTreeSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields, int[] lowKeyFields,
-            int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
-            IIndexDataflowHelperFactory dataflowHelperFactory, boolean retainInput,
-            ISearchOperationCallbackFactory searchOpCallbackProvider) {
-        super(spec, 1, 1, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, bloomFilterKeyFields, dataflowHelperFactory, null, retainInput,
-                NoOpLocalResourceFactoryProvider.INSTANCE, searchOpCallbackProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
-        this.lowKeyFields = lowKeyFields;
-        this.highKeyFields = highKeyFields;
-        this.lowKeyInclusive = lowKeyInclusive;
-        this.highKeyInclusive = highKeyInclusive;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new BTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, lowKeyFields,
-                highKeyFields, lowKeyInclusive, highKeyInclusive);
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
deleted file mode 100644
index dc5d161..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-
-public class BTreeSearchOperatorNodePushable extends IndexSearchOperatorNodePushable {
-    protected final boolean lowKeyInclusive;
-    protected final boolean highKeyInclusive;
-
-    protected PermutingFrameTupleReference lowKey;
-    protected PermutingFrameTupleReference highKey;
-    protected MultiComparator lowKeySearchCmp;
-    protected MultiComparator highKeySearchCmp;
-
-    public BTreeSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, IRecordDescriptorProvider recordDescProvider, int[] lowKeyFields, int[] highKeyFields,
-            boolean lowKeyInclusive, boolean highKeyInclusive) {
-        super(opDesc, ctx, partition, recordDescProvider);
-        this.lowKeyInclusive = lowKeyInclusive;
-        this.highKeyInclusive = highKeyInclusive;
-        if (lowKeyFields != null && lowKeyFields.length > 0) {
-            lowKey = new PermutingFrameTupleReference();
-            lowKey.setFieldPermutation(lowKeyFields);
-        }
-        if (highKeyFields != null && highKeyFields.length > 0) {
-            highKey = new PermutingFrameTupleReference();
-            highKey.setFieldPermutation(highKeyFields);
-        }
-    }
-
-    @Override
-    protected void resetSearchPredicate(int tupleIndex) {
-        if (lowKey != null) {
-            lowKey.reset(accessor, tupleIndex);
-        }
-        if (highKey != null) {
-            highKey.reset(accessor, tupleIndex);
-        }
-    }
-
-    @Override
-    protected ISearchPredicate createSearchPredicate() {
-        ITreeIndex treeIndex = (ITreeIndex) index;
-        lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), lowKey);
-        highKeySearchCmp = BTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), highKey);
-        return new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp, highKeySearchCmp);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorDescriptor.java
deleted file mode 100644
index f13ecae..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorDescriptor.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleUpdaterFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-public class BTreeUpdateSearchOperatorDescriptor extends BTreeSearchOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final ITupleUpdaterFactory tupleUpdaterFactory;
-
-    public BTreeUpdateSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields, int[] lowKeyFields,
-            int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
-            IIndexDataflowHelperFactory dataflowHelperFactory, boolean retainInput,
-            ISearchOperationCallbackFactory searchOpCallbackProvider, ITupleUpdaterFactory tupleUpdaterFactory) {
-        super(spec, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, bloomFilterKeyFields, lowKeyFields, highKeyFields, lowKeyInclusive,
-                highKeyInclusive, dataflowHelperFactory, retainInput, searchOpCallbackProvider);
-        this.tupleUpdaterFactory = tupleUpdaterFactory;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new BTreeUpdateSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, lowKeyFields,
-                highKeyFields, lowKeyInclusive, highKeyInclusive, tupleUpdaterFactory.createTupleUpdater());
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java
deleted file mode 100644
index 648e523..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleUpdater;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-
-public class BTreeUpdateSearchOperatorNodePushable extends BTreeSearchOperatorNodePushable {
-    private final ITupleUpdater tupleUpdater;
-
-    public BTreeUpdateSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, IRecordDescriptorProvider recordDescProvider, int[] lowKeyFields, int[] highKeyFields,
-            boolean lowKeyInclusive, boolean highKeyInclusive, ITupleUpdater tupleUpdater) {
-        super(opDesc, ctx, partition, recordDescProvider, lowKeyFields, highKeyFields, lowKeyInclusive,
-                highKeyInclusive);
-        this.tupleUpdater = tupleUpdater;
-    }
-
-    @Override
-    protected ITreeIndexCursor createCursor() {
-        ITreeIndex treeIndex = (ITreeIndex) index;
-        ITreeIndexFrame cursorFrame = treeIndex.getLeafFrameFactory().createFrame();
-        return new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, true);
-    }
-
-    @Override
-    protected void writeSearchResults(int tupleIndex) throws Exception {
-        while (cursor.hasNext()) {
-            tb.reset();
-            cursor.next();
-            if (retainInput) {
-                frameTuple.reset(accessor, tupleIndex);
-                for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-                    dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-                    tb.addFieldEndOffset();
-                }
-            }
-            ITupleReference tuple = cursor.getTuple();
-            tupleUpdater.updateTuple(tuple);
-            for (int i = 0; i < tuple.getFieldCount(); i++) {
-                dos.write(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-                tb.addFieldEndOffset();
-            }
-            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                FrameUtils.flushFrame(writeBuffer, writer);
-                appender.reset(writeBuffer, true);
-                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                    throw new IllegalStateException();
-                }
-            }
-        }
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java
deleted file mode 100644
index 5a1d610..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.exceptions;
-
-public class BTreeDuplicateKeyException extends BTreeException {
-    private static final long serialVersionUID = 1L;
-
-    public BTreeDuplicateKeyException(Exception e) {
-        super(e);
-    }
-
-    public BTreeDuplicateKeyException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java
deleted file mode 100644
index 989f118..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.exceptions;
-
-public class BTreeNonExistentKeyException extends BTreeException {
-
-    private static final long serialVersionUID = 1L;
-
-    public BTreeNonExistentKeyException(Exception e) {
-        super(e);
-    }
-
-    public BTreeNonExistentKeyException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
deleted file mode 100644
index 7e83c69..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.exceptions;
-
-public class BTreeNotUpdateableException extends BTreeException {
-    private static final long serialVersionUID = 1L;
-
-    public BTreeNotUpdateableException(Exception e) {
-        super(e);
-    }
-
-    public BTreeNotUpdateableException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
deleted file mode 100644
index 354aa1e..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
+++ /dev/null
@@ -1,745 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.compressors.FieldPrefixCompressor;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixPrefixTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-/**
- * WARNING: only works when tupleWriter is an instance of TypeAwareTupleWriter
- */
-public class BTreeFieldPrefixNSMLeafFrame implements IBTreeLeafFrame {
-
-    protected static final int pageLsnOff = 0; // 0
-    protected static final int tupleCountOff = pageLsnOff + 8; // 8
-    protected static final int freeSpaceOff = tupleCountOff + 4; // 12
-    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
-    protected static final int levelOff = totalFreeSpaceOff + 4; // 20
-    protected static final int smFlagOff = levelOff + 1; // 21
-    protected static final int uncompressedTupleCountOff = smFlagOff + 1; // 22
-    protected static final int prefixTupleCountOff = uncompressedTupleCountOff + 4; // 26
-    protected static final int nextLeafOff = prefixTupleCountOff + 4; // 30
-
-    private final IPrefixSlotManager slotManager;
-    private final ITreeIndexFrameCompressor compressor;
-    private final FieldPrefixTupleReference frameTuple;
-    private final FieldPrefixPrefixTupleReference framePrefixTuple;
-    private final ITreeIndexTupleWriter tupleWriter;
-
-    private MultiComparator cmp;
-
-    protected ICachedPage page = null;
-    protected ByteBuffer buf = null;
-
-    public BTreeFieldPrefixNSMLeafFrame(ITreeIndexTupleWriter tupleWriter) {
-        this.tupleWriter = tupleWriter;
-        this.frameTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
-        this.slotManager = new FieldPrefixSlotManager();
-
-        ITypeTraits[] typeTraits = ((TypeAwareTupleWriter) tupleWriter).getTypeTraits();
-        this.framePrefixTuple = new FieldPrefixPrefixTupleReference(typeTraits);
-        this.compressor = new FieldPrefixCompressor(typeTraits, 0.001f, 2);
-    }
-
-    @Override
-    public void setPage(ICachedPage page) {
-        this.page = page;
-        this.buf = page.getBuffer();
-        slotManager.setFrame(this);
-    }
-
-    @Override
-    public ByteBuffer getBuffer() {
-        return page.getBuffer();
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    @Override
-    public boolean compress() throws HyracksDataException {
-        try {
-            return compressor.compress(this, cmp);
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
-    }
-
-    // Assumptions:
-    // 1) prefix tuples are stored contiguously
-    // 2) prefix tuples are located before tuples (physically on the page)
-    // 3) prefix tuples are sorted (last prefix tuple is at highest offset)
-    // This procedure will not move prefix tuples.
-    @Override
-    public boolean compact() {
-        resetSpaceParams();
-
-        int tupleCount = buf.getInt(tupleCountOff);
-
-        // determine start of target free space (depends on assumptions stated above)
-        int freeSpace = buf.getInt(freeSpaceOff);
-        int prefixTupleCount = buf.getInt(prefixTupleCountOff);
-        if (prefixTupleCount > 0) {
-
-            // debug
-            int max = 0;
-            for (int i = 0; i < prefixTupleCount; i++) {
-                framePrefixTuple.resetByTupleIndex(this, i);
-                int end = framePrefixTuple.getFieldStart(framePrefixTuple.getFieldCount() - 1)
-                        + framePrefixTuple.getFieldLength(framePrefixTuple.getFieldCount() - 1);
-                if (end > max)
-                    max = end;
-            }
-
-            framePrefixTuple.resetByTupleIndex(this, prefixTupleCount - 1);
-            freeSpace = framePrefixTuple.getFieldStart(framePrefixTuple.getFieldCount() - 1)
-                    + framePrefixTuple.getFieldLength(framePrefixTuple.getFieldCount() - 1);
-        }
-
-        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
-        sortedTupleOffs.ensureCapacity(tupleCount);
-        for (int i = 0; i < tupleCount; i++) {
-            int tupleSlotOff = slotManager.getTupleSlotOff(i);
-            int tupleSlot = buf.getInt(tupleSlotOff);
-            int tupleOff = slotManager.decodeSecondSlotField(tupleSlot);
-            sortedTupleOffs.add(new SlotOffTupleOff(i, tupleSlotOff, tupleOff));
-
-        }
-        Collections.sort(sortedTupleOffs);
-
-        for (int i = 0; i < sortedTupleOffs.size(); i++) {
-            int tupleOff = sortedTupleOffs.get(i).tupleOff;
-            int tupleSlot = buf.getInt(sortedTupleOffs.get(i).slotOff);
-            int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
-
-            frameTuple.resetByTupleIndex(this, sortedTupleOffs.get(i).tupleIndex);
-            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
-            int tupleLength = tupleEndOff - tupleOff;
-            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-
-            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
-            freeSpace += tupleLength;
-        }
-
-        buf.putInt(freeSpaceOff, freeSpace);
-        int totalFreeSpace = buf.capacity() - buf.getInt(freeSpaceOff)
-                - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
-        buf.putInt(totalFreeSpaceOff, totalFreeSpace);
-
-        return false;
-    }
-
-    @Override
-    public void delete(ITupleReference tuple, int slot) {
-        int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
-        int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
-
-        // perform deletion (we just do a memcpy to overwrite the slot)
-        int slotEndOff = slotManager.getTupleSlotEndOff();
-        int length = tupleSlotOff - slotEndOff;
-        System.arraycopy(buf.array(), slotEndOff, buf.array(), slotEndOff + slotManager.getSlotSize(), length);
-
-        // maintain space information, get size of tuple suffix (suffix could be entire tuple)
-        int tupleSize = 0;
-        int suffixFieldStart = 0;
-        if (prefixSlotNum == FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-            suffixFieldStart = 0;
-            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) - 1);
-        } else {
-            int prefixSlot = buf.getInt(slotManager.getPrefixSlotOff(prefixSlotNum));
-            suffixFieldStart = slotManager.decodeFirstSlotField(prefixSlot);
-        }
-
-        frameTuple.resetByTupleIndex(this, tupleIndex);
-        tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount()
-                - suffixFieldStart);
-
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
-    }
-
-    @Override
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
-        int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
-                - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
-
-        int bytesRequired = tupleWriter.bytesRequired(tuple);
-
-        // See if the tuple would fit uncompressed.
-        if (bytesRequired + slotManager.getSlotSize() <= freeContiguous)
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-
-        // See if tuple would fit into remaining space after compaction.
-        if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-
-        // See if the tuple matches a prefix and will fit after truncating the prefix.
-        int prefixSlotNum = slotManager.findPrefix(tuple, framePrefixTuple);
-        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-            int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
-            int prefixSlot = buf.getInt(prefixSlotOff);
-            int numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
-
-            int compressedSize = tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount()
-                    - numPrefixFields);
-            if (compressedSize + slotManager.getSlotSize() <= freeContiguous)
-                return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        }
-
-        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
-
-    @Override
-    public void insert(ITupleReference tuple, int tupleIndex) {
-        int slot = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-        int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
-        int numPrefixFields = 0;
-        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-            int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
-            int prefixSlot = buf.getInt(prefixSlotOff);
-            numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
-        } else {
-            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
-        }
-
-        int freeSpace = buf.getInt(freeSpaceOff);
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields,
-                tuple.getFieldCount() - numPrefixFields, buf.array(), freeSpace);
-
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
-    }
-
-    @Override
-    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex) {
-        int tupleIndex = slotManager.decodeSecondSlotField(oldTupleIndex);
-        frameTuple.resetByTupleIndex(this, tupleIndex);
-
-        int oldTupleBytes = 0;
-        int newTupleBytes = 0;
-
-        int numPrefixFields = frameTuple.getNumPrefixFields();
-        int fieldCount = frameTuple.getFieldCount();
-        if (numPrefixFields != 0) {
-            // Check the space requirements for updating the suffix of the original tuple.
-            oldTupleBytes = frameTuple.getSuffixTupleSize();
-            newTupleBytes = tupleWriter.bytesRequired(newTuple, numPrefixFields, fieldCount - numPrefixFields);
-        } else {
-            // The original tuple is uncompressed.
-            oldTupleBytes = frameTuple.getTupleSize();
-            newTupleBytes = tupleWriter.bytesRequired(newTuple);
-        }
-
-        int additionalBytesRequired = newTupleBytes - oldTupleBytes;
-        // Enough space for an in-place update?
-        if (additionalBytesRequired <= 0) {
-            return FrameOpSpaceStatus.SUFFICIENT_INPLACE_SPACE;
-        }
-
-        int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
-                - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
-
-        // Enough space if we delete the old tuple and insert the new one without compaction?
-        if (newTupleBytes <= freeContiguous) {
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        }
-        // Enough space if we delete the old tuple and compact?
-        if (additionalBytesRequired <= buf.getInt(totalFreeSpaceOff)) {
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-        }
-        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
-
-    @Override
-    public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) {
-        int tupleIndex = slotManager.decodeSecondSlotField(oldTupleIndex);
-        int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
-        int tupleSlot = buf.getInt(tupleSlotOff);
-        int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
-        int suffixTupleStartOff = slotManager.decodeSecondSlotField(tupleSlot);
-
-        frameTuple.resetByTupleIndex(this, tupleIndex);
-        int fieldCount = frameTuple.getFieldCount();
-        int numPrefixFields = frameTuple.getNumPrefixFields();
-        int oldTupleBytes = frameTuple.getSuffixTupleSize();
-        int bytesWritten = 0;
-
-        if (inPlace) {
-            // Overwrite the old tuple suffix in place.
-            bytesWritten = tupleWriter.writeTupleFields(newTuple, numPrefixFields, fieldCount - numPrefixFields,
-                    buf.array(), suffixTupleStartOff);
-        } else {
-            // Insert the new tuple suffix at the end of the free space, and change
-            // the slot value (effectively "deleting" the old tuple).
-            int newSuffixTupleStartOff = buf.getInt(freeSpaceOff);
-            bytesWritten = tupleWriter.writeTupleFields(newTuple, numPrefixFields, fieldCount - numPrefixFields,
-                    buf.array(), newSuffixTupleStartOff);
-            // Update slot value using the same prefix slot num.
-            slotManager.setSlot(tupleSlotOff, slotManager.encodeSlotFields(prefixSlotNum, newSuffixTupleStartOff));
-            // Update contiguous free space pointer.
-            buf.putInt(freeSpaceOff, newSuffixTupleStartOff + bytesWritten);
-        }
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + oldTupleBytes - bytesWritten);
-    }
-
-    protected void resetSpaceParams() {
-        buf.putInt(freeSpaceOff, getOrigFreeSpaceOff());
-        buf.putInt(totalFreeSpaceOff, getOrigTotalFreeSpace());
-    }
-
-    @Override
-    public void initBuffer(byte level) {
-        buf.putLong(pageLsnOff, 0);
-        // during creation
-        buf.putInt(tupleCountOff, 0);
-        resetSpaceParams();
-        buf.putInt(uncompressedTupleCountOff, 0);
-        buf.putInt(prefixTupleCountOff, 0);
-        buf.put(levelOff, level);
-        buf.put(smFlagOff, (byte) 0);
-        buf.putInt(nextLeafOff, -1);
-    }
-
-    public void setTotalFreeSpace(int totalFreeSpace) {
-        buf.putInt(totalFreeSpaceOff, totalFreeSpace);
-    }
-
-    public int getOrigTotalFreeSpace() {
-        return buf.capacity() - (nextLeafOff + 4);
-    }
-
-    @Override
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp,
-                FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS, FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        // Error indicator is set if there is an exact match.
-        if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
-        }
-        return slot;
-    }
-
-    @Override
-    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.INCLUSIVE,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        // Error indicator is set if there is an exact match.
-        if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
-        }
-        return slot;
-    }
-
-    @Override
-    public ITupleReference getMatchingKeyTuple(ITupleReference searchTuple, int targetTupleIndex) {
-        int tupleIndex = slotManager.decodeSecondSlotField(targetTupleIndex);
-        // Examine the tuple index to determine whether it is valid or not.
-        if (tupleIndex != slotManager.getGreatestKeyIndicator()) {
-            // We need to check the key to determine whether it's an insert or an update.
-            frameTuple.resetByTupleIndex(this, tupleIndex);
-            if (cmp.compare(searchTuple, frameTuple) == 0) {
-                // The keys match, it's an update.
-                return frameTuple;
-            }
-        }
-        // Either the tuple index is a special indicator, or the keys don't match.
-        // In those cases, we are definitely dealing with an insert.
-        return null;
-    }
-
-    @Override
-    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        // Error indicator is set if there is no exact match.
-        if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new BTreeNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
-        }
-        return slot;
-    }
-
-    @Override
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        // Error indicator is set if there is no exact match.
-        if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
-        }
-        return slot;
-    }
-
-    @Override
-    public String printHeader() {
-        StringBuilder strBuilder = new StringBuilder();
-        strBuilder.append("pageLsnOff:                " + pageLsnOff + "\n");
-        strBuilder.append("tupleCountOff:             " + tupleCountOff + "\n");
-        strBuilder.append("freeSpaceOff:              " + freeSpaceOff + "\n");
-        strBuilder.append("totalFreeSpaceOff:         " + totalFreeSpaceOff + "\n");
-        strBuilder.append("levelOff:                  " + levelOff + "\n");
-        strBuilder.append("smFlagOff:                 " + smFlagOff + "\n");
-        strBuilder.append("uncompressedTupleCountOff: " + uncompressedTupleCountOff + "\n");
-        strBuilder.append("prefixTupleCountOff:       " + prefixTupleCountOff + "\n");
-        strBuilder.append("nextLeafOff:               " + nextLeafOff + "\n");
-        return strBuilder.toString();
-    }
-
-    @Override
-    public int getTupleCount() {
-        return buf.getInt(tupleCountOff);
-    }
-
-    public IPrefixSlotManager getSlotManager() {
-        return slotManager;
-    }
-
-    @Override
-    public int getTupleOffset(int slotNum) {
-        int tupleSlotOff = slotManager.getTupleSlotOff(slotNum);
-        int tupleSlot = buf.getInt(tupleSlotOff);
-        return slotManager.decodeSecondSlotField(tupleSlot);
-    }
-
-    @Override
-    public long getPageLsn() {
-        return buf.getLong(pageLsnOff);
-    }
-
-    @Override
-    public void setPageLsn(long pageLsn) {
-        buf.putLong(pageLsnOff, pageLsn);
-    }
-
-    @Override
-    public int getTotalFreeSpace() {
-        return buf.getInt(totalFreeSpaceOff);
-    }
-
-    @Override
-    public boolean isLeaf() {
-        return buf.get(levelOff) == 0;
-    }
-
-    @Override
-    public boolean isInterior() {
-        return buf.get(levelOff) > 0;
-    }
-
-    @Override
-    public byte getLevel() {
-        return buf.get(levelOff);
-    }
-
-    @Override
-    public void setLevel(byte level) {
-        buf.put(levelOff, level);
-    }
-
-    @Override
-    public boolean getSmFlag() {
-        return buf.get(smFlagOff) != 0;
-    }
-
-    @Override
-    public void setSmFlag(boolean smFlag) {
-        if (smFlag)
-            buf.put(smFlagOff, (byte) 1);
-        else
-            buf.put(smFlagOff, (byte) 0);
-    }
-
-    public int getPrefixTupleCount() {
-        return buf.getInt(prefixTupleCountOff);
-    }
-
-    public void setPrefixTupleCount(int prefixTupleCount) {
-        buf.putInt(prefixTupleCountOff, prefixTupleCount);
-    }
-
-    @Override
-    public void insertSorted(ITupleReference tuple) {
-        int freeSpace = buf.getInt(freeSpaceOff);
-        int fieldsToTruncate = 0;
-
-        // check if tuple matches last prefix tuple
-        if (buf.getInt(prefixTupleCountOff) > 0) {
-            framePrefixTuple.resetByTupleIndex(this, buf.getInt(prefixTupleCountOff) - 1);
-            if (cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount()) == 0) {
-                fieldsToTruncate = framePrefixTuple.getFieldCount();
-            }
-        }
-
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate, tuple.getFieldCount()
-                - fieldsToTruncate, buf.array(), freeSpace);
-
-        // insert slot
-        int prefixSlotNum = FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
-        if (fieldsToTruncate > 0)
-            prefixSlotNum = buf.getInt(prefixTupleCountOff) - 1;
-        else
-            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
-        int insSlot = slotManager.encodeSlotFields(prefixSlotNum, FieldPrefixSlotManager.GREATEST_KEY_INDICATOR);
-        slotManager.insertSlot(insSlot, freeSpace);
-
-        // update page metadata
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
-    }
-
-    @Override
-    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) {
-
-        BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame) rightFrame;
-
-        ByteBuffer right = rf.getBuffer();
-        int tupleCount = getTupleCount();
-        int prefixTupleCount = getPrefixTupleCount();
-
-        // Find split point, and determine into which frame the new tuple should
-        // be inserted into.
-        int tuplesToLeft;
-        int midSlotNum = tupleCount / 2;
-        ITreeIndexFrame targetFrame = null;
-        frameTuple.resetByTupleIndex(this, midSlotNum);
-        int comparison = cmp.compare(tuple, frameTuple);
-        if (comparison >= 0) {
-            tuplesToLeft = midSlotNum + (tupleCount % 2);
-            targetFrame = rf;
-        } else {
-            tuplesToLeft = midSlotNum;
-            targetFrame = this;
-        }
-        int tuplesToRight = tupleCount - tuplesToLeft;
-
-        // copy entire page
-        System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
-
-        // determine how many slots go on left and right page
-        int prefixesToLeft = prefixTupleCount;
-        for (int i = tuplesToLeft; i < tupleCount; i++) {
-            int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
-            int tupleSlot = right.getInt(tupleSlotOff);
-            int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
-            if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-                prefixesToLeft = prefixSlotNum;
-                break;
-            }
-        }
-
-        // if we are splitting in the middle of a prefix both pages need to have the prefix slot and tuple
-        int boundaryTupleSlotOff = rf.slotManager.getTupleSlotOff(tuplesToLeft - 1);
-        int boundaryTupleSlot = buf.getInt(boundaryTupleSlotOff);
-        int boundaryPrefixSlotNum = rf.slotManager.decodeFirstSlotField(boundaryTupleSlot);
-        int prefixesToRight = prefixTupleCount - prefixesToLeft;
-        if (boundaryPrefixSlotNum == prefixesToLeft
-                && boundaryPrefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-            prefixesToLeft++; // tuples on both pages share one prefix
-        }
-
-        // move prefix tuples on right page to beginning of page and adjust prefix slots
-        if (prefixesToRight > 0 && prefixesToLeft > 0 && prefixTupleCount > 1) {
-
-            int freeSpace = rf.getOrigFreeSpaceOff();
-            int lastPrefixSlotNum = -1;
-
-            for (int i = tuplesToLeft; i < tupleCount; i++) {
-                int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
-                int tupleSlot = right.getInt(tupleSlotOff);
-                int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
-                if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-                    framePrefixTuple.resetByTupleIndex(this, prefixSlotNum);
-
-                    int bytesWritten = 0;
-                    if (lastPrefixSlotNum != prefixSlotNum) {
-                        bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right.array(), freeSpace);
-                        int newPrefixSlot = rf.slotManager
-                                .encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
-                        int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
-                        right.putInt(prefixSlotOff, newPrefixSlot);
-                        lastPrefixSlotNum = prefixSlotNum;
-                    }
-
-                    int tupleOff = rf.slotManager.decodeSecondSlotField(tupleSlot);
-                    int newTupleSlot = rf.slotManager.encodeSlotFields(prefixSlotNum
-                            - (prefixTupleCount - prefixesToRight), tupleOff);
-                    right.putInt(tupleSlotOff, newTupleSlot);
-                    freeSpace += bytesWritten;
-                }
-            }
-        }
-
-        // move the modified prefix slots on the right page
-        int prefixSrc = rf.slotManager.getPrefixSlotEndOff();
-        int prefixDest = rf.slotManager.getPrefixSlotEndOff() + (prefixTupleCount - prefixesToRight)
-                * rf.slotManager.getSlotSize();
-        int prefixLength = rf.slotManager.getSlotSize() * prefixesToRight;
-        System.arraycopy(right.array(), prefixSrc, right.array(), prefixDest, prefixLength);
-
-        // on right page we need to copy rightmost tuple slots to left
-        int src = rf.slotManager.getTupleSlotEndOff();
-        int dest = rf.slotManager.getTupleSlotEndOff() + tuplesToLeft * rf.slotManager.getSlotSize()
-                + (prefixTupleCount - prefixesToRight) * rf.slotManager.getSlotSize();
-        int length = rf.slotManager.getSlotSize() * tuplesToRight;
-        System.arraycopy(right.array(), src, right.array(), dest, length);
-
-        right.putInt(tupleCountOff, tuplesToRight);
-        right.putInt(prefixTupleCountOff, prefixesToRight);
-
-        // on left page move slots to reflect possibly removed prefixes
-        src = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize();
-        dest = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize()
-                + (prefixTupleCount - prefixesToLeft) * slotManager.getSlotSize();
-        length = slotManager.getSlotSize() * tuplesToLeft;
-        System.arraycopy(buf.array(), src, buf.array(), dest, length);
-
-        buf.putInt(tupleCountOff, tuplesToLeft);
-        buf.putInt(prefixTupleCountOff, prefixesToLeft);
-
-        // compact both pages
-        compact();
-        rightFrame.compact();
-
-        // insert last key
-        int targetTupleIndex;
-        // it's safe to catch this exception since it will have been caught before reaching here
-        try {
-            targetTupleIndex = ((IBTreeLeafFrame) targetFrame).findInsertTupleIndex(tuple);
-        } catch (TreeIndexException e) {
-            throw new IllegalStateException(e);
-        }
-        targetFrame.insert(tuple, targetTupleIndex);
-
-        // set split key to be highest value in left page
-        frameTuple.resetByTupleIndex(this, getTupleCount() - 1);
-
-        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
-        splitKey.initData(splitKeySize);
-        tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer().array(), 0);
-        splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
-    }
-
-    @Override
-    public int getFreeSpaceOff() {
-        return buf.getInt(freeSpaceOff);
-    }
-
-    public int getOrigFreeSpaceOff() {
-        return nextLeafOff + 4;
-    }
-
-    @Override
-    public void setFreeSpaceOff(int freeSpace) {
-        buf.putInt(freeSpaceOff, freeSpace);
-    }
-
-    @Override
-    public void setNextLeaf(int page) {
-        buf.putInt(nextLeafOff, page);
-    }
-
-    @Override
-    public int getNextLeaf() {
-        return buf.getInt(nextLeafOff);
-    }
-
-    public int getUncompressedTupleCount() {
-        return buf.getInt(uncompressedTupleCountOff);
-    }
-
-    public void setUncompressedTupleCount(int uncompressedTupleCount) {
-        buf.putInt(uncompressedTupleCountOff, uncompressedTupleCount);
-    }
-
-    @Override
-    public int getSlotSize() {
-        return slotManager.getSlotSize();
-    }
-
-    public ITreeIndexTupleWriter getTupleWriter() {
-        return tupleWriter;
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return new FieldPrefixTupleReference(tupleWriter.createTupleReference());
-    }
-
-    @Override
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
-            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
-        int slot = slotManager.findSlot(searchKey, pageTuple, framePrefixTuple, cmp, ftm, ftp);
-        int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        // TODO: Revisit this one. Maybe there is a cleaner way to solve this in the RangeSearchCursor.
-        if (tupleIndex == FieldPrefixSlotManager.GREATEST_KEY_INDICATOR
-                || tupleIndex == FieldPrefixSlotManager.ERROR_INDICATOR)
-            return -1;
-        else
-            return tupleIndex;
-    }
-
-    @Override
-    public int getPageHeaderSize() {
-        return nextLeafOff;
-    }
-
-    @Override
-    public void setMultiComparator(MultiComparator cmp) {
-        this.cmp = cmp;
-        this.slotManager.setMultiComparator(cmp);
-    }
-
-    @Override
-    public void validate(PageValidationInfo pvi) {
-        // Do nothing
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
deleted file mode 100644
index 79d2f3a..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-
-public class BTreeFieldPrefixNSMLeafFrameFactory implements ITreeIndexFrameFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
-
-    public BTreeFieldPrefixNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
-        this.tupleWriterFactory = tupleWriterFactory;
-    }
-
-    @Override
-    public IBTreeLeafFrame createFrame() {
-        return new BTreeFieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
-    }
-
-    @Override
-    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
-        return tupleWriterFactory;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java
deleted file mode 100644
index bd543f8..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-public enum BTreeLeafFrameType {
-    REGULAR_NSM,
-    FIELD_PREFIX_COMPRESSED_NSM
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
deleted file mode 100644
index 90b167f..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
+++ /dev/null
@@ -1,446 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
-
-public class BTreeNSMInteriorFrame extends TreeIndexNSMFrame implements IBTreeInteriorFrame {
-
-    private static final int rightLeafOff = smFlagOff + 1;
-    private static final int childPtrSize = 4;
-
-    private final ITreeIndexTupleReference cmpFrameTuple;
-    private final ITreeIndexTupleReference previousFt;
-
-    private MultiComparator cmp;
-
-    public BTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter) {
-        super(tupleWriter, new OrderedSlotManager());
-        cmpFrameTuple = tupleWriter.createTupleReference();
-        previousFt = tupleWriter.createTupleReference();
-    }
-
-    @Override
-    public void initBuffer(byte level) {
-        super.initBuffer(level);
-        buf.putInt(rightLeafOff, -1);
-    }
-
-    @Override
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-    }
-
-    @Override
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
-        // Tuple bytes + child pointer + slot.
-        int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize + slotManager.getSlotSize();
-        if (bytesRequired <= getFreeContiguousSpace()) {
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        }
-        if (bytesRequired <= getTotalFreeSpace()) {
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-        }
-        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
-
-    @Override
-    public void insert(ITupleReference tuple, int tupleIndex) {
-        int slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-        int freeSpace = buf.getInt(freeSpaceOff);
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, tuple.getFieldCount(), buf.array(), freeSpace);
-        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple), buf.array(),
-                freeSpace + bytesWritten, childPtrSize);
-        int tupleSize = bytesWritten + childPtrSize;
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
-        // Did we insert into the rightmost slot?
-        if (slotOff == slotManager.getSlotEndOff()) {
-            System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple) + childPtrSize,
-                    buf.array(), rightLeafOff, childPtrSize);
-        } else {
-            // If slotOff has a right (slot-)neighbor then update its child pointer.
-            // The only time when this is NOT the case, is when this is the very first tuple
-            // (or when the splitkey goes into the rightmost slot but that case is handled in the if above).
-            if (buf.getInt(tupleCountOff) > 1) {
-                int rightNeighborOff = slotOff - slotManager.getSlotSize();
-                frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(rightNeighborOff));
-                System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + childPtrSize, buf.array(),
-                        getLeftChildPageOff(frameTuple), childPtrSize);
-            }
-        }
-    }
-
-    @Override
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-    }
-
-    @Override
-    public void delete(ITupleReference tuple, int tupleIndex) {
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-        int tupleOff;
-        int keySize;
-        if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
-            tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            keySize = frameTuple.getTupleSize();
-            // Copy new rightmost pointer.
-            System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
-        } else {
-            tupleOff = slotManager.getTupleOff(slotOff);
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            keySize = frameTuple.getTupleSize();
-            // Perform deletion (we just do a memcpy to overwrite the slot).
-            int slotStartOff = slotManager.getSlotEndOff();
-            int length = slotOff - slotStartOff;
-            System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
-        }
-        // Maintain space information.
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff,
-                buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
-    }
-
-    @Override
-    public void deleteGreatest() {
-        int slotOff = slotManager.getSlotEndOff();
-        int tupleOff = slotManager.getTupleOff(slotOff);
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int keySize = tupleWriter.bytesRequired(frameTuple);
-        System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
-        // Maintain space information.
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff,
-                buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
-        int freeSpace = buf.getInt(freeSpaceOff);
-        if (freeSpace == tupleOff + keySize + childPtrSize) {
-            buf.putInt(freeSpace, freeSpace - (keySize + childPtrSize));
-        }
-    }
-
-    @Override
-    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference tuple, int oldTupleIndex) {
-        throw new UnsupportedOperationException("Cannot update tuples in interior node.");
-    }
-
-    @Override
-    public void insertSorted(ITupleReference tuple) {
-        int freeSpace = buf.getInt(freeSpaceOff);
-        slotManager.insertSlot(slotManager.getGreatestKeyIndicator(), freeSpace);
-        int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
-        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple), buf.array(),
-                freeSpace + bytesWritten, childPtrSize);
-        int tupleSize = bytesWritten + childPtrSize;
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
-        System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + childPtrSize, buf.array(), rightLeafOff,
-                childPtrSize);
-    }
-
-    @Override
-    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) {
-        ByteBuffer right = rightFrame.getBuffer();
-        int tupleCount = getTupleCount();
-
-        // Find split point, and determine into which frame the new tuple should be inserted into.
-        int tuplesToLeft;
-        ITreeIndexFrame targetFrame = null;
-
-        int totalSize = 0;
-        int halfPageSize = buf.capacity() / 2 - getPageHeaderSize();
-        int i;
-        for (i = 0; i < tupleCount; ++i) {
-            frameTuple.resetByTupleIndex(this, i);
-            totalSize += tupleWriter.bytesRequired(frameTuple) + childPtrSize + slotManager.getSlotSize();
-            if (totalSize >= halfPageSize) {
-                break;
-            }
-        }
-
-        if (cmp.compare(tuple, frameTuple) > 0) {
-            tuplesToLeft = i;
-            targetFrame = rightFrame;
-        } else {
-            tuplesToLeft = i + 1;
-            targetFrame = this;
-        }
-        int tuplesToRight = tupleCount - tuplesToLeft;
-
-        // Copy entire page.
-        System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
-
-        // On the right page we need to copy rightmost slots to left.
-        int src = rightFrame.getSlotManager().getSlotEndOff();
-        int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
-                * rightFrame.getSlotManager().getSlotSize();
-        int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
-        System.arraycopy(right.array(), src, right.array(), dest, length);
-        right.putInt(tupleCountOff, tuplesToRight);
-
-        // On the left page, remove the highest key and make its child pointer
-        // the rightmost child pointer.
-        buf.putInt(tupleCountOff, tuplesToLeft);
-
-        // Copy the split key to be inserted.
-        // We must do so because setting the new split key will overwrite the
-        // old split key, and we cannot insert the existing split key at this point.
-        ISplitKey savedSplitKey = splitKey.duplicate(tupleWriter.createTupleReference());
-
-        // Set split key to be highest value in left page.
-        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
-        splitKey.initData(splitKeySize);
-        tupleWriter.writeTuple(frameTuple, splitKey.getBuffer(), 0);
-        splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
-
-        int deleteTupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-        frameTuple.resetByTupleOffset(buf, deleteTupleOff);
-        buf.putInt(rightLeafOff, buf.getInt(getLeftChildPageOff(frameTuple)));
-        buf.putInt(tupleCountOff, tuplesToLeft - 1);
-
-        // Compact both pages.
-        rightFrame.compact();
-        compact();
-
-        // Insert the saved split key.
-        int targetTupleIndex;
-        // it's safe to catch this exception since it will have been caught before reaching here
-        try {
-            targetTupleIndex = ((BTreeNSMInteriorFrame) targetFrame).findInsertTupleIndex(savedSplitKey.getTuple());
-        } catch (TreeIndexException e) {
-            throw new IllegalStateException(e);
-        }
-        targetFrame.insert(savedSplitKey.getTuple(), targetTupleIndex);
-    }
-
-    @Override
-    public boolean compact() {
-        resetSpaceParams();
-        int tupleCount = buf.getInt(tupleCountOff);
-        int freeSpace = buf.getInt(freeSpaceOff);
-        // Sort the slots by the tuple offset they point to.
-        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
-        sortedTupleOffs.ensureCapacity(tupleCount);
-        for (int i = 0; i < tupleCount; i++) {
-            int slotOff = slotManager.getSlotOff(i);
-            int tupleOff = slotManager.getTupleOff(slotOff);
-            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
-        }
-        Collections.sort(sortedTupleOffs);
-        // Iterate over the sorted slots, and move their corresponding tuples to
-        // the left, reclaiming free space.
-        for (int i = 0; i < sortedTupleOffs.size(); i++) {
-            int tupleOff = sortedTupleOffs.get(i).tupleOff;
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
-            int tupleLength = tupleEndOff - tupleOff + childPtrSize;
-            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
-            freeSpace += tupleLength;
-        }
-        // Update contiguous free space pointer and total free space indicator.
-        buf.putInt(freeSpaceOff, freeSpace);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
-        return false;
-    }
-
-    @Override
-    public int getChildPageId(RangePredicate pred) throws HyracksDataException {
-        // Trivial case where there is only a child pointer (and no key).
-        if (buf.getInt(tupleCountOff) == 0) {
-            return buf.getInt(rightLeafOff);
-        }
-        // Trivial cases where no low key or high key was given (e.g. during an
-        // index scan).
-        ITupleReference tuple = null;
-        FindTupleMode fsm = null;
-        // The target comparator may be on a prefix of the BTree key fields.
-        MultiComparator targetCmp = pred.getLowKeyComparator();
-        tuple = pred.getLowKey();
-        if (tuple == null) {
-            return getLeftmostChildPageId();
-        }
-        if (pred.isLowKeyInclusive()) {
-            fsm = FindTupleMode.INCLUSIVE;
-        } else {
-            fsm = FindTupleMode.EXCLUSIVE;
-        }
-        // Search for a matching key.
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-        // Follow the rightmost (greatest) child pointer.
-        if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
-            return buf.getInt(rightLeafOff);
-        }
-        // Deal with prefix searches.
-        // slotManager.findTupleIndex() will return an arbitrary tuple matching
-        // the given field prefix (according to the target comparator).
-        // To make sure we traverse the right path, we must find the
-        // leftmost or rightmost tuple that matches the prefix.
-        int origTupleOff = slotManager.getTupleOff(slotOff);
-        cmpFrameTuple.resetByTupleOffset(buf, origTupleOff);
-        int cmpTupleOff = origTupleOff;
-        // The answer set begins with the lowest key matching the prefix.
-        // We must follow the child pointer of the lowest (leftmost) key
-        // matching the given prefix.
-        int maxSlotOff = buf.capacity();
-        slotOff += slotManager.getSlotSize();
-        while (slotOff < maxSlotOff) {
-            cmpTupleOff = slotManager.getTupleOff(slotOff);
-            frameTuple.resetByTupleOffset(buf, cmpTupleOff);
-            if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0) {
-                break;
-            }
-            slotOff += slotManager.getSlotSize();
-        }
-        slotOff -= slotManager.getSlotSize();
-        frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
-        int childPageOff = getLeftChildPageOff(frameTuple);
-
-        return buf.getInt(childPageOff);
-    }
-
-    @Override
-    protected void resetSpaceParams() {
-        buf.putInt(freeSpaceOff, rightLeafOff + childPtrSize);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - (rightLeafOff + childPtrSize));
-    }
-
-    @Override
-    public int getLeftmostChildPageId() {
-        int tupleOff = slotManager.getTupleOff(slotManager.getSlotStartOff());
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int childPageOff = getLeftChildPageOff(frameTuple);
-        return buf.getInt(childPageOff);
-    }
-
-    @Override
-    public int getRightmostChildPageId() {
-        return buf.getInt(rightLeafOff);
-    }
-
-    @Override
-    public void setRightmostChildPageId(int pageId) {
-        buf.putInt(rightLeafOff, pageId);
-    }
-
-    @Override
-    public int getPageHeaderSize() {
-        return rightLeafOff + 4;
-    }
-
-    private int getLeftChildPageOff(ITupleReference tuple) {
-        return tuple.getFieldStart(tuple.getFieldCount() - 1) + tuple.getFieldLength(tuple.getFieldCount() - 1);
-    }
-
-    @Override
-    public boolean getSmFlag() {
-        return buf.get(smFlagOff) != 0;
-    }
-
-    @Override
-    public void setSmFlag(boolean smFlag) {
-        if (smFlag) {
-            buf.put(smFlagOff, (byte) 1);
-        } else {
-            buf.put(smFlagOff, (byte) 0);
-        }
-    }
-
-    @Override
-    public void setMultiComparator(MultiComparator cmp) {
-        this.cmp = cmp;
-        cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        previousFt.setFieldCount(cmp.getKeyFieldCount());
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        ITreeIndexTupleReference tuple = tupleWriter.createTupleReference();
-        tuple.setFieldCount(cmp.getKeyFieldCount());
-        return tuple;
-    }
-
-    // For debugging.
-    public ArrayList<Integer> getChildren(MultiComparator cmp) {
-        ArrayList<Integer> ret = new ArrayList<Integer>();
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        int tupleCount = buf.getInt(tupleCountOff);
-        for (int i = 0; i < tupleCount; i++) {
-            int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            int intVal = IntegerSerializerDeserializer.getInt(
-                    buf.array(),
-                    frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                            + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
-            ret.add(intVal);
-        }
-        if (!isLeaf()) {
-            int rightLeaf = buf.getInt(rightLeafOff);
-            if (rightLeaf > 0)
-                ret.add(buf.getInt(rightLeafOff));
-        }
-        return ret;
-    }
-
-    public void validate(PageValidationInfo pvi) throws HyracksDataException {
-        int tupleCount = getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            frameTuple.resetByTupleIndex(this, i);
-            if (!pvi.isLowRangeNull) {
-                assert cmp.compare(pvi.lowRangeTuple, frameTuple) < 0;
-            }
-
-            if (!pvi.isHighRangeNull) {
-                assert cmp.compare(pvi.highRangeTuple, frameTuple) >= 0;
-            }
-
-            if (i > 0) {
-                previousFt.resetByTupleIndex(this, i - 1);
-                assert cmp.compare(previousFt, frameTuple) < 0;
-            }
-        }
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
deleted file mode 100644
index 1491b55..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-
-public class BTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
-
-    public BTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
-        this.tupleWriterFactory = tupleWriterFactory;
-    }
-
-    @Override
-    public IBTreeInteriorFrame createFrame() {
-        return new BTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter());
-    }
-
-    @Override
-    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
-        return tupleWriterFactory;
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
deleted file mode 100644
index 04b3077..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public class BTreeNSMLeafFrame extends TreeIndexNSMFrame implements IBTreeLeafFrame {
-    protected static final int nextLeafOff = smFlagOff + 1;
-
-    private MultiComparator cmp;
-
-    private final ITreeIndexTupleReference previousFt;
-
-    public BTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter) {
-        super(tupleWriter, new OrderedSlotManager());
-        previousFt = tupleWriter.createTupleReference();
-    }
-
-    @Override
-    public void initBuffer(byte level) {
-        super.initBuffer(level);
-        buf.putInt(nextLeafOff, -1);
-    }
-
-    @Override
-    public void setNextLeaf(int page) {
-        buf.putInt(nextLeafOff, page);
-    }
-
-    @Override
-    public int getNextLeaf() {
-        return buf.getInt(nextLeafOff);
-    }
-
-    @Override
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        // Error indicator is set if there is an exact match.
-        if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
-        }
-        return tupleIndex;
-    }
-
-    @Override
-    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        // Error indicator is set if there is no exact match.
-        if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
-            throw new BTreeNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
-        }
-        return tupleIndex;
-    }
-
-    @Override
-    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        // Just return the found tupleIndex. The caller will make the final
-        // decision whether to insert or update.
-        return tupleIndex;
-    }
-
-    @Override
-    public ITupleReference getMatchingKeyTuple(ITupleReference searchTuple, int targetTupleIndex) {
-        // Examine the tuple index to determine whether it is valid or not.
-        if (targetTupleIndex != slotManager.getGreatestKeyIndicator()) {
-            // We need to check the key to determine whether it's an insert or
-            // an update/delete
-            frameTuple.resetByTupleIndex(this, targetTupleIndex);
-            if (cmp.compare(searchTuple, frameTuple) == 0) {
-                // The keys match, it's an update/delete
-                return frameTuple;
-            }
-        }
-        // Either the tuple index is a special indicator, or the keys don't
-        // match.
-        // In those cases, we are definitely dealing with an insert.
-        return null;
-    }
-
-    @Override
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        // Error indicator is set if there is no exact match.
-        if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
-            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
-        }
-        return tupleIndex;
-    }
-
-    @Override
-    public void insert(ITupleReference tuple, int tupleIndex) {
-        int freeSpace = buf.getInt(freeSpaceOff);
-        slotManager.insertSlot(tupleIndex, freeSpace);
-        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), freeSpace);
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
-    }
-
-    @Override
-    public void insertSorted(ITupleReference tuple) {
-        insert(tuple, slotManager.getGreatestKeyIndicator());
-    }
-
-    @Override
-    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) {
-        ByteBuffer right = rightFrame.getBuffer();
-        int tupleCount = getTupleCount();
-
-        // Find split point, and determine into which frame the new tuple should
-        // be inserted into.
-        int tuplesToLeft;
-        ITreeIndexFrame targetFrame = null;
-        int totalSize = 0;
-        int halfPageSize = buf.capacity() / 2 - getPageHeaderSize();
-        int i;
-        for (i = 0; i < tupleCount; ++i) {
-            frameTuple.resetByTupleIndex(this, i);
-            totalSize += tupleWriter.getCopySpaceRequired(frameTuple) + slotManager.getSlotSize();
-            if (totalSize >= halfPageSize) {
-                break;
-            }
-        }
-
-        if (cmp.compare(tuple, frameTuple) >= 0) {
-            tuplesToLeft = i + 1;
-            targetFrame = rightFrame;
-        } else {
-            tuplesToLeft = i;
-            targetFrame = this;
-        }
-        int tuplesToRight = tupleCount - tuplesToLeft;
-
-        // Copy entire page.
-        System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
-
-        // On the right page we need to copy rightmost slots to the left.
-        int src = rightFrame.getSlotManager().getSlotEndOff();
-        int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
-                * rightFrame.getSlotManager().getSlotSize();
-        int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
-        System.arraycopy(right.array(), src, right.array(), dest, length);
-        right.putInt(tupleCountOff, tuplesToRight);
-
-        // On left page only change the tupleCount indicator.
-        buf.putInt(tupleCountOff, tuplesToLeft);
-
-        // Compact both pages.
-        rightFrame.compact();
-        compact();
-
-        // Insert the new tuple.
-        int targetTupleIndex;
-        // it's safe to catch this exception since it will have been caught
-        // before reaching here
-        try {
-            targetTupleIndex = ((BTreeNSMLeafFrame) targetFrame).findInsertTupleIndex(tuple);
-        } catch (TreeIndexException e) {
-            throw new IllegalStateException(e);
-        }
-        targetFrame.insert(tuple, targetTupleIndex);
-
-        // Set the split key to be highest key in the left page.
-        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
-        splitKey.initData(splitKeySize);
-        tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer().array(), 0);
-        splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
-    }
-
-    @Override
-    protected void resetSpaceParams() {
-        buf.putInt(freeSpaceOff, nextLeafOff + 4);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - (nextLeafOff + 4));
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return tupleWriter.createTupleReference();
-    }
-
-    @Override
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
-            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
-        return slotManager.findTupleIndex(searchKey, pageTuple, cmp, ftm, ftp);
-    }
-
-    @Override
-    public int getPageHeaderSize() {
-        return nextLeafOff + 4;
-    }
-
-    @Override
-    public boolean getSmFlag() {
-        return buf.get(smFlagOff) != 0;
-    }
-
-    @Override
-    public void setSmFlag(boolean smFlag) {
-        if (smFlag) {
-            buf.put(smFlagOff, (byte) 1);
-        } else {
-            buf.put(smFlagOff, (byte) 0);
-        }
-    }
-
-    @Override
-    public void setMultiComparator(MultiComparator cmp) {
-        this.cmp = cmp;
-    }
-
-    public void validate(PageValidationInfo pvi) throws HyracksDataException {
-        int tupleCount = getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            frameTuple.resetByTupleIndex(this, i);
-            if (!pvi.isLowRangeNull) {
-                assert cmp.compare(pvi.lowRangeTuple, frameTuple) < 0;
-            }
-
-            if (!pvi.isHighRangeNull) {
-                assert cmp.compare(pvi.highRangeTuple, frameTuple) >= 0;
-            }
-
-            if (i > 0) {
-                previousFt.resetByTupleIndex(this, i - 1);
-                assert cmp.compare(previousFt, frameTuple) < 0;
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
deleted file mode 100644
index b445fa8..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-
-public class BTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
-
-    public BTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
-        this.tupleWriterFactory = tupleWriterFactory;
-    }
-
-    @Override
-    public IBTreeLeafFrame createFrame() {
-        return new BTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter());
-    }
-
-    @Override
-    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
-        return tupleWriterFactory;
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
deleted file mode 100644
index e51ee99..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.frames.AbstractSlotManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public class OrderedSlotManager extends AbstractSlotManager {
-
-    @Override
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
-            FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
-        if (frame.getTupleCount() <= 0) {
-            return GREATEST_KEY_INDICATOR;
-        }
-
-        int mid;
-        int begin = 0;
-        int end = frame.getTupleCount() - 1;
-
-        while (begin <= end) {
-            mid = (begin + end) / 2;
-            frameTuple.resetByTupleIndex(frame, mid);
-
-            int cmp = multiCmp.compare(searchKey, frameTuple);
-            if (cmp < 0) {
-                end = mid - 1;
-            } else if (cmp > 0) {
-                begin = mid + 1;
-            } else {
-                if (mode == FindTupleMode.EXCLUSIVE) {
-                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
-                        begin = mid + 1;
-                    } else {
-                        end = mid - 1;
-                    }
-                } else {
-                    if (mode == FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS) {
-                        return ERROR_INDICATOR;
-                    } else {
-                        return mid;
-                    }
-                }
-            }
-        }
-
-        if (mode == FindTupleMode.EXACT) {
-            return ERROR_INDICATOR;
-        }
-
-        if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
-            if (begin > frame.getTupleCount() - 1) {
-                return GREATEST_KEY_INDICATOR;
-            }
-            frameTuple.resetByTupleIndex(frame, begin);
-            if (multiCmp.compare(searchKey, frameTuple) < 0) {
-                return begin;
-            } else {
-                return GREATEST_KEY_INDICATOR;
-            }
-        } else {
-            if (end < 0) {
-                return GREATEST_KEY_INDICATOR;
-            }
-            frameTuple.resetByTupleIndex(frame, end);
-            if (multiCmp.compare(searchKey, frameTuple) > 0) {
-                return end;
-            } else {
-                return GREATEST_KEY_INDICATOR;
-            }
-        }
-    }
-
-    @Override
-    public int insertSlot(int tupleIndex, int tupleOff) {
-        int slotOff = getSlotOff(tupleIndex);
-        if (tupleIndex == GREATEST_KEY_INDICATOR) {
-            slotOff = getSlotEndOff() - slotSize;
-            setSlot(slotOff, tupleOff);
-            return slotOff;
-        } else {
-            int slotEndOff = getSlotEndOff();
-            int length = (slotOff - slotEndOff) + slotSize;
-            System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize,
-                    length);
-            setSlot(slotOff, tupleOff);
-            return slotOff;
-        }
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
deleted file mode 100644
index 86bc32a..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ /dev/null
@@ -1,1082 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.ITupleAcceptor;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.UnsortedInputException;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.impls.AbstractTreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.impls.NodeFrontier;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class BTree extends AbstractTreeIndex {
-
-    public static final float DEFAULT_FILL_FACTOR = 0.7f;
-
-    private final static long RESTART_OP = Long.MIN_VALUE;
-    private final static long FULL_RESTART_OP = Long.MIN_VALUE + 1;
-    private final static int MAX_RESTARTS = 10;
-
-    private final AtomicInteger smoCounter;
-    private final ReadWriteLock treeLatch;
-
-    public BTree(IBufferCache bufferCache, IFileMapProvider fileMapProvider, IFreePageManager freePageManager,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
-            IBinaryComparatorFactory[] cmpFactories, int fieldCount, FileReference file) {
-        super(bufferCache, fileMapProvider, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories,
-                fieldCount, file);
-        this.treeLatch = new ReentrantReadWriteLock(true);
-        this.smoCounter = new AtomicInteger();
-    }
-
-    private void diskOrderScan(ITreeIndexCursor icursor, BTreeOpContext ctx) throws HyracksDataException {
-        TreeIndexDiskOrderScanCursor cursor = (TreeIndexDiskOrderScanCursor) icursor;
-        ctx.reset();
-        RangePredicate diskOrderScanPred = new RangePredicate(null, null, true, true, ctx.cmp, ctx.cmp);
-        int currentPageId = rootPage;
-        int maxPageId = freePageManager.getMaxPage(ctx.metaFrame);
-        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
-        page.acquireReadLatch();
-        try {
-            cursor.setBufferCache(bufferCache);
-            cursor.setFileId(fileId);
-            cursor.setCurrentPageId(currentPageId);
-            cursor.setMaxPageId(maxPageId);
-            ctx.cursorInitialState.setPage(page);
-            ctx.cursorInitialState.setSearchOperationCallback(ctx.searchCallback);
-            ctx.cursorInitialState.setOriginialKeyComparator(ctx.cmp);
-            cursor.open(ctx.cursorInitialState, diskOrderScanPred);
-        } catch (Exception e) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-            throw new HyracksDataException(e);
-        }
-    }
-
-    public void validate() throws HyracksDataException {
-        // Stack validation protocol:
-        //      * parent pushes the validation information onto the stack before validation
-        //      * child pops the validation information off of the stack after validating
-        BTreeAccessor accessor = (BTreeAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-        PageValidationInfo pvi = accessor.ctx.createPageValidationInfo(null);
-        accessor.ctx.validationInfos.addFirst(pvi);
-        validate(accessor.ctx, rootPage);
-    }
-
-    private void validate(BTreeOpContext ctx, int pageId) throws HyracksDataException {
-        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        ctx.interiorFrame.setPage(page);
-        PageValidationInfo currentPvi = ctx.validationInfos.peekFirst();
-
-        boolean isLeaf = ctx.interiorFrame.isLeaf();
-        if (isLeaf) {
-            ctx.leafFrame.setPage(page);
-            ctx.leafFrame.validate(currentPvi);
-        } else {
-            PageValidationInfo nextPvi = ctx.createPageValidationInfo(currentPvi);
-            List<Integer> children = ((BTreeNSMInteriorFrame) ctx.interiorFrame).getChildren(ctx.cmp);
-            ctx.interiorFrame.validate(currentPvi);
-            for (int i = 0; i < children.size(); i++) {
-                ctx.interiorFrame.setPage(page);
-
-                if (children.size() == 1) {
-                    // There is a single child pointer with no keys, so propagate both low and high ranges
-                    nextPvi.propagateLowRangeKey(currentPvi);
-                    nextPvi.propagateHighRangeKey(currentPvi);
-                } else if (i == 0) {
-                    // There is more than one child pointer and this is the left-most child pointer, so:
-                    //      1) propagate the low range key from the parent
-                    //      2) adjust the high range key
-                    nextPvi.propagateLowRangeKey(currentPvi);
-                    ctx.interiorFrameTuple.resetByTupleIndex(ctx.interiorFrame, i);
-                    nextPvi.adjustHighRangeKey(ctx.interiorFrameTuple);
-                } else if (i == children.size() - 1) {
-                    // There is more than one child pointer and this is the right-most child pointer, so:
-                    //      1) propagate the high range key from the parent
-                    //      2) adjust the low range key
-                    nextPvi.propagateHighRangeKey(currentPvi);
-                    ctx.interiorFrameTuple.resetByTupleIndex(ctx.interiorFrame, i - 1);
-                    nextPvi.adjustLowRangeKey(ctx.interiorFrameTuple);
-                } else {
-                    // There is more than one child pointer and this pointer is not the left/right-most pointer, so:
-                    //      1) adjust the low range key
-                    //      2) adjust the high range key
-                    ctx.interiorFrameTuple.resetByTupleIndex(ctx.interiorFrame, i - 1);
-                    nextPvi.adjustLowRangeKey(ctx.interiorFrameTuple);
-                    ctx.interiorFrameTuple.resetByTupleIndex(ctx.interiorFrame, i);
-                    nextPvi.adjustHighRangeKey(ctx.interiorFrameTuple);
-                }
-
-                ctx.validationInfos.addFirst(nextPvi);
-                validate(ctx, children.get(i));
-            }
-        }
-        bufferCache.unpin(page);
-        ctx.validationInfos.removeFirst();
-    }
-
-    private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, BTreeOpContext ctx)
-            throws TreeIndexException, HyracksDataException {
-        ctx.reset();
-        ctx.pred = (RangePredicate) searchPred;
-        ctx.cursor = cursor;
-        // simple index scan
-        if (ctx.pred.getLowKeyComparator() == null) {
-            ctx.pred.setLowKeyComparator(ctx.cmp);
-        }
-        if (ctx.pred.getHighKeyComparator() == null) {
-            ctx.pred.setHighKeyComparator(ctx.cmp);
-        }
-        // we use this loop to deal with possibly multiple operation restarts
-        // due to ongoing structure modifications during the descent
-        boolean repeatOp = true;
-        while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
-            performOp(rootPage, null, true, ctx);
-            // if we reach this stage then we need to restart from the (possibly
-            // new) root
-            if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
-                ctx.pageLsns.removeLast(); // pop the restart op indicator
-                continue;
-            }
-            repeatOp = false;
-        }
-        cursor.setBufferCache(bufferCache);
-        cursor.setFileId(fileId);
-    }
-
-    private void unsetSmPages(BTreeOpContext ctx) throws HyracksDataException {
-        ICachedPage originalPage = ctx.interiorFrame.getPage();
-        for (int i = 0; i < ctx.smPages.size(); i++) {
-            int pageId = ctx.smPages.get(i);
-            ICachedPage smPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            smPage.acquireWriteLatch();
-            try {
-                ctx.interiorFrame.setPage(smPage);
-                ctx.interiorFrame.setSmFlag(false);
-            } finally {
-                smPage.releaseWriteLatch();
-                bufferCache.unpin(smPage);
-            }
-        }
-        if (ctx.smPages.size() > 0) {
-            if (ctx.smoCount == Integer.MAX_VALUE) {
-                smoCounter.set(0);
-            } else {
-                smoCounter.incrementAndGet();
-            }
-            treeLatch.writeLock().unlock();
-            ctx.smPages.clear();
-        }
-        ctx.interiorFrame.setPage(originalPage);
-    }
-
-    private void createNewRoot(BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        // Make sure the root is always in the same page.
-        ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()),
-                false);
-        leftNode.acquireWriteLatch();
-        try {
-            int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
-            ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
-            newLeftNode.acquireWriteLatch();
-            try {
-                // Copy left child to new left child.
-                System.arraycopy(leftNode.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0, newLeftNode
-                        .getBuffer().capacity());
-                ctx.interiorFrame.setPage(newLeftNode);
-                ctx.interiorFrame.setSmFlag(false);
-                // Remember LSN to set it in the root.
-                long leftNodeLSN = ctx.interiorFrame.getPageLsn();
-                // Initialize new root (leftNode becomes new root).
-                ctx.interiorFrame.setPage(leftNode);
-                ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
-                // Copy over LSN.
-                ctx.interiorFrame.setPageLsn(leftNodeLSN);
-                // Will be cleared later in unsetSmPages.
-                ctx.interiorFrame.setSmFlag(true);
-                ctx.splitKey.setLeftPage(newLeftId);
-                int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(ctx.splitKey.getTuple());
-                ctx.interiorFrame.insert(ctx.splitKey.getTuple(), targetTupleIndex);
-            } finally {
-                newLeftNode.releaseWriteLatch();
-                bufferCache.unpin(newLeftNode);
-            }
-        } finally {
-            leftNode.releaseWriteLatch();
-            bufferCache.unpin(leftNode);
-        }
-    }
-
-    private void insertUpdateOrDelete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException,
-            TreeIndexException {
-        ctx.reset();
-        ctx.pred.setLowKeyComparator(ctx.cmp);
-        ctx.pred.setHighKeyComparator(ctx.cmp);
-        ctx.pred.setLowKey(tuple, true);
-        ctx.pred.setHighKey(tuple, true);
-        ctx.splitKey.reset();
-        ctx.splitKey.getTuple().setFieldCount(ctx.cmp.getKeyFieldCount());
-        // We use this loop to deal with possibly multiple operation restarts
-        // due to ongoing structure modifications during the descent.
-        boolean repeatOp = true;
-        while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
-            ctx.smoCount = smoCounter.get();
-            performOp(rootPage, null, true, ctx);
-            // Do we need to restart from the (possibly new) root?
-            if (!ctx.pageLsns.isEmpty()) {
-                if (ctx.pageLsns.getLast() == FULL_RESTART_OP) {
-                    ctx.pageLsns.clear();
-                    continue;
-                } else if (ctx.pageLsns.getLast() == RESTART_OP) {
-                    ctx.pageLsns.removeLast(); // pop the restart op indicator
-                    continue;
-                }
-
-            }
-            // Split key propagated?
-            if (ctx.splitKey.getBuffer() != null) {
-                // Insert or update op. Create a new root.
-                createNewRoot(ctx);
-            }
-            unsetSmPages(ctx);
-            repeatOp = false;
-        }
-
-        if (ctx.opRestarts >= MAX_RESTARTS) {
-            throw new BTreeException("Operation exceeded the maximum number of restarts");
-        }
-    }
-
-    private void insert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
-    private void upsert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
-    private void update(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        // This call only allows updating of non-key fields.
-        // Updating a tuple's key necessitates deleting the old entry, and inserting the new entry.
-        // The user of the BTree is responsible for dealing with non-key updates (i.e., doing a delete + insert). 
-        if (fieldCount == ctx.cmp.getKeyFieldCount()) {
-            throw new BTreeNotUpdateableException("Cannot perform updates when the entire tuple forms the key.");
-        }
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
-    private void delete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
-    private boolean insertLeaf(ITupleReference tuple, int targetTupleIndex, int pageId, BTreeOpContext ctx)
-            throws Exception {
-        boolean restartOp = false;
-        FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
-        switch (spaceStatus) {
-            case SUFFICIENT_CONTIGUOUS_SPACE: {
-                ctx.modificationCallback.found(null, tuple);
-                ctx.leafFrame.insert(tuple, targetTupleIndex);
-                ctx.splitKey.reset();
-                break;
-            }
-            case SUFFICIENT_SPACE: {
-                boolean slotsChanged = ctx.leafFrame.compact();
-                if (slotsChanged) {
-                    targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
-                }
-                ctx.modificationCallback.found(null, tuple);
-                ctx.leafFrame.insert(tuple, targetTupleIndex);
-                ctx.splitKey.reset();
-                break;
-            }
-            case INSUFFICIENT_SPACE: {
-                // Try compressing the page first and see if there is space available.
-                boolean reCompressed = ctx.leafFrame.compress();
-                if (reCompressed) {
-                    // Compression could have changed the target tuple index, find it again.
-                    targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
-                    spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
-                }
-                if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
-                    ctx.modificationCallback.found(null, tuple);
-                    ctx.leafFrame.insert(tuple, targetTupleIndex);
-                    ctx.splitKey.reset();
-                } else {
-                    restartOp = performLeafSplit(pageId, tuple, ctx, -1);
-                }
-                break;
-            }
-        }
-        return restartOp;
-    }
-
-    private boolean performLeafSplit(int pageId, ITupleReference tuple, BTreeOpContext ctx, int updateTupleIndex)
-            throws Exception {
-        // We must never hold a latch on a page while waiting to obtain the tree
-        // latch, because it this could lead to a latch-deadlock.
-        // If we can't get the tree latch, we return, release our page latches,
-        // and restart the operation from one level above.
-        // Lock is released in unsetSmPages(), after sm has fully completed.
-        if (!treeLatch.writeLock().tryLock()) {
-            return true;
-        } else {
-            int tempSmoCount = smoCounter.get();
-            if (tempSmoCount != ctx.smoCount) {
-                treeLatch.writeLock().unlock();
-                return true;
-            }
-        }
-        int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
-        ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
-        rightNode.acquireWriteLatch();
-        try {
-            IBTreeLeafFrame rightFrame = ctx.createLeafFrame();
-            rightFrame.setPage(rightNode);
-            rightFrame.initBuffer((byte) 0);
-            rightFrame.setMultiComparator(ctx.cmp);
-
-            // Perform an update (delete + insert) if the updateTupleIndex != -1
-            if (updateTupleIndex != -1) {
-                ITupleReference beforeTuple = ctx.leafFrame.getMatchingKeyTuple(tuple, updateTupleIndex);
-                ctx.modificationCallback.found(beforeTuple, tuple);
-                ctx.leafFrame.delete(tuple, updateTupleIndex);
-            } else {
-                ctx.modificationCallback.found(null, tuple);
-            }
-            ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
-
-            ctx.smPages.add(pageId);
-            ctx.smPages.add(rightPageId);
-            ctx.leafFrame.setSmFlag(true);
-            rightFrame.setSmFlag(true);
-
-            rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
-            ctx.leafFrame.setNextLeaf(rightPageId);
-
-            rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
-            ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
-
-            ctx.splitKey.setPages(pageId, rightPageId);
-        } catch (Exception e) {
-            treeLatch.writeLock().unlock();
-            throw e;
-        } finally {
-            rightNode.releaseWriteLatch();
-            bufferCache.unpin(rightNode);
-        }
-        return false;
-    }
-
-    private boolean updateLeaf(ITupleReference tuple, int oldTupleIndex, int pageId, BTreeOpContext ctx)
-            throws Exception {
-        FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceUpdate(tuple, oldTupleIndex);
-        ITupleReference beforeTuple = ctx.leafFrame.getMatchingKeyTuple(tuple, oldTupleIndex);
-        boolean restartOp = false;
-        switch (spaceStatus) {
-            case SUFFICIENT_INPLACE_SPACE: {
-                ctx.modificationCallback.found(beforeTuple, tuple);
-                ctx.leafFrame.update(tuple, oldTupleIndex, true);
-                ctx.splitKey.reset();
-                break;
-            }
-            case SUFFICIENT_CONTIGUOUS_SPACE: {
-                ctx.modificationCallback.found(beforeTuple, tuple);
-                ctx.leafFrame.update(tuple, oldTupleIndex, false);
-                ctx.splitKey.reset();
-                break;
-            }
-            case SUFFICIENT_SPACE: {
-                // Delete the old tuple, compact the frame, and insert the new tuple.
-                ctx.modificationCallback.found(beforeTuple, tuple);
-                ctx.leafFrame.delete(tuple, oldTupleIndex);
-                ctx.leafFrame.compact();
-                int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
-                ctx.leafFrame.insert(tuple, targetTupleIndex);
-                ctx.splitKey.reset();
-                break;
-            }
-            case INSUFFICIENT_SPACE: {
-                restartOp = performLeafSplit(pageId, tuple, ctx, oldTupleIndex);
-                break;
-            }
-        }
-        return restartOp;
-    }
-
-    private boolean upsertLeaf(ITupleReference tuple, int targetTupleIndex, int pageId, BTreeOpContext ctx)
-            throws Exception {
-        boolean restartOp = false;
-        ITupleReference beforeTuple = ctx.leafFrame.getMatchingKeyTuple(tuple, targetTupleIndex);
-        if (ctx.acceptor.accept(beforeTuple)) {
-            if (beforeTuple == null) {
-                restartOp = insertLeaf(tuple, targetTupleIndex, pageId, ctx);
-            } else {
-                restartOp = updateLeaf(tuple, targetTupleIndex, pageId, ctx);
-            }
-        } else {
-            targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
-            restartOp = insertLeaf(tuple, targetTupleIndex, pageId, ctx);
-        }
-        return restartOp;
-    }
-
    /**
     * Inserts a propagated split key into an interior node, splitting the
     * interior node itself if there is insufficient space even after
     * compaction.
     *
     * On a split, both page ids are recorded in {@code ctx.smPages} and the
     * pages' SM flags are set so concurrent descents detect the ongoing
     * structure modification; {@code ctx.splitKey} is re-used to carry the
     * new separator further up the tree. On a successful in-place insert,
     * {@code ctx.splitKey} is reset to stop propagation.
     *
     * @param node   the pinned, write-latched interior page
     * @param pageId disk page id of {@code node}
     * @param tuple  the separator key tuple to insert
     * @param ctx    per-operation context (frames, split key, SM bookkeeping)
     */
    private void insertInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
            throws Exception {
        ctx.interiorFrame.setPage(node);
        int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(tuple);
        FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple);
        switch (spaceStatus) {
            case INSUFFICIENT_SPACE: {
                // Not enough room even after compaction: allocate a new right
                // sibling and split this interior node.
                int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
                ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
                rightNode.acquireWriteLatch();
                try {
                    IBTreeFrame rightFrame = ctx.createInteriorFrame();
                    rightFrame.setPage(rightNode);
                    rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
                    rightFrame.setMultiComparator(ctx.cmp);
                    // instead of creating a new split key, use the existing
                    // splitKey
                    ctx.interiorFrame.split(rightFrame, ctx.splitKey.getTuple(), ctx.splitKey);
                    // Mark both halves as part of an ongoing structure
                    // modification so concurrent descents restart on them.
                    ctx.smPages.add(pageId);
                    ctx.smPages.add(rightPageId);
                    ctx.interiorFrame.setSmFlag(true);
                    rightFrame.setSmFlag(true);
                    // Bump the page LSNs so descents that saw the old version
                    // detect the change on their way back up.
                    rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
                    ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1);

                    ctx.splitKey.setPages(pageId, rightPageId);
                } finally {
                    rightNode.releaseWriteLatch();
                    bufferCache.unpin(rightNode);
                }
                break;
            }

            case SUFFICIENT_CONTIGUOUS_SPACE: {
                // Fits as-is; stop split-key propagation at this level.
                ctx.interiorFrame.insert(tuple, targetTupleIndex);
                ctx.splitKey.reset();
                break;
            }

            case SUFFICIENT_SPACE: {
                // Space exists but is fragmented: compact first. Compaction
                // may move slots, so the insert position must be recomputed.
                boolean slotsChanged = ctx.interiorFrame.compact();
                if (slotsChanged) {
                    targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(tuple);
                }
                ctx.interiorFrame.insert(tuple, targetTupleIndex);
                ctx.splitKey.reset();
                break;
            }
        }
    }
-
-    private boolean deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
-            throws Exception {
-        // Simply delete the tuple, and don't do any rebalancing.
-        // This means that there could be underflow, even an empty page that is
-        // pointed to by an interior node.
-        if (ctx.leafFrame.getTupleCount() == 0) {
-            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
-        }
-        int tupleIndex = ctx.leafFrame.findDeleteTupleIndex(tuple);
-        ITupleReference beforeTuple = ctx.leafFrame.getMatchingKeyTuple(tuple, tupleIndex);
-        ctx.modificationCallback.found(beforeTuple, tuple);
-        ctx.leafFrame.delete(tuple, tupleIndex);
-        return false;
-    }
-
-    private final boolean acquireLatch(ICachedPage node, BTreeOpContext ctx, boolean isLeaf) {
-        if (!isLeaf || (ctx.op == IndexOperation.SEARCH && !ctx.cursor.exclusiveLatchNodes())) {
-            node.acquireReadLatch();
-            return true;
-        } else {
-            node.acquireWriteLatch();
-            return false;
-        }
-    }
-
-    private ICachedPage isConsistent(int pageId, BTreeOpContext ctx) throws Exception {
-        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        node.acquireReadLatch();
-        ctx.interiorFrame.setPage(node);
-        boolean isConsistent = ctx.pageLsns.getLast() == ctx.interiorFrame.getPageLsn();
-        if (!isConsistent) {
-            node.releaseReadLatch();
-            bufferCache.unpin(node);
-            return null;
-        }
-        return node;
-    }
-
    /**
     * Core recursive descent driving every BTree operation (INSERT, UPSERT,
     * UPDATE, DELETE, SEARCH, and scan setup).
     *
     * Concurrency protocol (as visible in this method):
     * - Latch coupling: a node is pinned and latched before the parent's
     *   latch is released.
     * - Every page's pageLsn seen on the way down is pushed on
     *   {@code ctx.pageLsns}; the sentinels {@code RESTART_OP} and
     *   {@code FULL_RESTART_OP} are pushed in place of an LSN to tell callers
     *   to restart from the deepest still-consistent page, or from the root,
     *   respectively.
     * - A set SM flag means a structure modification is in flight below; the
     *   operation backs out, waits on {@code treeLatch}, and restarts.
     *
     * @param pageId              page to visit
     * @param parent              pinned, latched parent page (released here), or null at the root
     * @param parentIsReadLatched whether {@code parent} holds a read latch (vs. write)
     * @param ctx                 per-operation context carrying frames, predicate, and restart state
     */
    private void performOp(int pageId, ICachedPage parent, boolean parentIsReadLatched, BTreeOpContext ctx)
            throws HyracksDataException, TreeIndexException {
        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
        ctx.interiorFrame.setPage(node);
        // this check performs an unprotected read in the page
        // the following could happen: TODO fill out
        boolean unsafeIsLeaf = ctx.interiorFrame.isLeaf();
        boolean isReadLatched = acquireLatch(node, ctx, unsafeIsLeaf);
        boolean smFlag = ctx.interiorFrame.getSmFlag();
        // re-check leafness after latching
        boolean isLeaf = ctx.interiorFrame.isLeaf();

        // remember trail of pageLsns, to unwind recursion in case of an ongoing
        // structure modification
        ctx.pageLsns.add(ctx.interiorFrame.getPageLsn());
        try {
            // Latch coupling: unlatch parent.
            if (parent != null) {
                if (parentIsReadLatched) {
                    parent.releaseReadLatch();
                } else {
                    parent.releaseWriteLatch();
                }
                bufferCache.unpin(parent);
            }
            if (!isLeaf || smFlag) {
                if (!smFlag) {
                    // We use this loop to deal with possibly multiple operation
                    // restarts due to ongoing structure modifications during
                    // the descent.
                    boolean repeatOp = true;
                    while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
                        int childPageId = ctx.interiorFrame.getChildPageId(ctx.pred);
                        performOp(childPageId, node, isReadLatched, ctx);

                        if (!ctx.pageLsns.isEmpty()) {
                            if (ctx.pageLsns.getLast() == FULL_RESTART_OP) {
                                // Restart from the root; just unwind.
                                break;
                            } else if (ctx.pageLsns.getLast() == RESTART_OP) {
                                // Pop the restart op indicator.
                                ctx.pageLsns.removeLast();
                                node = isConsistent(pageId, ctx);
                                if (node != null) {
                                    isReadLatched = true;
                                    // Descend the tree again.
                                    continue;
                                } else {
                                    // Pop pageLsn of this page (version seen by this op during descent).
                                    ctx.pageLsns.removeLast();
                                    // This node is not consistent set the restart indicator for upper level.
                                    ctx.pageLsns.add(RESTART_OP);
                                    break;
                                }
                            }
                        }

                        switch (ctx.op) {
                            case INSERT:
                            case UPSERT:
                            case UPDATE: {
                                // Is there a propagated split key?
                                if (ctx.splitKey.getBuffer() != null) {
                                    ICachedPage interiorNode = bufferCache.pin(
                                            BufferedFileHandle.getDiskPageId(fileId, pageId), false);
                                    interiorNode.acquireWriteLatch();
                                    try {
                                        // Insert or update op. Both can cause split keys to propagate upwards.
                                        insertInterior(interiorNode, pageId, ctx.splitKey.getTuple(), ctx);
                                    } finally {
                                        interiorNode.releaseWriteLatch();
                                        bufferCache.unpin(interiorNode);
                                    }
                                } else {
                                    // No split propagated: clear SM flags set lower down.
                                    unsetSmPages(ctx);
                                }
                                break;
                            }

                            case DELETE: {
                                if (ctx.splitKey.getBuffer() != null) {
                                    throw new BTreeException(
                                            "Split key was propagated during delete. Delete allows empty leaf pages.");
                                }
                                break;
                            }

                            default: {
                                // Do nothing for Search and DiskOrderScan.
                                break;
                            }
                        }
                        // Operation completed.
                        repeatOp = false;
                    } // end while
                } else { // smFlag
                    // A structure modification is in progress on this page:
                    // back out and restart after the SMO completes.
                    ctx.opRestarts++;
                    if (isReadLatched) {
                        node.releaseReadLatch();
                    } else {
                        node.releaseWriteLatch();
                    }
                    bufferCache.unpin(node);

                    // TODO: this should be an instant duration lock, how to do
                    // this in java?
                    // instead we just immediately release the lock. this is
                    // inefficient but still correct and will not cause
                    // latch-deadlock
                    treeLatch.readLock().lock();
                    treeLatch.readLock().unlock();

                    // unwind recursion and restart operation, find lowest page
                    // with a pageLsn as seen by this operation during descent
                    ctx.pageLsns.removeLast(); // pop current page lsn
                    // put special value on the stack to inform caller of
                    // restart
                    ctx.pageLsns.add(RESTART_OP);
                }
            } else { // isLeaf and !smFlag
                // We may have to restart an op to avoid latch deadlock.
                boolean restartOp = false;
                ctx.leafFrame.setPage(node);
                switch (ctx.op) {
                    case INSERT: {
                        int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(ctx.pred.getLowKey());
                        restartOp = insertLeaf(ctx.pred.getLowKey(), targetTupleIndex, pageId, ctx);
                        break;
                    }
                    case UPSERT: {
                        int targetTupleIndex = ctx.leafFrame.findUpsertTupleIndex(ctx.pred.getLowKey());
                        restartOp = upsertLeaf(ctx.pred.getLowKey(), targetTupleIndex, pageId, ctx);
                        break;
                    }
                    case UPDATE: {
                        int oldTupleIndex = ctx.leafFrame.findUpdateTupleIndex(ctx.pred.getLowKey());
                        restartOp = updateLeaf(ctx.pred.getLowKey(), oldTupleIndex, pageId, ctx);
                        break;
                    }
                    case DELETE: {
                        restartOp = deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
                        break;
                    }
                    case SEARCH: {
                        // Hand the latched leaf off to the cursor; the cursor
                        // becomes responsible for releasing/unpinning it.
                        ctx.cursorInitialState.setSearchOperationCallback(ctx.searchCallback);
                        ctx.cursorInitialState.setOriginialKeyComparator(ctx.cmp);
                        ctx.cursorInitialState.setPage(node);
                        ctx.cursorInitialState.setPageId(pageId);
                        ctx.cursor.open(ctx.cursorInitialState, ctx.pred);
                        break;
                    }
                }
                if (ctx.op != IndexOperation.SEARCH) {
                    node.releaseWriteLatch();
                    bufferCache.unpin(node);
                }
                if (restartOp) {
                    // Wait for the SMO to finish before restarting.
                    treeLatch.readLock().lock();
                    treeLatch.readLock().unlock();
                    ctx.pageLsns.removeLast();
                    ctx.pageLsns.add(FULL_RESTART_OP);
                }
            }
        } catch (TreeIndexException e) {
            // Release this level's latch/pin exactly once, then rethrow; the
            // exceptionHandled flag keeps outer recursion levels from
            // double-releasing.
            if (!ctx.exceptionHandled) {
                if (node != null) {
                    if (isReadLatched) {
                        node.releaseReadLatch();
                    } else {
                        node.releaseWriteLatch();
                    }
                    bufferCache.unpin(node);
                    ctx.exceptionHandled = true;
                }
            }
            throw e;
        } catch (Exception e) {
            // NOTE(review): printStackTrace() here bypasses any logging setup;
            // consider removing it since the exception is rethrown wrapped.
            e.printStackTrace();
            if (node != null) {
                if (isReadLatched) {
                    node.releaseReadLatch();
                } else {
                    node.releaseWriteLatch();
                }
                bufferCache.unpin(node);
            }
            BTreeException wrappedException = new BTreeException(e);
            ctx.exceptionHandled = true;
            throw wrappedException;
        }
    }
-
-    private BTreeOpContext createOpContext(IIndexAccessor accessor,
-            IModificationOperationCallback modificationCallback, ISearchOperationCallback searchCallback) {
-        return new BTreeOpContext(accessor, leafFrameFactory, interiorFrameFactory, freePageManager
-                .getMetaDataFrameFactory().createFrame(), cmpFactories, modificationCallback, searchCallback);
-    }
-
-    @SuppressWarnings("rawtypes")
-    public String printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
-            ISerializerDeserializer[] keySerdes) throws Exception {
-        MultiComparator cmp = MultiComparator.create(cmpFactories);
-        byte treeHeight = getTreeHeight(leafFrame);
-        StringBuilder strBuilder = new StringBuilder();
-        printTree(rootPage, null, false, leafFrame, interiorFrame, treeHeight, keySerdes, strBuilder, cmp);
-        return strBuilder.toString();
-    }
-
-    @SuppressWarnings("rawtypes")
-    public void printTree(int pageId, ICachedPage parent, boolean unpin, IBTreeLeafFrame leafFrame,
-            IBTreeInteriorFrame interiorFrame, byte treeHeight, ISerializerDeserializer[] keySerdes,
-            StringBuilder strBuilder, MultiComparator cmp) throws Exception {
-        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        node.acquireReadLatch();
-        try {
-            if (parent != null && unpin == true) {
-                parent.releaseReadLatch();
-                bufferCache.unpin(parent);
-            }
-            interiorFrame.setPage(node);
-            int level = interiorFrame.getLevel();
-            strBuilder.append(String.format("%1d ", level));
-            strBuilder.append(String.format("%3d ", pageId) + ": ");
-            for (int i = 0; i < treeHeight - level; i++) {
-                strBuilder.append("    ");
-            }
-
-            String keyString;
-            if (interiorFrame.isLeaf()) {
-                leafFrame.setPage(node);
-                keyString = printLeafFrameTuples(leafFrame, keySerdes);
-            } else {
-                keyString = printInteriorFrameTuples(interiorFrame, keySerdes);
-            }
-
-            strBuilder.append(keyString + "\n");
-            if (!interiorFrame.isLeaf()) {
-                ArrayList<Integer> children = ((BTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
-                for (int i = 0; i < children.size(); i++) {
-                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, treeHeight,
-                            keySerdes, strBuilder, cmp);
-                }
-            } else {
-                node.releaseReadLatch();
-                bufferCache.unpin(node);
-            }
-        } catch (Exception e) {
-            node.releaseReadLatch();
-            bufferCache.unpin(node);
-            e.printStackTrace();
-        }
-    }
-
-    @Override
-    public ITreeIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new BTreeAccessor(this, modificationCallback, searchCallback);
-    }
-
-    // TODO: Class should be private. But currently we need to expose the
-    // setOpContext() API to the LSM Tree for it to work correctly.
-    public class BTreeAccessor implements ITreeIndexAccessor {
-        private BTree btree;
-        private BTreeOpContext ctx;
-
-        public BTreeAccessor(BTree btree, IModificationOperationCallback modificationCalback,
-                ISearchOperationCallback searchCallback) {
-            this.btree = btree;
-            this.ctx = btree.createOpContext(this, modificationCalback, searchCallback);
-        }
-
-        @Override
-        public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            ctx.setOperation(IndexOperation.INSERT);
-            btree.insert(tuple, ctx);
-        }
-
-        @Override
-        public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            ctx.setOperation(IndexOperation.UPDATE);
-            btree.update(tuple, ctx);
-        }
-
-        @Override
-        public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            ctx.setOperation(IndexOperation.DELETE);
-            btree.delete(tuple, ctx);
-        }
-
-        @Override
-        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            upsertIfConditionElseInsert(tuple, UnconditionalTupleAcceptor.INSTANCE);
-        }
-
-        public void upsertIfConditionElseInsert(ITupleReference tuple, ITupleAcceptor acceptor)
-                throws HyracksDataException, TreeIndexException {
-            ctx.setOperation(IndexOperation.UPSERT);
-            ctx.acceptor = acceptor;
-            btree.upsert(tuple, ctx);
-        }
-
-        @Override
-        public ITreeIndexCursor createSearchCursor() {
-            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
-            return new BTreeRangeSearchCursor(leafFrame, false);
-        }
-
-        @Override
-        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
-                TreeIndexException {
-            ctx.setOperation(IndexOperation.SEARCH);
-            btree.search((ITreeIndexCursor) cursor, searchPred, ctx);
-        }
-
-        @Override
-        public ITreeIndexCursor createDiskOrderScanCursor() {
-            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
-            return new TreeIndexDiskOrderScanCursor(leafFrame);
-        }
-
-        @Override
-        public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException {
-            ctx.setOperation(IndexOperation.DISKORDERSCAN);
-            btree.diskOrderScan(cursor, ctx);
-        }
-
-        // TODO: Ideally, this method should not exist. But we need it for
-        // the changing the leafFrame and leafFrameFactory of the op context for
-        // the LSM-BTree to work correctly.
-        public BTreeOpContext getOpContext() {
-            return ctx;
-        }
-
-        public ITreeIndexCursor createCountingSearchCursor() {
-            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
-            return new BTreeCountingSearchCursor(leafFrame, false);
-        }
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-            throws TreeIndexException {
-        try {
-            return new BTreeBulkLoader(fillFactor, verifyInput);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
-    }
-
    /**
     * Bottom-up bulk loader: tuples must arrive in strictly increasing key
     * order. Leaves are filled up to the fill factor; when a leaf overflows,
     * a separator key is carried upward through {@code propagateBulk}, which
     * builds the interior levels incrementally via per-level
     * {@code NodeFrontier}s.
     */
    public class BTreeBulkLoader extends AbstractTreeIndex.AbstractTreeIndexBulkLoader {
        // Reusable separator-key carrier between the leaf level and the
        // interior levels being built.
        protected final ISplitKey splitKey;
        // When true, each added tuple is checked to be strictly greater than
        // the previous one.
        protected final boolean verifyInput;

        public BTreeBulkLoader(float fillFactor, boolean verifyInput) throws TreeIndexException, HyracksDataException {
            super(fillFactor);
            this.verifyInput = verifyInput;
            splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
            splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
        }

        /**
         * Appends {@code tuple} to the current leaf, finishing the leaf and
         * propagating a separator key upward when the tuple does not fit.
         *
         * @throws UnsortedInputException (via verifyInputTuple) if input is
         *         not strictly sorted and verifyInput is enabled
         */
        @Override
        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
            try {
                NodeFrontier leafFrontier = nodeFrontiers.get(0);

                int spaceNeeded = tupleWriter.bytesRequired(tuple) + slotSize;
                int spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();

                // try to free space by compression
                if (spaceUsed + spaceNeeded > leafMaxBytes) {
                    leafFrame.compress();
                    spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
                }

                if (spaceUsed + spaceNeeded > leafMaxBytes) {
                    // Leaf is full: derive the separator key from its last
                    // (greatest) tuple, then start a new leaf.
                    leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame.getTupleCount() - 1);
                    if (verifyInput) {
                        verifyInputTuple(tuple, leafFrontier.lastTuple);
                    }
                    int splitKeySize = tupleWriter.bytesRequired(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount());
                    splitKey.initData(splitKeySize);
                    tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount(), splitKey
                            .getBuffer().array(), 0);
                    splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
                    splitKey.setLeftPage(leafFrontier.pageId);
                    leafFrontier.pageId = freePageManager.getFreePage(metaFrame);

                    // Chain the finished leaf to its successor before
                    // releasing it.
                    ((IBTreeLeafFrame) leafFrame).setNextLeaf(leafFrontier.pageId);
                    leafFrontier.page.releaseWriteLatch();
                    bufferCache.unpin(leafFrontier.page);

                    splitKey.setRightPage(leafFrontier.pageId);
                    propagateBulk(1);

                    leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
                            true);
                    leafFrontier.page.acquireWriteLatch();
                    leafFrame.setPage(leafFrontier.page);
                    leafFrame.initBuffer((byte) 0);
                } else {
                    if (verifyInput && leafFrame.getTupleCount() > 0) {
                        leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame.getTupleCount() - 1);
                        verifyInputTuple(tuple, leafFrontier.lastTuple);
                    }
                }

                leafFrame.setPage(leafFrontier.page);
                ((IBTreeLeafFrame) leafFrame).insertSorted(tuple);
            } catch (IndexException e) {
                handleException();
                throw e;
            } catch (HyracksDataException e) {
                handleException();
                throw e;
            } catch (RuntimeException e) {
                handleException();
                throw e;
            }
        }

        /**
         * Enforces strict sort order: the incoming tuple must compare
         * strictly greater than the previously added one.
         */
        protected void verifyInputTuple(ITupleReference tuple, ITupleReference prevTuple) throws IndexException,
                HyracksDataException {
            // New tuple should be strictly greater than last tuple.
            if (cmp.compare(tuple, prevTuple) <= 0) {
                throw new UnsortedInputException("Input stream given to BTree bulk load is not sorted.");
            }
        }

        /**
         * Pushes the pending {@code splitKey} into the interior frontier at
         * {@code level}, recursing upward (and creating new levels) whenever
         * an interior page fills up. No-op if there is no pending split key.
         */
        protected void propagateBulk(int level) throws HyracksDataException {
            if (splitKey.getBuffer() == null)
                return;

            if (level >= nodeFrontiers.size())
                addLevel();

            NodeFrontier frontier = nodeFrontiers.get(level);
            interiorFrame.setPage(frontier.page);

            ITupleReference tuple = splitKey.getTuple();
            // + 4 accounts for the child-pointer slot stored with each
            // interior entry.
            int spaceNeeded = tupleWriter.bytesRequired(tuple, 0, cmp.getKeyFieldCount()) + slotSize + 4;
            int spaceUsed = interiorFrame.getBuffer().capacity() - interiorFrame.getTotalFreeSpace();
            if (spaceUsed + spaceNeeded > interiorMaxBytes) {

                // Keep a copy of the incoming key: splitKey is about to be
                // overwritten with this level's own separator.
                ISplitKey copyKey = splitKey.duplicate(leafFrame.getTupleWriter().createTupleReference());
                tuple = copyKey.getTuple();

                frontier.lastTuple.resetByTupleIndex(interiorFrame, interiorFrame.getTupleCount() - 1);
                int splitKeySize = tupleWriter.bytesRequired(frontier.lastTuple, 0, cmp.getKeyFieldCount());
                splitKey.initData(splitKeySize);
                tupleWriter.writeTupleFields(frontier.lastTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer()
                        .array(), 0);
                splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
                splitKey.setLeftPage(frontier.pageId);

                // The greatest entry becomes the separator pushed up; its
                // child pointer becomes this page's rightmost pointer.
                ((IBTreeInteriorFrame) interiorFrame).deleteGreatest();

                frontier.page.releaseWriteLatch();
                bufferCache.unpin(frontier.page);
                frontier.pageId = freePageManager.getFreePage(metaFrame);

                splitKey.setRightPage(frontier.pageId);
                propagateBulk(level + 1);

                frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), true);
                frontier.page.acquireWriteLatch();
                interiorFrame.setPage(frontier.page);
                interiorFrame.initBuffer((byte) level);
            }
            ((IBTreeInteriorFrame) interiorFrame).insertSorted(tuple);
        }

    }
-
-    @SuppressWarnings("rawtypes")
-    public static String printLeafFrameTuples(IBTreeLeafFrame leafFrame, ISerializerDeserializer[] fieldSerdes)
-            throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        ITreeIndexTupleReference tuple = leafFrame.createTupleReference();
-        for (int i = 0; i < leafFrame.getTupleCount(); i++) {
-            tuple.resetByTupleIndex(leafFrame, i);
-            String tupleString = TupleUtils.printTuple(tuple, fieldSerdes);
-            strBuilder.append(tupleString + " | ");
-        }
-        // Print right link.
-        int rightPageId = leafFrame.getNextLeaf();
-        strBuilder.append("(" + rightPageId + ")");
-        return strBuilder.toString();
-    }
-
-    @SuppressWarnings("rawtypes")
-    public static String printInteriorFrameTuples(IBTreeInteriorFrame interiorFrame,
-            ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        ITreeIndexTupleReference tuple = interiorFrame.createTupleReference();
-        for (int i = 0; i < interiorFrame.getTupleCount(); i++) {
-            tuple.resetByTupleIndex(interiorFrame, i);
-            // Print child pointer.
-            int numFields = tuple.getFieldCount();
-            int childPageId = IntegerSerializerDeserializer.getInt(tuple.getFieldData(numFields - 1),
-                    tuple.getFieldStart(numFields - 1) + tuple.getFieldLength(numFields - 1));
-            strBuilder.append("(" + childPageId + ") ");
-            String tupleString = TupleUtils.printTuple(tuple, fieldSerdes);
-            strBuilder.append(tupleString + " | ");
-        }
-        // Print rightmost pointer.
-        int rightMostChildPageId = interiorFrame.getRightmostChildPageId();
-        strBuilder.append("(" + rightMostChildPageId + ")");
-        return strBuilder.toString();
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
deleted file mode 100644
index 0ed1dbe..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-
-public class BTreeCountingSearchCursor implements ITreeIndexCursor {
-
-    private int fileId = -1;
-    private ICachedPage page = null;
-    private IBufferCache bufferCache = null;
-
-    private int tupleIndex = 0;
-    private int stopTupleIndex;
-    private int count = -1;
-
-    private FindTupleMode lowKeyFtm;
-    private FindTupleMode highKeyFtm;
-
-    private FindTupleNoExactMatchPolicy lowKeyFtp;
-    private FindTupleNoExactMatchPolicy highKeyFtp;
-
-    private final IBTreeLeafFrame frame;
-    private final ITreeIndexTupleReference frameTuple;
-    private final boolean exclusiveLatchNodes;
-
-    private RangePredicate pred;
-    private MultiComparator lowKeyCmp;
-    private MultiComparator highKeyCmp;
-    private ITupleReference lowKey;
-    private ITupleReference highKey;
-
-    // For storing the count.
-    private byte[] countBuf = new byte[4];
-    private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
-    private ArrayTupleReference countTuple = new ArrayTupleReference();    
-    
-    public BTreeCountingSearchCursor(IBTreeLeafFrame frame, boolean exclusiveLatchNodes) {
-        this.frame = frame;
-        this.frameTuple = frame.createTupleReference();
-        this.exclusiveLatchNodes = exclusiveLatchNodes;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        // in case open is called multiple times without closing
-        if (page != null) {
-            if (exclusiveLatchNodes) {
-                page.releaseWriteLatch();
-            } else {
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
-        }
-
-        page = ((BTreeCursorInitialState) initialState).getPage();
-        frame.setPage(page);
-
-        pred = (RangePredicate) searchPred;
-        lowKeyCmp = pred.getLowKeyComparator();
-        highKeyCmp = pred.getHighKeyComparator();
-
-        lowKey = pred.getLowKey();
-        highKey = pred.getHighKey();
-
-        // init
-        lowKeyFtm = FindTupleMode.EXCLUSIVE;
-        if (pred.lowKeyInclusive) {
-            lowKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
-        } else {
-            lowKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
-        }
-
-        highKeyFtm = FindTupleMode.EXCLUSIVE;
-        if (pred.highKeyInclusive) {
-            highKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
-        } else {
-            highKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
-        }
-
-        tupleIndex = getLowKeyIndex();
-        stopTupleIndex = getHighKeyIndex();        
-    }
-
-    private void fetchNextLeafPage(int nextLeafPage) throws HyracksDataException {
-        do {
-            ICachedPage nextLeaf = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeafPage), false);
-            if (exclusiveLatchNodes) {
-                nextLeaf.acquireWriteLatch();
-                page.releaseWriteLatch();
-            } else {
-                nextLeaf.acquireReadLatch();
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
-            page = nextLeaf;
-            frame.setPage(page);
-            nextLeafPage = frame.getNextLeaf();
-        } while (frame.getTupleCount() == 0 && nextLeafPage > 0);
-    }
-
-    private int getLowKeyIndex() throws HyracksDataException {
-        if (lowKey == null) {
-            return 0;
-        }
-        int index = frame.findTupleIndex(lowKey, frameTuple, lowKeyCmp, lowKeyFtm, lowKeyFtp);
-        if (pred.lowKeyInclusive) {
-            index++;
-        } else {
-            if (index < 0) {
-                index = frame.getTupleCount();
-            }
-        }
-        return index;
-    }
-
-    private int getHighKeyIndex() throws HyracksDataException {
-        if (highKey == null) {
-            return frame.getTupleCount() - 1;
-        }
-        int index = frame.findTupleIndex(highKey, frameTuple, highKeyCmp, highKeyFtm, highKeyFtp);
-        if (pred.highKeyInclusive) {
-            if (index < 0) {
-                index = frame.getTupleCount() - 1;
-            } else {
-                index--;
-            }
-        }
-        return index;
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException {
-        // get the count for the current page
-        // follow the sibling pointer until last page
-        // if no more tuples on a page, then done
-
-        if (count < 0) {
-            count = 0;
-
-            while (stopTupleIndex >= 0 || frame.getTupleCount() == 0) {
-                count += (stopTupleIndex - tupleIndex + 1);
-
-                int nextLeafPage = frame.getNextLeaf();
-                if (nextLeafPage >= 0) {
-                    fetchNextLeafPage(nextLeafPage);
-                } else {
-                    // No more pages. Done counting!
-                    break;
-                }
-
-                tupleIndex = 0;
-                stopTupleIndex = getHighKeyIndex();
-            }
-
-            return true;
-        }
-
-        return false;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        // Do nothing. Count is performed just once!
-        IntegerSerializerDeserializer.putInt(count, countBuf, 0);
-        tupleBuilder.addField(countBuf, 0, 4);
-        countTuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (page != null) {
-            if (exclusiveLatchNodes) {
-                page.releaseWriteLatch();
-            } else {
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
-        }
-        tupleBuilder.reset();
-        tupleIndex = 0;
-        page = null;
-        pred = null;
-        count = -1;
-    }
-
-    @Override
-    public void reset() {
-        try {
-            close();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }        
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return countTuple;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        this.bufferCache = bufferCache;
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        this.fileId = fileId;
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return exclusiveLatchNodes;
-    }
-
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
deleted file mode 100644
index 9d7b612..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class BTreeCursorInitialState implements ICursorInitialState {
-
-    // This is only used by the LSM-RTree
-    private int pageId;
-    private ICachedPage page;
-    private ISearchOperationCallback searchCallback;
-    private MultiComparator originalKeyCmp;
-    private final IIndexAccessor accessor;
-
-    public BTreeCursorInitialState(ICachedPage page, ISearchOperationCallback searchCallback, IIndexAccessor accessor) {
-        this.page = page;
-        this.searchCallback = searchCallback;
-        this.accessor = accessor;
-    }
-    
-    public IIndexAccessor getAccessor() {
-        return accessor;
-    }
-
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    public void setPage(ICachedPage page) {
-        this.page = page;
-    }
-
-    public int getPageId() {
-        return pageId;
-    }
-
-    public void setPageId(int pageId) {
-        this.pageId = pageId;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return searchCallback;
-    }
-
-    @Override
-    public void setSearchOperationCallback(ISearchOperationCallback searchCallback) {
-        this.searchCallback = searchCallback;
-    }
-
-    @Override
-    public MultiComparator getOriginalKeyComparator() {
-        return originalKeyCmp;
-    }
-
-    @Override
-    public void setOriginialKeyComparator(MultiComparator originalCmp) {
-        this.originalKeyCmp = originalCmp;
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
deleted file mode 100644
index 47b47e2..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import java.util.ArrayDeque;
-import java.util.Deque;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.ITupleAcceptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.LongArrayList;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public class BTreeOpContext implements IIndexOperationContext {
-    private final int INIT_ARRAYLIST_SIZE = 6;
-
-    public IIndexAccessor accessor;
-    public MultiComparator cmp;
-    public ITreeIndexFrameFactory leafFrameFactory;
-    public ITreeIndexFrameFactory interiorFrameFactory;
-    public IBTreeLeafFrame leafFrame;
-    public IBTreeInteriorFrame interiorFrame;
-    public ITreeIndexMetaDataFrame metaFrame;
-    public IndexOperation op;
-    public ITreeIndexCursor cursor;
-    public BTreeCursorInitialState cursorInitialState;
-    public RangePredicate pred;
-    public BTreeSplitKey splitKey;
-    public LongArrayList pageLsns;
-    public IntArrayList smPages;
-    public IntArrayList freePages;
-    public int opRestarts = 0;
-    public boolean exceptionHandled;
-    public IModificationOperationCallback modificationCallback;
-    public ISearchOperationCallback searchCallback;
-    public ITupleAcceptor acceptor;
-    public int smoCount;
-
-    // Debug
-    public final Deque<PageValidationInfo> validationInfos;
-    public final ITreeIndexTupleReference interiorFrameTuple;
-    public final ITreeIndexTupleReference leafFrameTuple;
-
-    public BTreeOpContext(IIndexAccessor accessor, ITreeIndexFrameFactory leafFrameFactory,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexMetaDataFrame metaFrame,
-            IBinaryComparatorFactory[] cmpFactories, IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        this.accessor = accessor;
-        
-        if (cmpFactories[0] != null) {
-            this.cmp = MultiComparator.createIgnoreFieldLength(cmpFactories);
-        } else {
-            this.cmp = null;
-        }
-        
-        this.leafFrameFactory = leafFrameFactory;
-        this.leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        if (leafFrame != null && this.cmp != null) {
-            leafFrame.setMultiComparator(cmp);
-        }
-        this.interiorFrameFactory = interiorFrameFactory;
-        this.interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-        if (interiorFrame != null && this.cmp != null) {
-            interiorFrame.setMultiComparator(cmp);
-        }
-        this.metaFrame = metaFrame;
-        this.pageLsns = new LongArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
-        this.smoCount = 0;
-        this.modificationCallback = modificationCallback;
-        this.searchCallback = searchCallback;
-
-        // Debug
-        this.validationInfos = new ArrayDeque<PageValidationInfo>(INIT_ARRAYLIST_SIZE);
-        this.interiorFrameTuple = interiorFrame.createTupleReference();
-        this.leafFrameTuple = leafFrame.createTupleReference();
-    }
-
-    public void reset() {
-        if (pageLsns != null)
-            pageLsns.clear();
-        if (freePages != null)
-            freePages.clear();
-        if (smPages != null)
-            smPages.clear();
-        opRestarts = 0;
-        smoCount = 0;
-        exceptionHandled = false;
-    }
-
-    @Override
-    public void setOperation(IndexOperation newOp) {
-        if (newOp == IndexOperation.SEARCH || newOp == IndexOperation.DISKORDERSCAN) {
-            if (cursorInitialState == null) {
-                cursorInitialState = new BTreeCursorInitialState(null, searchCallback, accessor);
-            }
-        } else {
-            // Insert, delete, update or upsert operation.
-            if (smPages == null) {
-                smPages = new IntArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
-            }
-            if (freePages == null) {
-                freePages = new IntArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
-            }
-            if (pred == null) {
-                pred = new RangePredicate(null, null, true, true, null, null);
-            }
-            if (splitKey == null) {
-                splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
-            }
-        }
-        op = newOp;
-        smoCount = 0;
-        exceptionHandled = false;
-    }
-
-    public IBTreeLeafFrame createLeafFrame() {
-        return (IBTreeLeafFrame) leafFrameFactory.createFrame();
-    }
-
-    public IBTreeInteriorFrame createInteriorFrame() {
-        return (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-    }
-
-    public PageValidationInfo createPageValidationInfo(PageValidationInfo parent) throws HyracksDataException {
-        return new PageValidationInfo(parent);
-    }
-
-    public class PageValidationInfo {
-        public final int numKeyFields;
-
-        public final ArrayTupleBuilder lowRangeBuilder;
-        public final ArrayTupleBuilder highRangeBuilder;
-        public final ArrayTupleReference lowRangeTuple;
-        public final ArrayTupleReference highRangeTuple;
-
-        public boolean isLowRangeNull;
-        public boolean isHighRangeNull;
-
-        public PageValidationInfo() {
-            this.numKeyFields = cmp.getKeyFieldCount();
-            this.lowRangeBuilder = new ArrayTupleBuilder(numKeyFields);
-            this.highRangeBuilder = new ArrayTupleBuilder(numKeyFields);
-            this.lowRangeTuple = new ArrayTupleReference();
-            this.highRangeTuple = new ArrayTupleReference();
-            this.isLowRangeNull = true;
-            this.isHighRangeNull = true;
-        }
-
-        public PageValidationInfo(PageValidationInfo copy) throws HyracksDataException {
-            this();
-            if (copy != null) {
-                propagateLowRangeKey(copy);
-                propagateHighRangeKey(copy);
-            }
-        }
-
-        public void propagateLowRangeKey(PageValidationInfo toPropagate) throws HyracksDataException {
-            isLowRangeNull = toPropagate.isLowRangeNull;
-            if (!isLowRangeNull) {
-                adjustRangeKey(lowRangeBuilder, lowRangeTuple, toPropagate.lowRangeTuple);
-            }
-        }
-
-        public void propagateHighRangeKey(PageValidationInfo toPropagate) throws HyracksDataException {
-            isHighRangeNull = toPropagate.isHighRangeNull;
-            if (!isHighRangeNull) {
-                adjustRangeKey(highRangeBuilder, highRangeTuple, toPropagate.highRangeTuple);
-            }
-        }
-
-        public void adjustLowRangeKey(ITupleReference newLowRangeKey) throws HyracksDataException {
-            isLowRangeNull = newLowRangeKey == null ? true : false;
-            if (!isLowRangeNull) {
-                adjustRangeKey(lowRangeBuilder, lowRangeTuple, newLowRangeKey);
-            }
-        }
-
-        public void adjustHighRangeKey(ITupleReference newHighRangeKey) throws HyracksDataException {
-            isHighRangeNull = newHighRangeKey == null ? true : false;
-            if (!isHighRangeNull) {
-                adjustRangeKey(highRangeBuilder, highRangeTuple, newHighRangeKey);
-            }
-        }
-
-        private void adjustRangeKey(ArrayTupleBuilder builder, ArrayTupleReference tuple, ITupleReference newRangeKey)
-                throws HyracksDataException {
-            TupleUtils.copyTuple(builder, newRangeKey, numKeyFields);
-            tuple.reset(builder.getFieldEndOffsets(), builder.getByteArray());
-        }
-    }
-
-    @Override
-    public IndexOperation getOperation() {
-        return op;
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
deleted file mode 100644
index 607e00a..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-
-public class BTreeRangeSearchCursor implements ITreeIndexCursor {
-
-    private final IBTreeLeafFrame frame;
-    private final ITreeIndexTupleReference frameTuple;
-    private final boolean exclusiveLatchNodes;
-
-    private IBufferCache bufferCache = null;
-    private int fileId = -1;
-
-    private ICachedPage page = null;
-    private int pageId = -1; // This is used by the LSMRTree flush operation
-
-    private int tupleIndex = 0;
-    private int stopTupleIndex;
-
-    private final RangePredicate reusablePredicate;
-    private final ArrayTupleReference reconciliationTuple;
-    private IIndexAccessor accessor;
-    private ISearchOperationCallback searchCb;
-    private MultiComparator originalKeyCmp;
-    private ArrayTupleBuilder tupleBuilder;
-
-    private FindTupleMode lowKeyFtm;
-    private FindTupleMode highKeyFtm;
-    private FindTupleNoExactMatchPolicy lowKeyFtp;
-    private FindTupleNoExactMatchPolicy highKeyFtp;
-
-    private RangePredicate pred;
-    private MultiComparator lowKeyCmp;
-    private MultiComparator highKeyCmp;
-    protected ITupleReference lowKey;
-    private ITupleReference highKey;
-
-    public BTreeRangeSearchCursor(IBTreeLeafFrame frame, boolean exclusiveLatchNodes) {
-        this.frame = frame;
-        this.frameTuple = frame.createTupleReference();
-        this.exclusiveLatchNodes = exclusiveLatchNodes;
-        this.reusablePredicate = new RangePredicate();
-        this.reconciliationTuple = new ArrayTupleReference();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (page != null) {
-            if (exclusiveLatchNodes) {
-                page.releaseWriteLatch();
-            } else {
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
-        }
-
-        tupleIndex = 0;
-        page = null;
-        pred = null;
-    }
-
-    public ITupleReference getTuple() {
-        return frameTuple;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    public int getTupleOffset() {
-        return frame.getTupleOffset(tupleIndex - 1);
-    }
-
-    public int getPageId() {
-        return pageId;
-    }
-
-    private void fetchNextLeafPage(int nextLeafPage) throws HyracksDataException {
-        do {
-            ICachedPage nextLeaf = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeafPage), false);
-            if (exclusiveLatchNodes) {
-                nextLeaf.acquireWriteLatch();
-                page.releaseWriteLatch();
-            } else {
-                nextLeaf.acquireReadLatch();
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
-
-            page = nextLeaf;
-            frame.setPage(page);
-            pageId = nextLeafPage;
-            nextLeafPage = frame.getNextLeaf();
-        } while (frame.getTupleCount() == 0 && nextLeafPage > 0);
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException {
-        int nextLeafPage;
-        if (tupleIndex >= frame.getTupleCount()) {
-            nextLeafPage = frame.getNextLeaf();
-            if (nextLeafPage >= 0) {
-                fetchNextLeafPage(nextLeafPage);
-                tupleIndex = 0;
-                stopTupleIndex = getHighKeyIndex();
-                if (stopTupleIndex < 0) {
-                    return false;
-                }
-            } else {
-                return false;
-            }
-        }
-
-        if (tupleIndex > stopTupleIndex) {
-            return false;
-        }
-
-        frameTuple.resetByTupleIndex(frame, tupleIndex);
-        while (true) {
-            if (searchCb.proceed(frameTuple)) {
-                return true;
-            } else {
-                // copy the tuple before we unlatch/unpin
-                if (tupleBuilder == null) {
-                    tupleBuilder = new ArrayTupleBuilder(originalKeyCmp.getKeyFieldCount());
-                }
-                TupleUtils.copyTuple(tupleBuilder, frameTuple, originalKeyCmp.getKeyFieldCount());
-                reconciliationTuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-
-                // unlatch/unpin
-                if (exclusiveLatchNodes) {
-                    page.releaseWriteLatch();
-                } else {
-                    page.releaseReadLatch();
-                }
-                bufferCache.unpin(page);
-                page = null;
-
-                // reconcile
-                searchCb.reconcile(reconciliationTuple);
-
-                // retraverse the index looking for the reconciled key
-                reusablePredicate.setLowKey(reconciliationTuple, true);
-                try {
-                    accessor.search(this, reusablePredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
-
-                if (stopTupleIndex < 0 || tupleIndex > stopTupleIndex) {
-                    return false;
-                }
-
-                // see if we found the tuple we reconciled on
-                frameTuple.resetByTupleIndex(frame, tupleIndex);
-                if (originalKeyCmp.compare(reconciliationTuple, frameTuple) == 0) {
-                    return true;
-                } else {
-                    searchCb.cancel(reconciliationTuple);
-                }
-            }
-        }
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        tupleIndex++;
-    }
-
-    private int getLowKeyIndex() throws HyracksDataException {
-        if (lowKey == null) {
-            return 0;
-        }
-
-        int index = frame.findTupleIndex(lowKey, frameTuple, lowKeyCmp, lowKeyFtm, lowKeyFtp);
-        if (pred.lowKeyInclusive) {
-            index++;
-        } else {
-            if (index < 0) {
-                index = frame.getTupleCount();
-            }
-        }
-
-        return index;
-    }
-
-    private int getHighKeyIndex() throws HyracksDataException {
-        if (highKey == null) {
-            return frame.getTupleCount() - 1;
-        }
-
-        int index = frame.findTupleIndex(highKey, frameTuple, highKeyCmp, highKeyFtm, highKeyFtp);
-        if (pred.highKeyInclusive) {
-            if (index < 0) {
-                index = frame.getTupleCount() - 1;
-            } else {
-                index--;
-            }
-        }
-
-        return index;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        // in case open is called multiple times without closing
-        if (page != null) {
-            if (exclusiveLatchNodes) {
-                page.releaseWriteLatch();
-            } else {
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
-        }
-        accessor = ((BTreeCursorInitialState) initialState).getAccessor();
-        searchCb = initialState.getSearchOperationCallback();
-        originalKeyCmp = initialState.getOriginalKeyComparator();
-        pageId = ((BTreeCursorInitialState) initialState).getPageId();
-        page = initialState.getPage();
-        frame.setPage(page);
-
-        pred = (RangePredicate) searchPred;
-        lowKeyCmp = pred.getLowKeyComparator();
-        highKeyCmp = pred.getHighKeyComparator();
-        lowKey = pred.getLowKey();
-        highKey = pred.getHighKey();
-
-        reusablePredicate.setLowKeyComparator(originalKeyCmp);
-        reusablePredicate.setHighKeyComparator(pred.getHighKeyComparator());
-        reusablePredicate.setHighKey(pred.getHighKey(), pred.isHighKeyInclusive());
-
-        lowKeyFtm = FindTupleMode.EXCLUSIVE;
-        if (pred.lowKeyInclusive) {
-            lowKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
-        } else {
-            lowKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
-        }
-
-        highKeyFtm = FindTupleMode.EXCLUSIVE;
-        if (pred.highKeyInclusive) {
-            highKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
-        } else {
-            highKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
-        }
-
-        tupleIndex = getLowKeyIndex();
-        stopTupleIndex = getHighKeyIndex();
-    }
-
-    @Override
-    public void reset() throws HyracksDataException {
-        close();
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        this.bufferCache = bufferCache;
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        this.fileId = fileId;
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return exclusiveLatchNodes;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java
deleted file mode 100644
index 2606c08..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-
-public class BTreeSplitKey implements ISplitKey {
-    public final ITreeIndexTupleReference tuple;
-
-    public byte[] data = null;
-    public ByteBuffer buf = null;
-    public int keySize = 0;
-
-    public BTreeSplitKey(ITreeIndexTupleReference tuple) {
-        this.tuple = tuple;
-    }
-
-    public void initData(int keySize) {
-        // try to reuse existing memory from a lower-level split if possible
-        this.keySize = keySize;
-        if (data != null) {
-            if (data.length < keySize + 8) {
-                data = new byte[keySize + 8]; // add 8 for left and right page
-                buf = ByteBuffer.wrap(data);
-            }
-        } else {
-            data = new byte[keySize + 8]; // add 8 for left and right page
-            buf = ByteBuffer.wrap(data);
-        }
-
-        tuple.resetByTupleOffset(buf, 0);
-    }
-
-    public void reset() {
-        data = null;
-        buf = null;
-    }
-
-    public ByteBuffer getBuffer() {
-        return buf;
-    }
-
-    public ITreeIndexTupleReference getTuple() {
-        return tuple;
-    }
-
-    public int getLeftPage() {
-        return buf.getInt(keySize);
-    }
-
-    public int getRightPage() {
-        return buf.getInt(keySize + 4);
-    }
-
-    public void setLeftPage(int leftPage) {
-        buf.putInt(keySize, leftPage);
-    }
-
-    public void setRightPage(int rightPage) {
-        buf.putInt(keySize + 4, rightPage);
-    }
-
-    public void setPages(int leftPage, int rightPage) {
-        buf.putInt(keySize, leftPage);
-        buf.putInt(keySize + 4, rightPage);
-    }
-
-    public BTreeSplitKey duplicate(ITreeIndexTupleReference copyTuple) {
-        BTreeSplitKey copy = new BTreeSplitKey(copyTuple);
-        copy.data = data.clone();
-        copy.buf = ByteBuffer.wrap(copy.data);
-        copy.tuple.setFieldCount(tuple.getFieldCount());
-        copy.tuple.resetByTupleOffset(copy.buf, 0);
-        return copy;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
deleted file mode 100644
index 44fcdef..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
-
-public class FieldPrefixPrefixTupleReference extends TypeAwareTupleReference {
-
-    public FieldPrefixPrefixTupleReference(ITypeTraits[] typeTraits) {
-        super(typeTraits);
-    }
-
-    // assumes tuple index refers to prefix tuples
-    @Override
-    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
-        BTreeFieldPrefixNSMLeafFrame concreteFrame = (BTreeFieldPrefixNSMLeafFrame) frame;
-        IPrefixSlotManager slotManager = concreteFrame.getSlotManager();
-        int prefixSlotOff = slotManager.getPrefixSlotOff(tupleIndex);
-        int prefixSlot = concreteFrame.getBuffer().getInt(prefixSlotOff);
-        setFieldCount(slotManager.decodeFirstSlotField(prefixSlot));
-        tupleStartOff = slotManager.decodeSecondSlotField(prefixSlot);
-        buf = concreteFrame.getBuffer();
-        resetByTupleOffset(buf, tupleStartOff);
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
deleted file mode 100644
index 4c66fbb..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public class FieldPrefixSlotManager implements IPrefixSlotManager {
-
-    public static final int TUPLE_UNCOMPRESSED = 0xFF;
-    public static final int MAX_PREFIX_SLOTS = 0xFE;
-    public static final int GREATEST_KEY_INDICATOR = 0x00FFFFFF;
-    public static final int ERROR_INDICATOR = 0x00FFFFFE;
-
-    private static final int slotSize = 4;
-
-    private ByteBuffer buf;
-    private BTreeFieldPrefixNSMLeafFrame frame;
-    private MultiComparator cmp;
-
-    public int decodeFirstSlotField(int slot) {
-        return (slot & 0xFF000000) >>> 24;
-    }
-
-    public int decodeSecondSlotField(int slot) {
-        return slot & 0x00FFFFFF;
-    }
-
-    public int encodeSlotFields(int firstField, int secondField) {
-        return ((firstField & 0x000000FF) << 24) | (secondField & 0x00FFFFFF);
-    }
-
-    // returns prefix slot number, or TUPLE_UNCOMPRESSED of no match was found
-    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple) {
-        int prefixMid;
-        int prefixBegin = 0;
-        int prefixEnd = frame.getPrefixTupleCount() - 1;
-
-        if (frame.getPrefixTupleCount() > 0) {
-            while (prefixBegin <= prefixEnd) {
-                prefixMid = (prefixBegin + prefixEnd) / 2;
-                framePrefixTuple.resetByTupleIndex(frame, prefixMid);
-                int cmpVal = cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
-                if (cmpVal < 0)
-                    prefixEnd = prefixMid - 1;
-                else if (cmpVal > 0)
-                    prefixBegin = prefixMid + 1;
-                else
-                    return prefixMid;
-            }
-        }
-
-        return FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
-    }
-
-    @Override
-    public int findSlot(ITupleReference searchKey, ITreeIndexTupleReference frameTuple,
-            ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
-            FindTupleNoExactMatchPolicy matchPolicy) {
-        if (frame.getTupleCount() <= 0)
-            encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_KEY_INDICATOR);
-
-        int prefixMid;
-        int prefixBegin = 0;
-        int prefixEnd = frame.getPrefixTupleCount() - 1;
-        int prefixMatch = TUPLE_UNCOMPRESSED;
-
-        // bounds are inclusive on both ends
-        int tuplePrefixSlotNumLbound = prefixBegin;
-        int tuplePrefixSlotNumUbound = prefixEnd;
-
-        // binary search on the prefix slots to determine upper and lower bounds
-        // for the prefixSlotNums in tuple slots
-        while (prefixBegin <= prefixEnd) {
-            prefixMid = (prefixBegin + prefixEnd) / 2;
-            framePrefixTuple.resetByTupleIndex(frame, prefixMid);
-            int cmp = multiCmp.fieldRangeCompare(searchKey, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
-            if (cmp < 0) {
-                prefixEnd = prefixMid - 1;
-                tuplePrefixSlotNumLbound = prefixMid - 1;
-            } else if (cmp > 0) {
-                prefixBegin = prefixMid + 1;
-                tuplePrefixSlotNumUbound = prefixMid + 1;
-            } else {
-                if (mode == FindTupleMode.EXCLUSIVE) {
-                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
-                        prefixBegin = prefixMid + 1;
-                    else
-                        prefixEnd = prefixMid - 1;
-                } else {
-                    tuplePrefixSlotNumLbound = prefixMid;
-                    tuplePrefixSlotNumUbound = prefixMid;
-                    prefixMatch = prefixMid;
-                }
-
-                break;
-            }
-        }
-
-        int tupleMid = -1;
-        int tupleBegin = 0;
-        int tupleEnd = frame.getTupleCount() - 1;
-
-        // binary search on tuples, guided by the lower and upper bounds on prefixSlotNum
-        while (tupleBegin <= tupleEnd) {
-            tupleMid = (tupleBegin + tupleEnd) / 2;
-            int tupleSlotOff = getTupleSlotOff(tupleMid);
-            int tupleSlot = buf.getInt(tupleSlotOff);
-            int prefixSlotNum = decodeFirstSlotField(tupleSlot);
-
-            int cmp = 0;
-            if (prefixSlotNum == TUPLE_UNCOMPRESSED) {
-                frameTuple.resetByTupleIndex(frame, tupleMid);
-                cmp = multiCmp.compare(searchKey, frameTuple);
-            } else {
-                if (prefixSlotNum < tuplePrefixSlotNumLbound)
-                    cmp = 1;
-                else if (prefixSlotNum > tuplePrefixSlotNumUbound)
-                    cmp = -1;
-                else {
-                    frameTuple.resetByTupleIndex(frame, tupleMid);
-                    cmp = multiCmp.compare(searchKey, frameTuple);
-                }
-            }
-
-            if (cmp < 0)
-                tupleEnd = tupleMid - 1;
-            else if (cmp > 0)
-                tupleBegin = tupleMid + 1;
-            else {
-                if (mode == FindTupleMode.EXCLUSIVE) {
-                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
-                        tupleBegin = tupleMid + 1;
-                    else
-                        tupleEnd = tupleMid - 1;
-                } else {
-                    if (mode == FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS) {
-                        return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
-                    } else {
-                        return encodeSlotFields(prefixMatch, tupleMid);
-                    }
-                }
-            }
-        }
-
-        if (mode == FindTupleMode.EXACT)
-            return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
-
-        // do final comparison to determine whether the search key is greater
-        // than all keys or in between some existing keys
-        if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
-            if (tupleBegin > frame.getTupleCount() - 1)
-                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
-            frameTuple.resetByTupleIndex(frame, tupleBegin);
-            if (multiCmp.compare(searchKey, frameTuple) < 0)
-                return encodeSlotFields(prefixMatch, tupleBegin);
-            else
-                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
-        } else {
-            if (tupleEnd < 0)
-                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
-            frameTuple.resetByTupleIndex(frame, tupleEnd);
-            if (multiCmp.compare(searchKey, frameTuple) > 0)
-                return encodeSlotFields(prefixMatch, tupleEnd);
-            else
-                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
-        }
-    }
-
-    public int getPrefixSlotStartOff() {
-        return buf.capacity() - slotSize;
-    }
-
-    public int getPrefixSlotEndOff() {
-        return buf.capacity() - slotSize * frame.getPrefixTupleCount();
-    }
-
-    public int getTupleSlotStartOff() {
-        return getPrefixSlotEndOff() - slotSize;
-    }
-
-    public int getTupleSlotEndOff() {
-        return buf.capacity() - slotSize * (frame.getPrefixTupleCount() + frame.getTupleCount());
-    }
-
-    public int getSlotSize() {
-        return slotSize;
-    }
-
-    public void setSlot(int offset, int value) {
-        frame.getBuffer().putInt(offset, value);
-    }
-
-    public int insertSlot(int slot, int tupleOff) {
-        int slotNum = decodeSecondSlotField(slot);
-        if (slotNum == ERROR_INDICATOR) {
-            System.out.println("WOW BIG PROBLEM!");
-        }
-        if (slotNum == GREATEST_KEY_INDICATOR) {
-            int slotOff = getTupleSlotEndOff() - slotSize;
-            int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
-            setSlot(slotOff, newSlot);
-            return newSlot;
-        } else {
-            int slotEndOff = getTupleSlotEndOff();
-            int slotOff = getTupleSlotOff(slotNum);
-            int length = (slotOff - slotEndOff) + slotSize;
-            System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize,
-                    length);
-
-            int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
-            setSlot(slotOff, newSlot);
-            return newSlot;
-        }
-    }
-
-    public int getPrefixSlotOff(int tupleIndex) {
-        return getPrefixSlotStartOff() - tupleIndex * slotSize;
-    }
-
-    public int getTupleSlotOff(int tupleIndex) {
-        return getTupleSlotStartOff() - tupleIndex * slotSize;
-    }
-
-    public void setPrefixSlot(int tupleIndex, int slot) {
-        buf.putInt(getPrefixSlotOff(tupleIndex), slot);
-    }
-
-    @Override
-    public int getGreatestKeyIndicator() {
-        return GREATEST_KEY_INDICATOR;
-    }
-
-    @Override
-    public int getErrorIndicator() {
-        return ERROR_INDICATOR;
-    }
-
-    @Override
-    public void setFrame(ITreeIndexFrame frame) {
-        this.frame = (BTreeFieldPrefixNSMLeafFrame) frame;
-        this.buf = frame.getBuffer();
-    }
-
-    @Override
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
-            FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
-        throw new UnsupportedOperationException("Not implemented.");
-    }
-
-    @Override
-    public int getSlotStartOff() {
-        throw new UnsupportedOperationException("Not implemented.");
-    }
-
-    @Override
-    public int getSlotEndOff() {
-        throw new UnsupportedOperationException("Not implemented.");
-    }
-
-    @Override
-    public int getTupleOff(int slotOff) {
-        throw new UnsupportedOperationException("Not implemented.");
-    }
-
-    @Override
-    public int getSlotOff(int tupleIndex) {
-        throw new UnsupportedOperationException("Not implemented.");
-    }
-
-    public void setMultiComparator(MultiComparator cmp) {
-        this.cmp = cmp;
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
deleted file mode 100644
index b7174d4..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-
-public class FieldPrefixTupleReference implements ITreeIndexTupleReference {
-
-    private final ITreeIndexTupleReference helperTuple;
-
-    private BTreeFieldPrefixNSMLeafFrame frame;
-    private int prefixTupleStartOff;
-    private int suffixTupleStartOff;
-    private int numPrefixFields;
-    private int fieldCount;
-
-    public FieldPrefixTupleReference(ITreeIndexTupleReference helperTuple) {
-        this.helperTuple = helperTuple;
-        this.fieldCount = helperTuple.getFieldCount();
-    }
-
-    @Override
-    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
-        this.frame = (BTreeFieldPrefixNSMLeafFrame) frame;
-        IPrefixSlotManager slotManager = this.frame.getSlotManager();
-        int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
-        int tupleSlot = this.frame.getBuffer().getInt(tupleSlotOff);
-        int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
-        suffixTupleStartOff = slotManager.decodeSecondSlotField(tupleSlot);
-
-        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-            int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
-            int prefixSlot = this.frame.getBuffer().getInt(prefixSlotOff);
-            numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
-            prefixTupleStartOff = slotManager.decodeSecondSlotField(prefixSlot);
-        } else {
-            numPrefixFields = 0;
-            prefixTupleStartOff = -1;
-        }
-    }
-
-    @Override
-    public void setFieldCount(int fieldCount) {
-        this.fieldCount = fieldCount;
-    }
-
-    @Override
-    public void setFieldCount(int fieldStartIndex, int fieldCount) {
-        throw new UnsupportedOperationException("Not supported.");
-    }
-
-    @Override
-    public int getFieldCount() {
-        return fieldCount;
-    }
-
-    @Override
-    public byte[] getFieldData(int fIdx) {
-        return frame.getBuffer().array();
-    }
-
-    @Override
-    public int getFieldLength(int fIdx) {
-        if (fIdx < numPrefixFields) {
-            helperTuple.setFieldCount(numPrefixFields);
-            helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
-            return helperTuple.getFieldLength(fIdx);
-        } else {
-            helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
-            helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
-            return helperTuple.getFieldLength(fIdx - numPrefixFields);
-        }
-    }
-
-    @Override
-    public int getFieldStart(int fIdx) {
-        if (fIdx < numPrefixFields) {
-            helperTuple.setFieldCount(numPrefixFields);
-            helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
-            return helperTuple.getFieldStart(fIdx);
-        } else {
-            helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
-            helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
-            return helperTuple.getFieldStart(fIdx - numPrefixFields);
-        }
-    }
-
-    // unsupported operation
-    @Override
-    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset) {
-        throw new UnsupportedOperationException("Resetting this type of frame by offset is not supported.");
-    }
-
-    @Override
-    public int getTupleSize() {
-        return getSuffixTupleSize() + getPrefixTupleSize();
-    }
-
-    public int getSuffixTupleSize() {
-        helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
-        helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
-        return helperTuple.getTupleSize();
-    }
-
-    public int getPrefixTupleSize() {
-        if (numPrefixFields == 0)
-            return 0;
-        helperTuple.setFieldCount(numPrefixFields);
-        helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
-        return helperTuple.getTupleSize();
-    }
-
-    public int getNumPrefixFields() {
-        return numPrefixFields;
-    }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/UnconditionalTupleAcceptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/UnconditionalTupleAcceptor.java
deleted file mode 100644
index 9ae2523..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/UnconditionalTupleAcceptor.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.ITupleAcceptor;
-
-public enum UnconditionalTupleAcceptor implements ITupleAcceptor {
-    INSTANCE;
-
-    @Override
-    public boolean accept(ITupleReference tuple) {
-        return true;
-    }
-
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java
deleted file mode 100644
index de73459..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.util;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class BTreeUtils {
-    public static BTree createBTree(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, BTreeLeafFrameType leafType,
-            FileReference file) throws BTreeException {
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = getLeafFrameFactory(tupleWriterFactory, leafType);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-        BTree btree = new BTree(bufferCache, fileMapProvider, freePageManager, interiorFrameFactory, leafFrameFactory,
-                cmpFactories, typeTraits.length, file);
-        return btree;
-    }
-    
-    public static BTree createBTree(IBufferCache bufferCache, IFreePageManager freePageManager,
-            IFileMapProvider fileMapProvider, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            BTreeLeafFrameType leafType, FileReference file) throws BTreeException {
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = getLeafFrameFactory(tupleWriterFactory, leafType);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        BTree btree = new BTree(bufferCache, fileMapProvider, freePageManager, interiorFrameFactory, leafFrameFactory,
-                cmpFactories, typeTraits.length, file);
-        return btree;
-    }
-
-    // Creates a new MultiComparator by constructing new IBinaryComparators.
-    public static MultiComparator getSearchMultiComparator(IBinaryComparatorFactory[] cmpFactories,
-            ITupleReference searchKey) {
-        if (searchKey == null || cmpFactories.length == searchKey.getFieldCount()) {
-            return MultiComparator.createIgnoreFieldLength(cmpFactories);
-        }
-        IBinaryComparator[] newCmps = new IBinaryComparator[searchKey.getFieldCount()];
-        for (int i = 0; i < searchKey.getFieldCount(); i++) {
-            newCmps[i] = cmpFactories[i].createBinaryComparator();
-        }
-        return new MultiComparator(newCmps);
-    }
-
-    public static ITreeIndexFrameFactory getLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory,
-            BTreeLeafFrameType leafType) throws BTreeException {
-        switch (leafType) {
-            case REGULAR_NSM: {
-                return new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-            }
-            case FIELD_PREFIX_COMPRESSED_NSM: {
-                return new BTreeFieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
-            }
-            default: {
-                throw new BTreeException("Unknown BTreeLeafFrameType: " + leafType.toString());
-            }
-        }
-    }
-}
diff --git a/hyracks-storage-am-common/pom.xml b/hyracks-storage-am-common/pom.xml
deleted file mode 100644
index 251b4d7..0000000
--- a/hyracks-storage-am-common/pom.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-common</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
deleted file mode 100644
index 9fbaac2..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public interface ICursorInitialState {
-    public ICachedPage getPage();
-
-    public void setPage(ICachedPage page);
-
-    public ISearchOperationCallback getSearchOperationCallback();
-
-    public void setSearchOperationCallback(ISearchOperationCallback searchCallback);
-
-    public MultiComparator getOriginalKeyComparator();
-
-    public void setOriginialKeyComparator(MultiComparator originalCmp);
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManagerFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManagerFactory.java
deleted file mode 100644
index d26e65f..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManagerFactory.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-public interface IFreePageManagerFactory {
-    public IFreePageManager createFreePageManager();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IInMemoryFreePageManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IInMemoryFreePageManager.java
deleted file mode 100644
index 67935a7..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IInMemoryFreePageManager.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-public interface IInMemoryFreePageManager extends IFreePageManager {
-    public int getCapacity();
-
-    public void reset();
-
-    public boolean isFull();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndex.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndex.java
deleted file mode 100644
index 1557c75..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndex.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-/**
- * This interface describes the operations common to all indexes. Indexes
- * implementing this interface can easily reuse existing index operators for
- * dataflow. Users must perform operations on an via an {@link IIndexAccessor}.
- * During dataflow, the lifecycle of IIndexes are handled through an {@link IIndexLifecycleManager}.
- */
-public interface IIndex {
-
-    /**
-     * Initializes the persistent state of an index.
-     * An index cannot be created if it is in the activated state.
-     * Calling create on an index that is deactivated has the effect of clearing the index.
-     * 
-     * @throws HyracksDataException
-     *             if there is an error in the BufferCache while (un)pinning pages, (un)latching pages,
-     *             creating files, or deleting files
-     *             if the index is in the activated state
-     */
-    public void create() throws HyracksDataException;
-
-    /**
-     * Initializes the index's operational state. An index in the activated state may perform
-     * operations via an {@link IIndexAccessor}.
-     * 
-     * @throws HyracksDataException
-     *             if there is a problem in the BufferCache while (un)pinning pages, (un)latching pages,
-     *             creating files, or deleting files
-     */
-    public void activate() throws HyracksDataException;
-
-    /**
-     * Resets the operational state of the index. Calling clear has the same logical effect
-     * as calling deactivate(), destroy(), create(), then activate(), but not necessarily the
-     * same physical effect.
-     * 
-     * @throws HyracksDataException
-     *             if there is a problem in the BufferCache while (un)pinning pages, (un)latching pages,
-     *             creating files, or deleting files
-     *             if the index is not in the activated state
-     */
-    public void clear() throws HyracksDataException;
-
-    /**
-     * Deinitializes the index's operational state. An index in the deactivated state may not
-     * perform operations.
-     * 
-     * @throws HyracksDataException
-     *             if there is a problem in the BufferCache while (un)pinning pages, (un)latching pages,
-     *             creating files, or deleting files
-     */
-    public void deactivate() throws HyracksDataException;
-
-    /**
-     * Removes the persistent state of an index.
-     * An index cannot be destroyed if it is in the activated state.
-     * 
-     * @throws HyracksDataException
-     *             if there is an error in the BufferCache while (un)pinning pages, (un)latching pages,
-     *             creating files, or deleting files
-     *             if the index is already activated
-     */
-    public void destroy() throws HyracksDataException;
-
-    /**
-     * Creates an {@link IIndexAccessor} for performing operations on this index.
-     * An IIndexAccessor is not thread safe, but different IIndexAccessors can concurrently operate
-     * on the same {@link IIndex}.
-     * 
-     * @returns IIndexAccessor an accessor for this {@link IIndex}
-     * @param modificationCallback
-     *            the callback to be used for modification operations
-     * @param searchCallback
-     *            the callback to be used for search operations
-     */
-    public IIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback);
-
-    /**
-     * Ensures that all pages (and tuples) of the index are logically consistent.
-     * An assertion error is thrown if validation fails.
-     * 
-     * @throws HyracksDataException
-     *             if there is an error performing validation
-     */
-    public void validate() throws HyracksDataException;
-
-    /**
-     * @return the {@link IBufferCache} underlying this index.
-     */
-    public IBufferCache getBufferCache();
-
-    /**
-     * @return the size, in bytes, of pre-allocated memory space that this index was allotted.
-     */
-    public long getMemoryAllocationSize();
-
-    /**
-     * @param fillFactor
-     * @param verifyInput
-     * @throws IndexException
-     */
-    public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-            throws IndexException;
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoader.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoader.java
deleted file mode 100644
index 86ed78f..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoader.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;

-

-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;

-

-public interface IIndexBulkLoader {

-    /**

-     * Append a tuple to the index in the context of a bulk load.

-     * 

-     * @param tuple

-     *            Tuple to be inserted.

-     * @throws IndexException

-     *             If the input stream is invalid for bulk loading (e.g., is not sorted).

-     * @throws HyracksDataException

-     *             If the BufferCache throws while un/pinning or un/latching.

-     */

-    public void add(ITupleReference tuple) throws IndexException, HyracksDataException;

-

-    /**

-     * Finalize the bulk loading operation in the given context.

-     * 

-     * @throws IndexException

-     * @throws HyracksDataException

-     *             If the BufferCache throws while un/pinning or un/latching.

-     */

-    public void end() throws IndexException, HyracksDataException;

-

-}

diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexCursor.java
deleted file mode 100644
index 1c75da2..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexCursor.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public interface IIndexCursor {
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException;
-
-    public boolean hasNext() throws HyracksDataException, IndexException;
-
-    public void next() throws HyracksDataException;
-
-    public void close() throws HyracksDataException;
-
-    public void reset() throws HyracksDataException, IndexException;
-
-    public ITupleReference getTuple();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexDataflowHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexDataflowHelper.java
deleted file mode 100644
index 39b4553..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexDataflowHelper.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-
-public interface IIndexDataflowHelper {
-    public void create() throws HyracksDataException;
-
-    public void close() throws HyracksDataException;
-
-    public void open() throws HyracksDataException;
-
-    public void destroy() throws HyracksDataException;
-
-    public IIndex getIndexInstance();
-
-    public FileReference getFileReference();
-
-    public long getResourceID() throws HyracksDataException;
-    
-    public IHyracksTaskContext getTaskContext();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexLifecycleManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexLifecycleManager.java
deleted file mode 100644
index c4f43b9..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexLifecycleManager.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface IIndexLifecycleManager {
-    public IIndex getIndex(long resourceID);
-
-    public void register(long resourceID, IIndex index) throws HyracksDataException;
-
-    public void unregister(long resourceID) throws HyracksDataException;
-
-    public void open(long resourceID) throws HyracksDataException;
-
-    public void close(long resourceID);
-
-    public List<IIndex> getOpenIndexes();
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexLifecycleManagerProvider.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexLifecycleManagerProvider.java
deleted file mode 100644
index 7ae4a48..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexLifecycleManagerProvider.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-
-public interface IIndexLifecycleManagerProvider extends Serializable {
-    public IIndexLifecycleManager getLifecycleManager(IHyracksTaskContext ctx);
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOperationContext.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOperationContext.java
deleted file mode 100644
index c5bf83e..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOperationContext.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-
-public interface IIndexOperationContext {
-    void setOperation(IndexOperation newOp);
-    
-    IndexOperation getOperation();
-
-    void reset();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IModificationOperationCallback.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IModificationOperationCallback.java
deleted file mode 100644
index 0b68ab0..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IModificationOperationCallback.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-/**
- * This operation callback allows for arbitrary actions to be taken while traversing 
- * an index structure. The {@link IModificationOperationCallback} will be called on 
- * all modifying operations (e.g. insert, update, delete...) for all indexes.
- * 
- * @author zheilbron
- */
-public interface IModificationOperationCallback {
-
-    /**
-     * This method is called on a tuple that is about to traverse an index's structure 
-     * (i.e. before any pages are pinned or latched). 
-     *
-     * The format of the tuple is the format that would be stored in the index itself.
-     * 
-     * @param tuple the tuple that is about to be operated on
-     */
-    public void before(ITupleReference tuple) throws HyracksDataException;
-
-    /**
-     * This method is called on a tuple when a tuple with a matching key is found for the 
-     * current operation. It is possible that tuple is null, in which case no tuple with a 
-     * matching key was found.
-     * 
-     * When found is called, the leaf page where the tuple resides will be pinned and latched, 
-     * so blocking operations should be avoided.
-     * 
-     * @param tuple a tuple with a matching key, otherwise null if none exists
-     */
-    public void found(ITupleReference before, ITupleReference after) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
deleted file mode 100644
index 65c9c8a..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface IModificationOperationCallbackFactory extends Serializable {
-    public IModificationOperationCallback createModificationOperationCallback(long resourceId, Object resource, IHyracksTaskContext ctx)
-            throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchOperationCallback.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchOperationCallback.java
deleted file mode 100644
index b62bbcb..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchOperationCallback.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-/**
- * This operation callback allows for arbitrary actions to be taken while traversing 
- * an index structure. The {@link ISearchOperationCallback} will be called on 
- * all search operations for ordered indexes only.
- * 
- * @author zheilbron
- */
-public interface ISearchOperationCallback {
-
-    /**
-     * During an index search operation, this method will be called on tuples as they are 
-     * passed by with a search cursor. This call will be invoked while a leaf page is latched 
-     * and pinned. If the call returns false, then the page will be unlatched and unpinned and 
-     * {@link #reconcile(ITupleReference)} will be called with the tuple that was not proceeded 
-     * on.
-     * 
-     * @param tuple the tuple that is being passed over by the search cursor
-     * @return true to proceed otherwise false to unlatch and unpin, leading to reconciliation
-     */
-    public boolean proceed(ITupleReference tuple) throws HyracksDataException;
-
-    /**
-     * This method is only called on a tuple that was not 'proceeded' on 
-     * (see {@link #proceed(ITupleReference)}). This method allows an opportunity to reconcile 
-     * by performing any necessary actions before resuming the search (e.g. a try-lock may have 
-     * failed in the proceed call, and now in reconcile we should take a full (blocking) lock).
-     * 
-     * @param tuple the tuple that failed to proceed
-     */
-    public void reconcile(ITupleReference tuple) throws HyracksDataException;
-
-    /**
-     * This method is only called on a tuple that was reconciled on, but not found after 
-     * retraversing. This method allows an opportunity to cancel some action that was taken in 
-     * {@link #reconcile(ITupleReference))}.
-     * 
-     * @param tuple the tuple that was previously reconciled
-     */
-    public void cancel(ITupleReference tuple) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
deleted file mode 100644
index 6389b8d..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface ISearchOperationCallbackFactory extends Serializable {
-    public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx)
-            throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
deleted file mode 100644
index 3e5e018..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface ISlotManager {
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
-            FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
-
-    public int getGreatestKeyIndicator();
-
-    public int getErrorIndicator();
-
-    public void setFrame(ITreeIndexFrame frame);
-
-    public int getTupleOff(int slotOff);
-
-    public int insertSlot(int tupleIndex, int tupleOff);
-
-    public int getSlotStartOff();
-
-    public int getSlotEndOff();
-
-    public int getSlotOff(int tupleIndex);
-
-    public int getSlotSize();
-
-    public void setSlot(int slotOff, int value);
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
deleted file mode 100644
index 2313c2e..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-
-/**
- * Interface describing the operations of tree-based index structures. Indexes
- * implementing this interface can easily reuse the tree index operators for
- * dataflow. We assume that indexes store tuples with a fixed number of fields.
- * Users must perform operations on an ITreeIndex via an ITreeIndexAccessor.
- */
-public interface ITreeIndex extends IIndex {
-    /**
-     * @return The index's leaf frame factory.
-     */
-    public ITreeIndexFrameFactory getLeafFrameFactory();
-
-    /**
-     * @return The index's interior frame factory.
-     */
-    public ITreeIndexFrameFactory getInteriorFrameFactory();
-
-    /**
-     * @return The index's free page manager.
-     */
-    public IFreePageManager getFreePageManager();
-
-    /**
-     * @return The number of fields tuples of this index have.
-     */
-    public int getFieldCount();
-
-    /**
-     * @return The current root page id of this index.
-     */
-    public int getRootPageId();
-
-    /**
-     * @return The file id of this index.
-     */
-    public int getFileId();
-
-    /**
-     * @return Comparator factories.
-     */
-    public IBinaryComparatorFactory[] getComparatorFactories();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
deleted file mode 100644
index 612af25..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public interface ITreeIndexFrame {
-
-    public void initBuffer(byte level);
-
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple);
-
-    public void insert(ITupleReference tuple, int tupleIndex);
-
-    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex);
-
-    public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace);
-
-    public void delete(ITupleReference tuple, int tupleIndex);
-
-    // returns true if slots were modified, false otherwise
-    public boolean compact();
-
-    // returns true if compressed.
-    public boolean compress() throws HyracksDataException;
-
-    public int getTupleCount();
-
-    public int getTupleOffset(int slotNum);
-
-    public int getTotalFreeSpace();
-
-    public void setPageLsn(long pageLsn);
-
-    public long getPageLsn();
-
-    public void setPage(ICachedPage page);
-
-    public ICachedPage getPage();
-
-    public ByteBuffer getBuffer();
-
-    // for debugging
-    public String printHeader();
-
-    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey);
-
-    public ISlotManager getSlotManager();
-
-    // ATTENTION: in b-tree operations it may not always be possible to
-    // determine whether an ICachedPage is a leaf or interior node
-    // a compatible interior and leaf implementation MUST return identical
-    // values when given the same ByteBuffer for the functions below
-    public boolean isLeaf();
-
-    public boolean isInterior();
-
-    public byte getLevel();
-
-    public void setLevel(byte level);
-
-    public int getSlotSize();
-
-    // for debugging
-    public int getFreeSpaceOff();
-
-    public void setFreeSpaceOff(int freeSpace);
-
-    public ITreeIndexTupleWriter getTupleWriter();
-
-    public int getPageHeaderSize();
-
-    public ITreeIndexTupleReference createTupleReference();
-
-    public void setMultiComparator(MultiComparator cmp);
-
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
deleted file mode 100644
index b48ded4..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public interface ITreeIndexMetaDataFrame {
-	public void initBuffer(byte level);
-
-	public void setPage(ICachedPage page);
-
-	public ICachedPage getPage();
-
-	public byte getLevel();
-
-	public void setLevel(byte level);
-
-	public int getNextPage();
-
-	public void setNextPage(int nextPage);
-
-	public int getMaxPage();
-
-	public void setMaxPage(int maxPage);
-
-	public int getFreePage();
-
-	public boolean hasSpace();
-
-	public void addFreePage(int freePage);
-	
-	// Special flag for LSM-Components to mark whether they are valid or not. 
-	public boolean isValid();
-	
-	// Set special validity flag.
-	public void setValid(boolean isValid);
-	
-	// Special placeholder for LSN information. Used for transactional LSM indexes.
-	public long getLSN();
-	
-	public void setLSN(long lsn);
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
deleted file mode 100644
index 43991f1..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public interface ITreeIndexTupleWriter {
-    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
-
-    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff);
-
-    public int bytesRequired(ITupleReference tuple);
-
-    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, byte[] targetBuf, int targetOff);
-
-    public int bytesRequired(ITupleReference tuple, int startField, int numFields);
-
-    // return a tuplereference instance that can read the tuple written by this
-    // writer the main idea is that the format of the written tuple may not be the same
-    // as the format written by this writer
-    public ITreeIndexTupleReference createTupleReference();
-
-    // This method is only used by the BTree leaf frame split method since tuples
-    // in the LSM-BTree can be either matter or anti-matter tuples and we want
-    // to calculate the size of all tuples in the frame.
-    public int getCopySpaceRequired(ITupleReference tuple);
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/UnsortedInputException.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/UnsortedInputException.java
deleted file mode 100644
index e3304c9..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/UnsortedInputException.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.api;
-
-/**
- * Thrown when trying to bulk load an index with an unsorted input stream. 
- */
-public class UnsortedInputException extends IndexException {
-	private static final long serialVersionUID = 1L;
-	
-	public UnsortedInputException(Exception e) {
-		super(e);
-	}
-	
-	public UnsortedInputException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractIndexOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractIndexOperatorDescriptor.java
deleted file mode 100644
index c26bbfa..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractIndexOperatorDescriptor.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-
-public abstract class AbstractIndexOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor implements
-        IIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    protected final IFileSplitProvider fileSplitProvider;
-    protected final IStorageManagerInterface storageManager;
-    protected final IIndexLifecycleManagerProvider lifecycleManagerProvider;
-    protected final IIndexDataflowHelperFactory dataflowHelperFactory;
-    protected final ITupleFilterFactory tupleFilterFactory;
-    protected final boolean retainInput;
-    protected final ISearchOperationCallbackFactory searchOpCallbackFactory;
-    protected final IModificationOperationCallbackFactory modificationOpCallbackFactory;
-    protected final ILocalResourceFactoryProvider localResourceFactoryProvider;
-
-    public AbstractIndexOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity, int outputArity,
-            RecordDescriptor recDesc, IStorageManagerInterface storageManager,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
-            IIndexDataflowHelperFactory dataflowHelperFactory, ITupleFilterFactory tupleFilterFactory,
-            boolean retainInput, ILocalResourceFactoryProvider localResourceFactoryProvider,
-            ISearchOperationCallbackFactory searchOpCallbackFactory,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, inputArity, outputArity);
-        this.fileSplitProvider = fileSplitProvider;
-        this.storageManager = storageManager;
-        this.lifecycleManagerProvider = lifecycleManagerProvider;
-        this.dataflowHelperFactory = dataflowHelperFactory;
-        this.retainInput = retainInput;
-        this.tupleFilterFactory = tupleFilterFactory;
-        this.localResourceFactoryProvider = localResourceFactoryProvider;
-        this.searchOpCallbackFactory = searchOpCallbackFactory;
-        this.modificationOpCallbackFactory = modificationOpCallbackFactory;
-        if (outputArity > 0) {
-            recordDescriptors[0] = recDesc;
-        }
-    }
-
-    @Override
-    public IFileSplitProvider getFileSplitProvider() {
-        return fileSplitProvider;
-    }
-
-    @Override
-    public IStorageManagerInterface getStorageManager() {
-        return storageManager;
-    }
-
-    @Override
-    public IIndexLifecycleManagerProvider getLifecycleManagerProvider() {
-        return lifecycleManagerProvider;
-    }
-
-    @Override
-    public RecordDescriptor getRecordDescriptor() {
-        return recordDescriptors[0];
-    }
-
-    @Override
-    public IIndexDataflowHelperFactory getIndexDataflowHelperFactory() {
-        return dataflowHelperFactory;
-    }
-
-    @Override
-    public boolean getRetainInput() {
-        return retainInput;
-    }
-
-    @Override
-    public ISearchOperationCallbackFactory getSearchOpCallbackFactory() {
-        return searchOpCallbackFactory;
-    }
-    
-    @Override
-    public IModificationOperationCallbackFactory getModificationOpCallbackFactory() {
-        return modificationOpCallbackFactory;
-    }
-
-    @Override
-    public ITupleFilterFactory getTupleFilterFactory() {
-        return tupleFilterFactory;
-    }
-    
-    @Override
-    public ILocalResourceFactoryProvider getLocalResourceFactoryProvider() {
-        return localResourceFactoryProvider;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
deleted file mode 100644
index 8e87dfe..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-
-public abstract class AbstractTreeIndexOperatorDescriptor extends AbstractIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    protected final ITypeTraits[] typeTraits;
-    protected final IBinaryComparatorFactory[] comparatorFactories;
-    protected final int[] bloomFilterKeyFields;
-
-    public AbstractTreeIndexOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity, int outputArity,
-            RecordDescriptor recDesc, IStorageManagerInterface storageManager,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
-            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields,
-            IIndexDataflowHelperFactory dataflowHelperFactory, ITupleFilterFactory tupleFilterFactory,
-            boolean retainInput, ILocalResourceFactoryProvider localResourceFactoryProvider,
-            ISearchOperationCallbackFactory searchOpCallbackFactory,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, inputArity, outputArity, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider,
-                dataflowHelperFactory, tupleFilterFactory, retainInput, localResourceFactoryProvider,
-                searchOpCallbackFactory, modificationOpCallbackFactory);
-        this.typeTraits = typeTraits;
-        this.comparatorFactories = comparatorFactories;
-        this.bloomFilterKeyFields = bloomFilterKeyFields;
-    }
-
-    public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
-        return comparatorFactories;
-    }
-
-    public ITypeTraits[] getTreeIndexTypeTraits() {
-        return typeTraits;
-    }
-    
-    public int[] getTreeIndexBloomFilterKeyFields() {
-        return bloomFilterKeyFields;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexDataflowHelperFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexDataflowHelperFactory.java
deleted file mode 100644
index 71760c9..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexDataflowHelperFactory.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
-
-public interface IIndexDataflowHelperFactory extends Serializable {
-    public IIndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc,
-            final IHyracksTaskContext ctx, int partition);
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexOperatorDescriptor.java
deleted file mode 100644
index 6193414..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexOperatorDescriptor.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.dataflow.IActivity;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-
-public interface IIndexOperatorDescriptor extends IActivity {
-    public IFileSplitProvider getFileSplitProvider();
-
-    public IStorageManagerInterface getStorageManager();
-
-    public IIndexLifecycleManagerProvider getLifecycleManagerProvider();
-
-    public RecordDescriptor getRecordDescriptor();
-
-    public IIndexDataflowHelperFactory getIndexDataflowHelperFactory();
-
-    public boolean getRetainInput();
-
-    public ISearchOperationCallbackFactory getSearchOpCallbackFactory();
-    
-    public IModificationOperationCallbackFactory getModificationOpCallbackFactory();
-    
-    public ITupleFilterFactory getTupleFilterFactory();
-    
-    public ILocalResourceFactoryProvider getLocalResourceFactoryProvider();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
deleted file mode 100644
index 1b6271d..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-
-public class IndexBulkLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
-    private final IIndexOperatorDescriptor opDesc;
-    private final IHyracksTaskContext ctx;
-    private final float fillFactor;
-    private final boolean verifyInput;
-    private final long numElementsHint;
-    private final IIndexDataflowHelper indexHelper;
-    private FrameTupleAccessor accessor;
-    private IIndex index;
-    private IIndexBulkLoader bulkLoader;
-    private IRecordDescriptorProvider recDescProvider;
-    private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
-
-    public IndexBulkLoadOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            int[] fieldPermutation, float fillFactor, boolean verifyInput, long numElementsHint,
-            IRecordDescriptorProvider recordDescProvider) {
-        this.opDesc = opDesc;
-        this.ctx = ctx;
-        this.indexHelper = opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(opDesc, ctx, partition);
-        this.fillFactor = fillFactor;
-        this.verifyInput = verifyInput;
-        this.numElementsHint = numElementsHint;
-        this.recDescProvider = recordDescProvider;
-        tuple.setFieldPermutation(fieldPermutation);
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        RecordDescriptor recDesc = recDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-        accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        indexHelper.open();
-        index = indexHelper.getIndexInstance();
-        try {
-            bulkLoader = index.createBulkLoader(fillFactor, verifyInput, numElementsHint);
-        } catch (Exception e) {
-            indexHelper.close();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            tuple.reset(accessor, i);
-            try {
-                bulkLoader.add(tuple);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            bulkLoader.end();
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        } finally {
-            indexHelper.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexCreateOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexCreateOperatorNodePushable.java
deleted file mode 100644
index f965f01..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexCreateOperatorNodePushable.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
-
-public class IndexCreateOperatorNodePushable extends AbstractOperatorNodePushable {
-    private final IIndexDataflowHelper indexHelper;
-
-    public IndexCreateOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition) {
-        this.indexHelper = opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(opDesc, ctx, partition);
-    }
-
-    @Override
-    public void deinitialize() throws HyracksDataException {
-    }
-
-    @Override
-    public int getInputArity() {
-        return 0;
-    }
-
-    @Override
-    public IFrameWriter getInputFrameWriter(int index) {
-        return null;
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        indexHelper.create();
-    }
-
-    @Override
-    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
deleted file mode 100644
index e46efff..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactory;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.LocalResource;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-
-public abstract class IndexDataflowHelper implements IIndexDataflowHelper {
-
-    protected final IIndexOperatorDescriptor opDesc;
-    protected final IHyracksTaskContext ctx;
-    protected final IIndexLifecycleManager lcManager;
-    protected final ILocalResourceRepository localResourceRepository;
-    protected final ResourceIdFactory resourceIdFactory;
-    protected final FileReference file;
-    protected final int partition;
-
-    protected IIndex index;
-
-    public IndexDataflowHelper(IIndexOperatorDescriptor opDesc, final IHyracksTaskContext ctx, int partition) {
-        this.opDesc = opDesc;
-        this.ctx = ctx;
-        this.lcManager = opDesc.getLifecycleManagerProvider().getLifecycleManager(ctx);
-        this.localResourceRepository = opDesc.getStorageManager().getLocalResourceRepository(ctx);
-        this.resourceIdFactory = opDesc.getStorageManager().getResourceIdFactory(ctx);
-        this.partition = partition;
-        this.file = opDesc.getFileSplitProvider().getFileSplits()[partition].getLocalFile();
-    }
-
-    protected abstract IIndex createIndexInstance() throws HyracksDataException;
-
-    public IIndex getIndexInstance() {
-        return index;
-    }
-
-    public void create() throws HyracksDataException {
-        synchronized (lcManager) {
-            long resourceID = getResourceID();
-            index = lcManager.getIndex(resourceID);
-            if (index != null) {
-                lcManager.unregister(resourceID);
-            } else {
-                index = createIndexInstance();
-            }
-
-            // The previous resource ID needs to be removed since calling IIndex.create() may possibly destroy 
-            // any physical artifact that the LocalResourceRepository is managing (e.g. a file containing the resource ID). 
-            // Once the index has been created, a new resource ID can be generated.
-            if (resourceID != -1) {
-                localResourceRepository.deleteResourceByName(file.getFile().getPath());
-            }
-            index.create();
-            try {
-                //TODO Create LocalResource through LocalResourceFactory interface
-                resourceID = resourceIdFactory.createId();
-                ILocalResourceFactory localResourceFactory = opDesc.getLocalResourceFactoryProvider()
-                        .getLocalResourceFactory();
-                localResourceRepository.insert(localResourceFactory.createLocalResource(resourceID, file.getFile()
-                        .getPath(), partition));
-            } catch (IOException e) {
-                throw new HyracksDataException(e);
-            }
-            lcManager.register(resourceID, index);
-        }
-    }
-
-    public void open() throws HyracksDataException {
-        synchronized (lcManager) {
-            long resourceID = getResourceID();
-
-            if (resourceID == -1) {
-                throw new HyracksDataException("Index does not have a valid resource ID. Has it been created yet?");
-            }
-
-            index = lcManager.getIndex(resourceID);
-            if (index == null) {
-                index = createIndexInstance();
-                lcManager.register(resourceID, index);
-            }
-            lcManager.open(resourceID);
-        }
-    }
-
-    public void close() throws HyracksDataException {
-        synchronized (lcManager) {
-            lcManager.close(getResourceID());
-        }
-    }
-
-    public void destroy() throws HyracksDataException {
-        synchronized (lcManager) {
-            long resourceID = getResourceID();
-            index = lcManager.getIndex(resourceID);
-            if (index != null) {
-                lcManager.unregister(resourceID);
-            } else {
-                index = createIndexInstance();
-            }
-
-            if (resourceID != -1) {
-                localResourceRepository.deleteResourceByName(file.getFile().getPath());
-            }
-            index.destroy();
-        }
-    }
-
-    public FileReference getFileReference() {
-        return file;
-    }
-
-    public long getResourceID() throws HyracksDataException {
-        LocalResource localResource = localResourceRepository.getResourceByName(file.getFile().getPath());
-        if (localResource == null) {
-            return -1;
-        } else {
-            return localResource.getResourceId();
-        }
-    }
-
-    public IHyracksTaskContext getTaskContext() {
-        return ctx;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDropOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDropOperatorDescriptor.java
deleted file mode 100644
index 6f890d7..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDropOperatorDescriptor.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class IndexDropOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    public IndexDropOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManagerInterface storageManager,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
-            IIndexDataflowHelperFactory dataflowHelperFactory) {
-        // TODO: providing the type traits below is a hack to allow:
-        // 1) Type traits not to be specified when creating the drop operator
-        // 2) The LSMRTreeDataflowHelper to get acceptable type traits
-        // This should eventually not be *hacked*, but I don't know the proper fix yet. -zheilbron
-        super(spec, 0, 0, null, storageManager, lifecycleManagerProvider, fileSplitProvider, new ITypeTraits[] {
-                IntegerPointable.TYPE_TRAITS, IntegerPointable.TYPE_TRAITS }, new IBinaryComparatorFactory[] { null }, null,
-                dataflowHelperFactory, null, false, NoOpLocalResourceFactoryProvider.INSTANCE,
-                NoOpOperationCallbackFactory.INSTANCE, NoOpOperationCallbackFactory.INSTANCE);
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new IndexDropOperatorNodePushable(this, ctx, partition);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
deleted file mode 100644
index 73bed72..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
-
-public class IndexDropOperatorNodePushable extends AbstractOperatorNodePushable {
-    private final IIndexDataflowHelper indexHelper;
-
-    public IndexDropOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        this.indexHelper = opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(opDesc, ctx, partition);
-    }
-
-    @Override
-    public void deinitialize() throws HyracksDataException {
-    }
-
-    @Override
-    public int getInputArity() {
-        return 0;
-    }
-
-    @Override
-    public IFrameWriter getInputFrameWriter(int index) {
-        return null;
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        indexHelper.destroy();
-    }
-
-    @Override
-    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
deleted file mode 100644
index 059f42a..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilter;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-
-public class IndexInsertUpdateDeleteOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-    protected final IIndexOperatorDescriptor opDesc;
-    protected final IHyracksTaskContext ctx;
-    protected final IIndexDataflowHelper indexHelper;
-    protected final IRecordDescriptorProvider recordDescProvider;
-    protected final IndexOperation op;
-    protected final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
-    protected FrameTupleAccessor accessor;
-    protected FrameTupleReference frameTuple;
-    protected ByteBuffer writeBuffer;
-    protected IIndexAccessor indexAccessor;
-    protected ITupleFilter tupleFilter;
-    protected IModificationOperationCallback modCallback;
-
-    public IndexInsertUpdateDeleteOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider, IndexOperation op) {
-        this.opDesc = opDesc;
-        this.ctx = ctx;
-        this.indexHelper = opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(opDesc, ctx, partition);
-        this.recordDescProvider = recordDescProvider;
-        this.op = op;
-        tuple.setFieldPermutation(fieldPermutation);
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-        accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRecDesc);
-        writeBuffer = ctx.allocateFrame();
-        writer.open();
-        indexHelper.open();
-        IIndex index = indexHelper.getIndexInstance();
-        try {
-            modCallback = opDesc.getModificationOpCallbackFactory().createModificationOperationCallback(
-                    indexHelper.getResourceID(), index, ctx);
-            indexAccessor = index.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
-            ITupleFilterFactory tupleFilterFactory = opDesc.getTupleFilterFactory();
-            if (tupleFilterFactory != null) {
-                tupleFilter = tupleFilterFactory.createTupleFilter(indexHelper.getTaskContext());
-                frameTuple = new FrameTupleReference();
-            }
-        } catch (Exception e) {
-            indexHelper.close();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            try {
-                if (tupleFilter != null) {
-                    frameTuple.reset(accessor, i);
-                    if (!tupleFilter.accept(frameTuple)) {
-                        continue;
-                    }
-                }
-                tuple.reset(accessor, i);
-
-                switch (op) {
-                    case INSERT: {
-                        indexAccessor.insert(tuple);
-                        break;
-                    }
-                    case UPDATE: {
-                        indexAccessor.update(tuple);
-                        break;
-                    }
-                    case UPSERT: {
-                        indexAccessor.upsert(tuple);
-                        break;
-                    }
-                    case DELETE: {
-                        indexAccessor.delete(tuple);
-                        break;
-                    }
-                    default: {
-                        throw new HyracksDataException("Unsupported operation " + op
-                                + " in tree index InsertUpdateDelete operator");
-                    }
-                }
-            } catch (HyracksDataException e) {
-                throw e;
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-        }
-        // Pass a copy of the frame to next op.
-        System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0, buffer.capacity());
-        FrameUtils.flushFrame(writeBuffer, writer);
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            writer.close();
-        } finally {
-            indexHelper.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        writer.fail();
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexLifecycleManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexLifecycleManager.java
deleted file mode 100644
index 197aecc..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexLifecycleManager.java
+++ /dev/null
@@ -1,181 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-
-public class IndexLifecycleManager implements IIndexLifecycleManager {
-    private static final long DEFAULT_MEMORY_BUDGET = 1024 * 1024 * 100; // 100 megabytes
-
-    private final Map<Long, IndexInfo> indexInfos;
-    private final long memoryBudget;
-
-    private long memoryUsed;
-
-    public IndexLifecycleManager() {
-        this(DEFAULT_MEMORY_BUDGET);
-    }
-
-    public IndexLifecycleManager(long memoryBudget) {
-        this.indexInfos = new HashMap<Long, IndexInfo>();
-        this.memoryBudget = memoryBudget;
-        this.memoryUsed = 0;
-    }
-
-    private boolean evictCandidateIndex() throws HyracksDataException {
-        // Why min()? As a heuristic for eviction, we will take an open index (an index consuming memory) 
-        // that is not being used (refcount == 0) and has been least recently used. The sort order defined 
-        // for IndexInfo maintains this. See IndexInfo.compareTo().
-        IndexInfo info = Collections.min(indexInfos.values());
-        if (info.referenceCount != 0 || !info.isOpen) {
-            return false;
-        }
-
-        info.index.deactivate();
-        memoryUsed -= info.index.getMemoryAllocationSize();
-        info.isOpen = false;
-
-        return true;
-    }
-
-    @Override
-    public IIndex getIndex(long resourceID) {
-        IndexInfo info = indexInfos.get(resourceID);
-        return info == null ? null : info.index;
-    }
-
-    @Override
-    public void register(long resourceID, IIndex index) throws HyracksDataException {
-        if (indexInfos.containsKey(resourceID)) {
-            throw new HyracksDataException("Index with resource ID " + resourceID + " already exists.");
-        }
-
-        indexInfos.put(resourceID, new IndexInfo(index));
-    }
-
-    @Override
-    public void unregister(long resourceID) throws HyracksDataException {
-        IndexInfo info = indexInfos.remove(resourceID);
-        if (info == null) {
-            throw new HyracksDataException("Index with resource ID " + resourceID + " does not exist.");
-        }
-
-        if (info.referenceCount != 0) {
-            indexInfos.put(resourceID, info);
-            throw new HyracksDataException("Cannot remove index while it is open.");
-        }
-
-        if (info.isOpen) {
-            info.index.deactivate();
-            memoryUsed -= info.index.getMemoryAllocationSize();
-        }
-    }
-
-    @Override
-    public void open(long resourceID) throws HyracksDataException {
-        IndexInfo info = indexInfos.get(resourceID);
-        if (info == null) {
-            throw new HyracksDataException("Failed to open index with resource ID " + resourceID
-                    + " since it does not exist.");
-        }
-
-        long inMemorySize = info.index.getMemoryAllocationSize();
-        while (memoryUsed + inMemorySize > memoryBudget) {
-            if (!evictCandidateIndex()) {
-                throw new HyracksDataException("Cannot activate index since memory budget would be exceeded.");
-            }
-        }
-
-        if (!info.isOpen) {
-            info.index.activate();
-            info.isOpen = true;
-            memoryUsed += inMemorySize;
-        }
-        info.touch();
-    }
-
-    @Override
-    public void close(long resourceID) {
-        indexInfos.get(resourceID).untouch();
-    }
-
-    private class IndexInfo implements Comparable<IndexInfo> {
-        private final IIndex index;
-        private int referenceCount;
-        private long lastAccess;
-        private boolean isOpen;
-
-        public IndexInfo(IIndex index) {
-            this.index = index;
-            this.lastAccess = -1;
-            this.referenceCount = 0;
-            this.isOpen = false;
-        }
-
-        public void touch() {
-            lastAccess = System.currentTimeMillis();
-            referenceCount++;
-        }
-
-        public void untouch() {
-            lastAccess = System.currentTimeMillis();
-            referenceCount--;
-        }
-
-        @Override
-        public int compareTo(IndexInfo i) {
-            // sort by (isOpen, referenceCount, lastAccess) ascending, where true < false
-            //
-            // Example sort order:
-            // -------------------
-            // (F, 0, 70)       <-- largest
-            // (F, 0, 60)
-            // (T, 10, 80)
-            // (T, 10, 70)
-            // (T, 9, 90)
-            // (T, 0, 100)      <-- smallest
-            if (isOpen && !i.isOpen) {
-                return -1;
-            } else if (!isOpen && i.isOpen) {
-                return 1;
-            } else {
-                if (referenceCount < i.referenceCount) {
-                    return -1;
-                } else if (referenceCount > i.referenceCount) {
-                    return 1;
-                } else {
-                    if (lastAccess < i.lastAccess) {
-                        return -1;
-                    } else if (lastAccess > i.lastAccess) {
-                        return 1;
-                    } else {
-                        return 0;
-                    }
-                }
-            }
-
-        }
-
-        public String toString() {
-            return "{index: " + index + ", isOpen: " + isOpen + ", refCount: " + referenceCount + ", lastAccess: "
-                    + lastAccess + "}";
-        }
-    }
-
-    @Override
-    public List<IIndex> getOpenIndexes() {
-        List<IIndex> openIndexes = new ArrayList<IIndex>();
-        for (IndexInfo i : indexInfos.values()) {
-            if (i.isOpen) {
-                openIndexes.add(i.index);
-            }
-        }
-        return openIndexes;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
deleted file mode 100644
index 98beea2..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-
-public abstract class IndexSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-    protected final IIndexOperatorDescriptor opDesc;
-    protected final IHyracksTaskContext ctx;
-    protected final IIndexDataflowHelper indexHelper;
-    protected FrameTupleAccessor accessor;
-
-    protected ByteBuffer writeBuffer;
-    protected FrameTupleAppender appender;
-    protected ArrayTupleBuilder tb;
-    protected DataOutput dos;
-
-    protected IIndex index;
-    protected ISearchPredicate searchPred;
-    protected IIndexCursor cursor;
-    protected IIndexAccessor indexAccessor;
-
-    protected final RecordDescriptor inputRecDesc;
-    protected final boolean retainInput;
-    protected FrameTupleReference frameTuple;
-
-    public IndexSearchOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            IRecordDescriptorProvider recordDescProvider) {
-        this.opDesc = opDesc;
-        this.ctx = ctx;
-        this.indexHelper = opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(opDesc, ctx, partition);
-        this.retainInput = opDesc.getRetainInput();
-        this.inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-    }
-
-    protected abstract ISearchPredicate createSearchPredicate();
-
-    protected abstract void resetSearchPredicate(int tupleIndex);
-
-    protected IIndexCursor createCursor() {
-        return indexAccessor.createSearchCursor();
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRecDesc);
-        writer.open();
-        indexHelper.open();
-        index = indexHelper.getIndexInstance();
-        try {
-            searchPred = createSearchPredicate();
-            writeBuffer = ctx.allocateFrame();
-            tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
-            dos = tb.getDataOutput();
-            appender = new FrameTupleAppender(ctx.getFrameSize());
-            appender.reset(writeBuffer, true);
-            ISearchOperationCallback searchCallback = opDesc.getSearchOpCallbackFactory()
-                    .createSearchOperationCallback(indexHelper.getResourceID(), ctx);
-            indexAccessor = index.createAccessor(NoOpOperationCallback.INSTANCE, searchCallback);
-            cursor = createCursor();
-            if (retainInput) {
-                frameTuple = new FrameTupleReference();
-            }
-        } catch (Exception e) {
-            indexHelper.close();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    protected void writeSearchResults(int tupleIndex) throws Exception {
-        while (cursor.hasNext()) {
-            tb.reset();
-            cursor.next();
-            if (retainInput) {
-                frameTuple.reset(accessor, tupleIndex);
-                for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-                    dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-                    tb.addFieldEndOffset();
-                }
-            }
-            ITupleReference tuple = cursor.getTuple();
-            for (int i = 0; i < tuple.getFieldCount(); i++) {
-                dos.write(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-                tb.addFieldEndOffset();
-            }
-            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                FrameUtils.flushFrame(writeBuffer, writer);
-                appender.reset(writeBuffer, true);
-                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                    throw new IllegalStateException();
-                }
-            }
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        try {
-            for (int i = 0; i < tupleCount; i++) {
-                resetSearchPredicate(i);
-                cursor.reset();
-                indexAccessor.search(cursor, searchPred);
-                writeSearchResults(i);
-            }
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            if (appender.getTupleCount() > 0) {
-                FrameUtils.flushFrame(writeBuffer, writer);
-            }
-            writer.close();
-            try {
-                cursor.close();
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-        } finally {
-            indexHelper.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        writer.fail();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
deleted file mode 100644
index f7f57e6..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class TreeIndexBulkLoadOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int[] fieldPermutation;
-    private final float fillFactor;
-    private final boolean verifyInput;
-    private final long numElementsHint;
-
-    public TreeIndexBulkLoadOperatorDescriptor(IOperatorDescriptorRegistry spec,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields, int[] fieldPermutation,
-            float fillFactor, boolean verifyInput, long numElementsHint,
-            IIndexDataflowHelperFactory dataflowHelperFactory,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, 1, 0, null, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, bloomFilterKeyFields, dataflowHelperFactory, null, false,
-                NoOpLocalResourceFactoryProvider.INSTANCE, NoOpOperationCallbackFactory.INSTANCE,
-                modificationOpCallbackFactory);
-        this.fieldPermutation = fieldPermutation;
-        this.fillFactor = fillFactor;
-        this.verifyInput = verifyInput;
-        this.numElementsHint = numElementsHint;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new IndexBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor, verifyInput,
-                numElementsHint, recordDescProvider);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorDescriptor.java
deleted file mode 100644
index 8b7e81d..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorDescriptor.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-
-public class TreeIndexCreateOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    public TreeIndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManagerInterface storageManager,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
-            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields,
-            IIndexDataflowHelperFactory dataflowHelperFactory,
-            ILocalResourceFactoryProvider localResourceFactoryProvider,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, 0, 0, null, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, bloomFilterKeyFields, dataflowHelperFactory, null, false,
-                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE, modificationOpCallbackFactory);
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new IndexCreateOperatorNodePushable(this, ctx, partition);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDataflowHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDataflowHelper.java
deleted file mode 100644
index 711dfe0..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDataflowHelper.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
-
-public abstract class TreeIndexDataflowHelper extends IndexDataflowHelper {
-
-    public TreeIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition) {
-        super(opDesc, ctx, partition);
-    }
-
-    public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
-        return new TreeIndexDiskOrderScanCursor(leafFrame);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
deleted file mode 100644
index a8644e4..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class TreeIndexDiskOrderScanOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    public TreeIndexDiskOrderScanOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IIndexDataflowHelperFactory dataflowHelperFactory, ISearchOperationCallbackFactory searchOpCallbackProvider) {
-        super(spec, 0, 1, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits, null, null,
-                dataflowHelperFactory, null, false, NoOpLocalResourceFactoryProvider.INSTANCE,
-                searchOpCallbackProvider, NoOpOperationCallbackFactory.INSTANCE);
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexDiskOrderScanOperatorNodePushable(this, ctx, partition);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
deleted file mode 100644
index a861af7..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-
-public class TreeIndexDiskOrderScanOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-    private final AbstractTreeIndexOperatorDescriptor opDesc;
-    private final IHyracksTaskContext ctx;
-    private final TreeIndexDataflowHelper treeIndexHelper;
-    private ITreeIndex treeIndex;
-
-    public TreeIndexDiskOrderScanOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
-            IHyracksTaskContext ctx, int partition) {
-        this.opDesc = opDesc;
-        this.ctx = ctx;
-        this.treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory()
-                .createIndexDataflowHelper(opDesc, ctx, partition);
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        treeIndexHelper.open();
-        treeIndex = (ITreeIndex) treeIndexHelper.getIndexInstance();
-        try {
-            ITreeIndexFrame cursorFrame = treeIndex.getLeafFrameFactory().createFrame();
-            ITreeIndexCursor cursor = treeIndexHelper.createDiskOrderScanCursor(cursorFrame);
-            ISearchOperationCallback searchCallback = opDesc.getSearchOpCallbackFactory().createSearchOperationCallback(
-                    treeIndexHelper.getResourceID(), ctx);
-            ITreeIndexAccessor indexAccessor = (ITreeIndexAccessor) treeIndex.createAccessor(
-                    NoOpOperationCallback.INSTANCE, searchCallback);
-            writer.open();
-            try {
-                indexAccessor.diskOrderScan(cursor);
-                int fieldCount = treeIndex.getFieldCount();
-                ByteBuffer frame = ctx.allocateFrame();
-                FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-                appender.reset(frame, true);
-                ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-                DataOutput dos = tb.getDataOutput();
-
-                while (cursor.hasNext()) {
-                    tb.reset();
-                    cursor.next();
-
-                    ITupleReference frameTuple = cursor.getTuple();
-                    for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-                        dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-                        tb.addFieldEndOffset();
-                    }
-
-                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                        FrameUtils.flushFrame(frame, writer);
-                        appender.reset(frame, true);
-                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                            throw new IllegalStateException();
-                        }
-                    }
-                }
-                if (appender.getTupleCount() > 0) {
-                    FrameUtils.flushFrame(frame, writer);
-                }
-            } catch (Exception e) {
-                writer.fail();
-                throw new HyracksDataException(e);
-            } finally {
-                cursor.close();
-                writer.close();
-            }
-        } catch (Exception e) {
-            treeIndexHelper.close();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void deinitialize() throws HyracksDataException {
-        treeIndexHelper.close();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
deleted file mode 100644
index 84e6090..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class TreeIndexInsertUpdateDeleteOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int[] fieldPermutation;
-    private final IndexOperation op;
-
-    public TreeIndexInsertUpdateDeleteOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields, int[] fieldPermutation,
-            IndexOperation op, IIndexDataflowHelperFactory dataflowHelperFactory,
-            ITupleFilterFactory tupleFilterFactory, IModificationOperationCallbackFactory modificationOpCallbackProvider) {
-        super(spec, 1, 1, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, bloomFilterKeyFields, dataflowHelperFactory, tupleFilterFactory, false,
-                NoOpLocalResourceFactoryProvider.INSTANCE, NoOpOperationCallbackFactory.INSTANCE,
-                modificationOpCallbackProvider);
-        this.fieldPermutation = fieldPermutation;
-        this.op = op;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new IndexInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
-                recordDescProvider, op);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
deleted file mode 100644
index b589f96..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class TreeIndexStatsOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-    private static final RecordDescriptor recDesc = new RecordDescriptor(
-            new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
-
-    public TreeIndexStatsOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManagerInterface storageManager,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
-            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields,
-            IIndexDataflowHelperFactory dataflowHelperFactory, ISearchOperationCallbackFactory searchOpCallbackProvider) {
-        super(spec, 0, 1, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, bloomFilterKeyFields, dataflowHelperFactory, null, false,
-                NoOpLocalResourceFactoryProvider.INSTANCE, searchOpCallbackProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexStatsOperatorNodePushable(this, ctx, partition);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
deleted file mode 100644
index 09d357d..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.common.dataflow;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class TreeIndexStatsOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-    private final AbstractTreeIndexOperatorDescriptor opDesc;
-    private final IHyracksTaskContext ctx;
-    private final TreeIndexDataflowHelper treeIndexHelper;
-    private TreeIndexStatsGatherer statsGatherer;
-
-    public TreeIndexStatsOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        this.opDesc = opDesc;
-        this.ctx = ctx;
-        this.treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory()
-                .createIndexDataflowHelper(opDesc, ctx, partition);
-
-    }
-
-    @Override
-    public void deinitialize() throws HyracksDataException {
-    }
-
-    @Override
-    public IFrameWriter getInputFrameWriter(int index) {
-        return null;
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        treeIndexHelper.open();
-        ITreeIndex treeIndex = (ITreeIndex) treeIndexHelper.getIndexInstance();
-        try {
-            writer.open();
-            IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-            IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
-            int indexFileId = fileMapProvider.lookupFileId(treeIndexHelper.getFileReference());
-            statsGatherer = new TreeIndexStatsGatherer(bufferCache, treeIndex.getFreePageManager(), indexFileId,
-                    treeIndex.getRootPageId());
-            TreeIndexStats stats = statsGatherer.gatherStats(treeIndex.getLeafFrameFactory().createFrame(), treeIndex
-                    .getInteriorFrameFactory().createFrame(), treeIndex.getFreePageManager().getMetaDataFrameFactory()
-                    .createFrame());
-            // Write the stats output as a single string field.
-            ByteBuffer frame = ctx.allocateFrame();
-            FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-            appender.reset(frame, true);
-            ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
-            DataOutput dos = tb.getDataOutput();
-            tb.reset();
-            UTF8StringSerializerDeserializer.INSTANCE.serialize(stats.toString(), dos);
-            tb.addFieldEndOffset();
-            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                throw new IllegalStateException();
-            }
-            FrameUtils.flushFrame(frame, writer);
-        } catch (Exception e) {
-            writer.fail();
-        } finally {
-            writer.close();
-            treeIndexHelper.close();
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenThread.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenThread.java
deleted file mode 100644
index 6fff620..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenThread.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.io.IOException;
-import java.util.Random;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-
-/**
- * Quick & dirty data generator for multi-thread testing. 
- *
- */
-@SuppressWarnings("rawtypes")
-public class DataGenThread extends Thread {
-    public final BlockingQueue<TupleBatch> tupleBatchQueue;
-    private final int maxNumBatches;
-    private final int maxOutstandingBatches;        
-    private int numBatches = 0;
-    private final Random rnd;
-    
-    // maxOutstandingBatches pre-created tuple-batches for populating the queue.
-    private TupleBatch[] tupleBatches;
-    private int ringPos;
-    
-    public DataGenThread(int numConsumers, int maxNumBatches, int batchSize, ISerializerDeserializer[] fieldSerdes,
-            int payloadSize, int rndSeed, int maxOutstandingBatches, boolean sorted) {
-        this.maxNumBatches = maxNumBatches;
-        this.maxOutstandingBatches = maxOutstandingBatches;
-        rnd = new Random(rndSeed);
-        tupleBatches = new TupleBatch[maxOutstandingBatches];
-        IFieldValueGenerator[] fieldGens = DataGenUtils.getFieldGensFromSerdes(fieldSerdes, rnd, sorted);
-        for (int i = 0; i < maxOutstandingBatches; i++) {
-            tupleBatches[i] = new TupleBatch(batchSize, fieldGens, fieldSerdes, payloadSize);
-        }
-        tupleBatchQueue = new LinkedBlockingQueue<TupleBatch>(maxOutstandingBatches);
-        ringPos = 0;
-    }
-    
-    public DataGenThread(int numConsumers, int maxNumBatches, int batchSize, ISerializerDeserializer[] fieldSerdes,
-            IFieldValueGenerator[] fieldGens, int rndSeed, int maxOutstandingBatches) {
-        this.maxNumBatches = maxNumBatches;
-        this.maxOutstandingBatches = maxOutstandingBatches;
-        rnd = new Random(rndSeed);
-        tupleBatches = new TupleBatch[maxOutstandingBatches];
-        for (int i = 0; i < maxOutstandingBatches; i++) {
-            tupleBatches[i] = new TupleBatch(batchSize, fieldGens, fieldSerdes, 0);
-        }
-        tupleBatchQueue = new LinkedBlockingQueue<TupleBatch>(maxOutstandingBatches);
-        ringPos = 0;
-    }
-    
-    @Override
-    public void run() {
-        while(numBatches < maxNumBatches) {
-            boolean added = false;
-            try {
-                if (tupleBatches[ringPos].inUse.compareAndSet(false, true)) {                    
-                    tupleBatches[ringPos].generate();
-                    tupleBatchQueue.put(tupleBatches[ringPos]);
-                    added = true;
-                }
-            } catch (IOException e) {
-                e.printStackTrace();
-            } catch (InterruptedException e) {
-                e.printStackTrace();
-            }
-            if (added) {
-                numBatches++;
-                ringPos++;
-                if (ringPos >= maxOutstandingBatches) {
-                    ringPos = 0;
-                }
-            }
-        }
-    }
-    
-    public TupleBatch getBatch() throws InterruptedException {
-        return tupleBatchQueue.take();
-    }
-    
-    public void releaseBatch(TupleBatch batch) {
-        batch.inUse.set(false);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenUtils.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenUtils.java
deleted file mode 100644
index b3914e6..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenUtils.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.util.Random;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-
-@SuppressWarnings("rawtypes") 
-public class DataGenUtils {
-    public static IFieldValueGenerator getFieldGenFromSerde(ISerializerDeserializer serde, Random rnd, boolean sorted) {
-        if (serde instanceof IntegerSerializerDeserializer) {
-            if (sorted) {
-                return new SortedIntegerFieldValueGenerator();
-            } else {
-                return new IntegerFieldValueGenerator(rnd);
-            }
-        } else if (serde instanceof FloatSerializerDeserializer) {
-            if (sorted) {
-                return new SortedFloatFieldValueGenerator();
-            } else {
-                return new FloatFieldValueGenerator(rnd);
-            }
-        } else if (serde instanceof DoubleSerializerDeserializer) {
-            if (sorted) {
-                return new SortedDoubleFieldValueGenerator();
-            } else {
-                return new DoubleFieldValueGenerator(rnd);
-            }
-        } else if (serde instanceof UTF8StringSerializerDeserializer) {
-            return new StringFieldValueGenerator(20, rnd);
-        }
-        return null;
-    }
-    
-    public static IFieldValueGenerator[] getFieldGensFromSerdes(ISerializerDeserializer[] serdes, Random rnd, boolean sorted) {
-        IFieldValueGenerator[] fieldValueGens = new IFieldValueGenerator[serdes.length];
-        for (int i = 0; i < serdes.length; i++) {
-            fieldValueGens[i] = getFieldGenFromSerde(serdes[i], rnd, sorted);
-        }
-        return fieldValueGens;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DocumentStringFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DocumentStringFieldValueGenerator.java
deleted file mode 100644
index b12bb7d..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DocumentStringFieldValueGenerator.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-public class DocumentStringFieldValueGenerator implements IFieldValueGenerator<String> {
-    private final String FIRST_NAMES_FILE = "dist.all.first.cleaned";
-    private final String LAST_NAMES_FILE = "dist.all.last.cleaned";
-
-    private final int docMinWords;
-    private final int docMaxWords;
-    private final int maxDictionarySize;
-    private final Random rnd;
-    private int[] cumulIntRanges;
-
-    private List<String> tokenDict = new ArrayList<String>();
-
-    public DocumentStringFieldValueGenerator(int docMinWords, int docMaxWords, int maxDictionarySize, Random rnd)
-            throws IOException {
-        this.docMinWords = docMinWords;
-        this.docMaxWords = docMaxWords;
-        this.maxDictionarySize = maxDictionarySize;
-        this.rnd = rnd;
-        initDictionary();
-        double[] zipfProbDist = ProbabilityHelper.getZipfProbDist(tokenDict.size(), 1);
-        cumulIntRanges = ProbabilityHelper.getCumulIntRanges(zipfProbDist);
-    }
-
-    private void initDictionary() throws IOException {
-        String line;
-        int count = 0;
-
-        // Read first names from data file.
-        InputStream firstNamesIn = this.getClass().getClassLoader().getResourceAsStream(FIRST_NAMES_FILE);
-        BufferedReader firstNamesReader = new BufferedReader(new InputStreamReader(firstNamesIn));
-        try {
-            while (count < maxDictionarySize && (line = firstNamesReader.readLine()) != null) {
-                tokenDict.add(line.trim());
-                count++;
-            }
-        } finally {
-            firstNamesReader.close();
-        }
-
-        // Read last names from data file.
-        InputStream lastNamesIn = this.getClass().getClassLoader().getResourceAsStream(LAST_NAMES_FILE);
-        BufferedReader lastNamesReader = new BufferedReader(new InputStreamReader(lastNamesIn));
-        try {
-            while (count < maxDictionarySize && (line = lastNamesReader.readLine()) != null) {
-                tokenDict.add(line.trim());
-                count++;
-            }
-        } finally {
-            lastNamesReader.close();
-        }
-    }
-
-    @Override
-    public String next() {
-        StringBuilder strBuilder = new StringBuilder();
-        int numWords = Math.abs(rnd.nextInt()) % (docMaxWords - docMinWords + 1) + docMinWords;
-        for (int i = 0; i < numWords; i++) {
-            int ix = ProbabilityHelper.choose(cumulIntRanges, rnd.nextInt());
-            strBuilder.append(tokenDict.get(ix));
-            if (i != numWords - 1) {
-                strBuilder.append(" ");
-            }
-        }
-        return strBuilder.toString();
-    }
-
-    public List<String> getTokenDictionary() {
-        return tokenDict;
-    }
-
-    @Override
-    public void reset() {
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DoubleFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DoubleFieldValueGenerator.java
deleted file mode 100644
index c98c249..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DoubleFieldValueGenerator.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.util.Random;
-
-public class DoubleFieldValueGenerator implements IFieldValueGenerator<Double> {
-    protected final Random rnd;
-
-    public DoubleFieldValueGenerator(Random rnd) {
-        this.rnd = rnd;
-    }
-
-    @Override
-    public Double next() {
-        return rnd.nextDouble();
-    }
-
-    @Override
-    public void reset() {
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/FloatFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/FloatFieldValueGenerator.java
deleted file mode 100644
index 7c3ff81..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/FloatFieldValueGenerator.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.util.Random;
-
-public class FloatFieldValueGenerator implements IFieldValueGenerator<Float> {
-    protected final Random rnd;
-
-    public FloatFieldValueGenerator(Random rnd) {
-        this.rnd = rnd;
-    }
-
-    @Override
-    public Float next() {
-        return rnd.nextFloat();
-    }
-
-    @Override
-    public void reset() {
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
deleted file mode 100644
index dfeead6..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-public interface IFieldValueGenerator<T> {
-    public T next();
-    public void reset();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IntegerFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IntegerFieldValueGenerator.java
deleted file mode 100644
index cd6e2a6..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IntegerFieldValueGenerator.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.util.Random;
-
-public class IntegerFieldValueGenerator implements IFieldValueGenerator<Integer> {
-    protected final Random rnd;
-
-    public IntegerFieldValueGenerator(Random rnd) {
-        this.rnd = rnd;
-    }
-
-    @Override
-    public Integer next() {
-        return rnd.nextInt();
-    }
-
-    @Override
-    public void reset() {
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
deleted file mode 100644
index 6b86278..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-public class PersonNameFieldValueGenerator implements IFieldValueGenerator<String> {
-    private final String FIRST_NAMES_FILE = "dist.all.first.cleaned";
-    private final String LAST_NAMES_FILE = "dist.all.last.cleaned";
-
-    private final Random rnd;
-    private final double middleInitialProb;
-    private final String letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
-
-    private List<String> firstNames = new ArrayList<String>();
-    private List<String> lastNames = new ArrayList<String>();
-
-    public PersonNameFieldValueGenerator(Random rnd, double middleInitialProb)
-            throws IOException {
-        this.rnd = rnd;
-        this.middleInitialProb = middleInitialProb;
-        initNames();
-    }
-
-    private void initNames() throws IOException {
-        String line;
-
-        // Read first names from data file.
-        InputStream firstNamesIn = this.getClass().getClassLoader().getResourceAsStream(FIRST_NAMES_FILE);
-        BufferedReader firstNamesReader = new BufferedReader(new InputStreamReader(firstNamesIn));
-        try {
-            while ((line = firstNamesReader.readLine()) != null) {
-                firstNames.add(line.trim());
-            }
-        } finally {
-            firstNamesReader.close();
-        }
-
-        // Read last names from data file.
-        InputStream lastNamesIn = this.getClass().getClassLoader().getResourceAsStream(LAST_NAMES_FILE);
-        BufferedReader lastNamesReader = new BufferedReader(new InputStreamReader(lastNamesIn));
-        try {
-            while ((line = lastNamesReader.readLine()) != null) {
-                lastNames.add(line.trim());
-            }
-        } finally {
-            lastNamesReader.close();
-        }
-    }
-
-    @Override
-    public String next() {
-        StringBuilder strBuilder = new StringBuilder();
-
-        // First name.
-        int fix = Math.abs(rnd.nextInt()) % firstNames.size();
-        strBuilder.append(firstNames.get(fix));
-        strBuilder.append(" ");
-        
-        // Optional middle initial.
-        double d = Math.abs(rnd.nextDouble());
-        if (d <= middleInitialProb) {
-            int mix = Math.abs(rnd.nextInt()) % letters.length();
-            strBuilder.append(letters.charAt(mix));
-            strBuilder.append(". ");
-        }
-        
-        // Last name.
-        int lix = Math.abs(rnd.nextInt()) % lastNames.size();
-        strBuilder.append(lastNames.get(lix));
-        
-        return strBuilder.toString();
-    }
-
-    @Override
-    public void reset() {
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/ProbabilityHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/ProbabilityHelper.java
deleted file mode 100644
index 1674681..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/ProbabilityHelper.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.util.Arrays;
-
-public class ProbabilityHelper {
-    public static double[] getUniformProbDist(int numChoices) {
-        double[] probDist = new double[numChoices];
-        for (int i = 0; i < numChoices; i++) {
-            probDist[i] = 1.0 / (double) numChoices;
-        }
-        return probDist;
-    }
-
-    public static double[] getZipfProbDist(int numChoices, int zipfSkew) {
-        double[] probDist = new double[numChoices];
-        double divisor = 0;
-        for (int i = 1; i <= numChoices; i++) {
-            divisor += 1.0 / (double) Math.pow((double) i, (double) zipfSkew);
-        }
-        for (int i = 1; i <= numChoices; i++) {
-            probDist[i - 1] = (1.0 / (double) Math.pow((double) i, (double) zipfSkew)) / divisor;
-        }
-        return probDist;
-    }
-
-    public static int[] getCumulIntRanges(double[] probDist) {
-        int[] opRanges = new int[probDist.length];
-        if (opRanges.length > 1) {
-            opRanges[0] = (int) Math.floor(Integer.MAX_VALUE * probDist[0]);
-            for (int i = 1; i < opRanges.length - 1; i++) {
-                opRanges[i] = opRanges[i - 1] + (int) Math.floor(Integer.MAX_VALUE * probDist[i]);
-            }
-            opRanges[opRanges.length - 1] = Integer.MAX_VALUE;
-        } else {
-            opRanges[0] = Integer.MAX_VALUE;
-        }
-        return opRanges;
-    }
-
-    public static int choose(int[] cumulIntRanges, int randomInt) {
-        int rndVal = Math.abs(randomInt);
-        int ix = Arrays.binarySearch(cumulIntRanges, rndVal);
-        if (ix < 0) {
-            ix = -ix - 1;
-        }
-        return ix;
-    }
-
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedDoubleFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedDoubleFieldValueGenerator.java
deleted file mode 100644
index e93b8de..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedDoubleFieldValueGenerator.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-public class SortedDoubleFieldValueGenerator implements IFieldValueGenerator<Double> {
-    private double val;
-    private final double startVal;
-    
-    public SortedDoubleFieldValueGenerator() {
-        startVal = 0.0d;
-        reset();
-    }
-    
-    public SortedDoubleFieldValueGenerator(double startVal) {
-        this.startVal = startVal;
-        reset();
-    }
-    
-    @Override
-    public Double next() {
-        return val++;
-    }
-
-    @Override
-    public void reset() {
-        val = startVal;        
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedFloatFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedFloatFieldValueGenerator.java
deleted file mode 100644
index fb163e1..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedFloatFieldValueGenerator.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-public class SortedFloatFieldValueGenerator implements IFieldValueGenerator<Float> {
-    private float val = 0.0f;
-    private final float startVal;
-    
-    public SortedFloatFieldValueGenerator() {
-        startVal = 0.0f;
-        reset();
-    }
-    
-    public SortedFloatFieldValueGenerator(float startVal) {
-        this.startVal = startVal;
-        reset();
-    }
-    
-    @Override
-    public Float next() {
-        return val++;
-    }
-
-    @Override
-    public void reset() {
-        val = startVal;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedIntegerFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedIntegerFieldValueGenerator.java
deleted file mode 100644
index a036772..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedIntegerFieldValueGenerator.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-public class SortedIntegerFieldValueGenerator implements IFieldValueGenerator<Integer> {
-    private int val = 0;
-    private final int startVal;
-
-    public SortedIntegerFieldValueGenerator() {
-        startVal = 0;
-        reset();
-    }
-    
-    public SortedIntegerFieldValueGenerator(int startVal) {
-        this.startVal = startVal;
-        reset();
-    }
-    
-    @Override
-    public Integer next() {
-        return val++;
-    }
-
-    @Override
-    public void reset() {
-        val = startVal;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/StringFieldValueGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/StringFieldValueGenerator.java
deleted file mode 100644
index 6bf01a4..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/StringFieldValueGenerator.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.util.Random;
-
-public class StringFieldValueGenerator implements IFieldValueGenerator<String> {
-    private int maxLen;
-    private final Random rnd;
-    
-    public StringFieldValueGenerator(int maxLen, Random rnd) {
-        this.maxLen = maxLen;
-        this.rnd = rnd;
-    }
-
-    public void setMaxLength(int maxLen) {
-        this.maxLen = maxLen;
-    }
-    
-    @Override
-    public String next() {
-        String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < s.length() && i < maxLen; i++) {
-            strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
-        }
-        return strBuilder.toString();
-    }
-
-    @Override
-    public void reset() {
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleBatch.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleBatch.java
deleted file mode 100644
index 375aeb0..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleBatch.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.io.IOException;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-@SuppressWarnings("rawtypes")
-public class TupleBatch {
-    private final int size;
-    private final TupleGenerator[] tupleGens;
-    public final AtomicBoolean inUse = new AtomicBoolean(false);
-    
-    public TupleBatch(int size, IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {        
-        this.size = size;
-        tupleGens = new TupleGenerator[size];
-        for (int i = 0; i < size; i++) {
-            tupleGens[i] = new TupleGenerator(fieldGens, fieldSerdes, payloadSize);
-        }
-    }
-    
-    public void generate() throws IOException {
-        for(TupleGenerator tupleGen : tupleGens) {
-            tupleGen.next();
-        }
-    }
-    
-    public int size() {
-        return size;
-    }
-    
-    public ITupleReference get(int ix) {
-        return tupleGens[ix].get();
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleGenerator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleGenerator.java
deleted file mode 100644
index d0a1062..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleGenerator.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.datagen;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-@SuppressWarnings({"rawtypes", "unchecked" })
-public class TupleGenerator {    
-    protected final ISerializerDeserializer[] fieldSerdes;
-    protected final IFieldValueGenerator[] fieldGens;
-    protected final ArrayTupleBuilder tb;
-    protected final ArrayTupleReference tuple;
-    protected final byte[] payload;
-    protected final DataOutput tbDos;
-    
-    public TupleGenerator(IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {
-        this.fieldSerdes = fieldSerdes;
-        this.fieldGens = fieldGens;
-        tuple = new ArrayTupleReference();
-        if (payloadSize > 0) {
-            tb = new ArrayTupleBuilder(fieldSerdes.length + 1);
-            payload = new byte[payloadSize];
-        } else {
-            tb = new ArrayTupleBuilder(fieldSerdes.length);
-            payload = null;
-        }        
-        tbDos = tb.getDataOutput();
-    }
-
-    public ITupleReference next() throws IOException {
-        tb.reset();
-        for (int i = 0; i < fieldSerdes.length; i++) {
-            fieldSerdes[i].serialize(fieldGens[i].next(), tbDos);
-            tb.addFieldEndOffset();
-        }
-        if (payload != null) {
-            tbDos.write(payload);
-            tb.addFieldEndOffset();
-        }
-        tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
-        return tuple;
-    }
-    
-    public ITupleReference get() {
-        return tuple;
-    }
-    
-    public void reset() {
-        for (IFieldValueGenerator fieldGen : fieldGens) {
-            fieldGen.reset();
-        }
-    }
-    
-    public ISerializerDeserializer[] getFieldSerdes() {
-        return fieldSerdes;
-    }
-    
-    public IFieldValueGenerator[] getFieldGens() {
-        return fieldGens;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
deleted file mode 100644
index 1b8bc15..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.frames;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-// all meta pages of this kind have a negative level
-// the first meta page has level -1, all other meta pages have level -2
-// the first meta page is special because it guarantees to have a correct max page
-// other meta pages (i.e., with level -2) have junk in the max page field
-
-public class LIFOMetaDataFrame implements ITreeIndexMetaDataFrame {
-
-    // Arbitrarily chosen magic integer.
-    protected static final int MAGIC_VALID_INT = 0x5bd1e995;
-    
-	protected static final int tupleCountOff = 0; //0
-	protected static final int freeSpaceOff = tupleCountOff + 4; //4
-	protected static final int maxPageOff = freeSpaceOff + 4; //8
-	protected static final int levelOff = maxPageOff + 12; //20
-	protected static final int nextPageOff = levelOff + 1; // 21
-	protected static final int validOff = nextPageOff + 4; // 25
-	protected static final int lsnOff = validOff + 4; // 29
-
-	protected ICachedPage page = null;
-	protected ByteBuffer buf = null;
-
-	public int getMaxPage() {
-		return buf.getInt(maxPageOff);
-	}
-
-	public void setMaxPage(int maxPage) {
-		buf.putInt(maxPageOff, maxPage);
-	}
-
-	public int getFreePage() {
-		int tupleCount = buf.getInt(tupleCountOff);
-		if (tupleCount > 0) {
-			// return the last page from the linked list of free pages
-			// TODO: this is a dumb policy, but good enough for now
-			int lastPageOff = buf.getInt(freeSpaceOff) - 4;
-			buf.putInt(freeSpaceOff, lastPageOff);
-			buf.putInt(tupleCountOff, tupleCount - 1);
-			return buf.getInt(lastPageOff);
-		} else {
-			return -1;
-		}
-	}
-
-	// must be checked before adding free page
-	// user of this class is responsible for getting a free page as a new meta
-	// page, latching it, etc. if there is no space on this page
-	public boolean hasSpace() {
-		return buf.getInt(freeSpaceOff) + 4 < buf.capacity();
-	}
-
-	// no bounds checking is done, there must be free space
-	public void addFreePage(int freePage) {
-		int freeSpace = buf.getInt(freeSpaceOff);
-		buf.putInt(freeSpace, freePage);
-		buf.putInt(freeSpaceOff, freeSpace + 4);
-		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-	}
-
-	@Override
-	public byte getLevel() {
-		return buf.get(levelOff);
-	}
-
-	@Override
-	public void setLevel(byte level) {
-		buf.put(levelOff, level);
-	}
-
-	@Override
-	public ICachedPage getPage() {
-		return page;
-	}
-
-	@Override
-	public void setPage(ICachedPage page) {
-		this.page = page;
-		this.buf = page.getBuffer();
-	}
-
-	@Override
-	public void initBuffer(byte level) {
-		buf.putInt(tupleCountOff, 0);
-		buf.putInt(freeSpaceOff, lsnOff + 4);
-		//buf.putInt(maxPageOff, -1);
-		buf.put(levelOff, level);
-		buf.putInt(nextPageOff, -1);
-		setValid(false);
-	}
-
-	@Override
-	public int getNextPage() {
-		return buf.getInt(nextPageOff);
-	}
-
-	@Override
-	public void setNextPage(int nextPage) {
-		buf.putInt(nextPageOff, nextPage);
-	}
-
-    @Override
-    public boolean isValid() {
-        return buf.getInt(validOff) == MAGIC_VALID_INT;
-    }
-
-    @Override
-    public void setValid(boolean isValid) {
-        if (isValid) {
-            buf.putInt(validOff, MAGIC_VALID_INT);
-        } else {
-            buf.putInt(validOff, 0);
-        }
-    }
-
-    @Override
-    public long getLSN() {
-        return buf.getLong(lsnOff);
-    }
-
-    @Override
-    public void setLSN(long lsn) {
-        buf.putLong(lsnOff, lsn);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
deleted file mode 100644
index 31ce573..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
+++ /dev/null
@@ -1,296 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.frames;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public abstract class TreeIndexNSMFrame implements ITreeIndexFrame {
-
-    protected static final int pageLsnOff = 0; // 0
-    protected static final int tupleCountOff = pageLsnOff + 8; // 8
-    protected static final int freeSpaceOff = tupleCountOff + 4; // 12
-    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
-    protected static final int levelOff = totalFreeSpaceOff + 4; // 20
-    protected static final int smFlagOff = levelOff + 1; // 21
-
-    protected ICachedPage page = null;
-    protected ByteBuffer buf = null;
-    protected ISlotManager slotManager;
-
-    protected ITreeIndexTupleWriter tupleWriter;
-    protected ITreeIndexTupleReference frameTuple;
-
-    public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter, ISlotManager slotManager) {
-        this.tupleWriter = tupleWriter;
-        this.frameTuple = tupleWriter.createTupleReference();
-        this.slotManager = slotManager;
-        this.slotManager.setFrame(this);
-    }
-
-    @Override
-    public void initBuffer(byte level) {
-        buf.putLong(pageLsnOff, 0); // TODO: might to set to a different lsn
-        // during creation
-        buf.putInt(tupleCountOff, 0);
-        resetSpaceParams();
-        buf.put(levelOff, level);
-        buf.put(smFlagOff, (byte) 0);
-    }
-
-    @Override
-    public boolean isLeaf() {
-        return buf.get(levelOff) == 0;
-    }
-
-    @Override
-    public boolean isInterior() {
-        return buf.get(levelOff) > 0;
-    }
-
-    @Override
-    public byte getLevel() {
-        return buf.get(levelOff);
-    }
-
-    @Override
-    public void setLevel(byte level) {
-        buf.put(levelOff, level);
-    }
-
-    @Override
-    public int getFreeSpaceOff() {
-        return buf.getInt(freeSpaceOff);
-    }
-
-    @Override
-    public void setFreeSpaceOff(int freeSpace) {
-        buf.putInt(freeSpaceOff, freeSpace);
-    }
-
-    @Override
-    public void setPage(ICachedPage page) {
-        this.page = page;
-        this.buf = page.getBuffer();
-    }
-
-    @Override
-    public ByteBuffer getBuffer() {
-        return page.getBuffer();
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    @Override
-    public boolean compact() {
-        resetSpaceParams();
-        int tupleCount = buf.getInt(tupleCountOff);
-        int freeSpace = buf.getInt(freeSpaceOff);
-        // Sort the slots by the tuple offset they point to.
-        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
-        sortedTupleOffs.ensureCapacity(tupleCount);
-        for (int i = 0; i < tupleCount; i++) {
-            int slotOff = slotManager.getSlotOff(i);
-            int tupleOff = slotManager.getTupleOff(slotOff);
-            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
-        }
-        Collections.sort(sortedTupleOffs);
-        // Iterate over the sorted slots, and move their corresponding tuples to
-        // the left, reclaiming free space.
-        for (int i = 0; i < sortedTupleOffs.size(); i++) {
-            int tupleOff = sortedTupleOffs.get(i).tupleOff;
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
-            int tupleLength = tupleEndOff - tupleOff;
-            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
-            freeSpace += tupleLength;
-        }
-        // Update contiguous free space pointer and total free space indicator.
-        buf.putInt(freeSpaceOff, freeSpace);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
-        return false;
-    }
-
-    @Override
-    public void delete(ITupleReference tuple, int tupleIndex) {
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-        int tupleOff = slotManager.getTupleOff(slotOff);
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int tupleSize = tupleWriter.bytesRequired(frameTuple);
-
-        // perform deletion (we just do a memcpy to overwrite the slot)
-        int slotStartOff = slotManager.getSlotEndOff();
-        int length = slotOff - slotStartOff;
-        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
-
-        // maintain space information
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
-    }
-
-    @Override
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
-        int bytesRequired = tupleWriter.bytesRequired(tuple);
-        // Enough space in the contiguous space region?
-        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
-                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        }
-        // Enough space after compaction?
-        if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff)) {
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-        }
-        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
-
-    @Override
-    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex) {
-        frameTuple.resetByTupleIndex(this, oldTupleIndex);
-        int oldTupleBytes = frameTuple.getTupleSize();
-        int newTupleBytes = tupleWriter.bytesRequired(newTuple);
-        int additionalBytesRequired = newTupleBytes - oldTupleBytes;
-        // Enough space for an in-place update?
-        if (additionalBytesRequired <= 0) {
-            return FrameOpSpaceStatus.SUFFICIENT_INPLACE_SPACE;
-        }
-        // Enough space if we delete the old tuple and insert the new one
-        // without compaction?
-        if (newTupleBytes <= buf.capacity() - buf.getInt(freeSpaceOff)
-                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        }
-        // Enough space if we delete the old tuple and compact?
-        if (additionalBytesRequired <= buf.getInt(totalFreeSpaceOff)) {
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-        }
-        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
-
-    protected void resetSpaceParams() {
-        buf.putInt(freeSpaceOff, smFlagOff + 1);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
-    }
-
-    @Override
-    public void insert(ITupleReference tuple, int tupleIndex) {
-        slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
-    }
-
-    @Override
-    public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) {
-        frameTuple.resetByTupleIndex(this, oldTupleIndex);
-        int oldTupleBytes = frameTuple.getTupleSize();
-        int slotOff = slotManager.getSlotOff(oldTupleIndex);
-        int bytesWritten = 0;
-        if (inPlace) {
-            // Overwrite the old tuple in place.
-            bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), buf.getInt(slotOff));
-        } else {
-            // Insert the new tuple at the end of the free space, and change the
-            // slot value (effectively "deleting" the old tuple).
-            int newTupleOff = buf.getInt(freeSpaceOff);
-            bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), newTupleOff);
-            // Update slot value.
-            buf.putInt(slotOff, newTupleOff);
-            // Update contiguous free space pointer.
-            buf.putInt(freeSpaceOff, newTupleOff + bytesWritten);
-        }
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + oldTupleBytes - bytesWritten);
-    }
-
-    @Override
-    public String printHeader() {
-        StringBuilder strBuilder = new StringBuilder();
-        strBuilder.append("pageLsnOff:        " + pageLsnOff + "\n");
-        strBuilder.append("tupleCountOff:     " + tupleCountOff + "\n");
-        strBuilder.append("freeSpaceOff:      " + freeSpaceOff + "\n");
-        strBuilder.append("totalFreeSpaceOff: " + totalFreeSpaceOff + "\n");
-        strBuilder.append("levelOff:          " + levelOff + "\n");
-        strBuilder.append("smFlagOff:         " + smFlagOff + "\n");
-        return strBuilder.toString();
-    }
-
-    @Override
-    public int getTupleCount() {
-        return buf.getInt(tupleCountOff);
-    }
-
-    public ISlotManager getSlotManager() {
-        return slotManager;
-    }
-
-    @Override
-    public int getTupleOffset(int slotNum) {
-        return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum * slotManager.getSlotSize());
-    }
-
-    @Override
-    public long getPageLsn() {
-        return buf.getLong(pageLsnOff);
-    }
-
-    @Override
-    public void setPageLsn(long pageLsn) {
-        buf.putLong(pageLsnOff, pageLsn);
-    }
-
-    @Override
-    public int getTotalFreeSpace() {
-        return buf.getInt(totalFreeSpaceOff);
-    }
-
-    @Override
-    public boolean compress() {
-        return false;
-    }
-
-    @Override
-    public int getSlotSize() {
-        return slotManager.getSlotSize();
-    }
-
-    @Override
-    public ITreeIndexTupleWriter getTupleWriter() {
-        return tupleWriter;
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return tupleWriter.createTupleReference();
-    }
-
-    public int getFreeContiguousSpace() {
-        return buf.capacity() - getFreeSpaceOff() - (getTupleCount() * slotManager.getSlotSize());
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManagerFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManagerFactory.java
deleted file mode 100644
index 1c373d5..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManagerFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.freepage;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class LinkedListFreePageManagerFactory implements IFreePageManagerFactory {
-
-    private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
-    private final IBufferCache bufferCache;
-
-    public LinkedListFreePageManagerFactory(IBufferCache bufferCache,
-            ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
-        this.metaDataFrameFactory = metaDataFrameFactory;
-        this.bufferCache = bufferCache;
-    }
-
-    public IFreePageManager createFreePageManager() {
-        return new LinkedListFreePageManager(bufferCache, 0, metaDataFrameFactory);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/AbstractTreeIndex.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/AbstractTreeIndex.java
deleted file mode 100644
index 9bf4a4f..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/AbstractTreeIndex.java
+++ /dev/null
@@ -1,359 +0,0 @@
-/*

- * Copyright 2009-2012 by The Regents of the University of California

- * Licensed under the Apache License, Version 2.0 (the "License");

- * you may not use this file except in compliance with the License.

- * you may obtain a copy of the License from

- * 

- *     http://www.apache.org/licenses/LICENSE-2.0

- * 

- * Unless required by applicable law or agreed to in writing, software

- * distributed under the License is distributed on an "AS IS" BASIS,

- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

- * See the License for the specific language governing permissions and

- * limitations under the License.

- */

-

-package edu.uci.ics.hyracks.storage.am.common.impls;

-

-import java.util.ArrayList;

-

-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;

-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

-import edu.uci.ics.hyracks.api.io.FileReference;

-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;

-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;

-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;

-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;

-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;

-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;

-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;

-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;

-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;

-

-public abstract class AbstractTreeIndex implements ITreeIndex {

-

-    protected final static int rootPage = 1;

-

-    protected final IBufferCache bufferCache;

-    protected final IFileMapProvider fileMapProvider;

-    protected final IFreePageManager freePageManager;

-

-    protected final ITreeIndexFrameFactory interiorFrameFactory;

-    protected final ITreeIndexFrameFactory leafFrameFactory;

-

-    protected final IBinaryComparatorFactory[] cmpFactories;

-    protected final int fieldCount;

-

-    protected FileReference file;

-    protected int fileId = -1;

-

-    private boolean isActivated = false;

-

-    public AbstractTreeIndex(IBufferCache bufferCache, IFileMapProvider fileMapProvider,

-            IFreePageManager freePageManager, ITreeIndexFrameFactory interiorFrameFactory,

-            ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount,

-            FileReference file) {

-        this.bufferCache = bufferCache;

-        this.fileMapProvider = fileMapProvider;

-        this.freePageManager = freePageManager;

-        this.interiorFrameFactory = interiorFrameFactory;

-        this.leafFrameFactory = leafFrameFactory;

-        this.cmpFactories = cmpFactories;

-        this.fieldCount = fieldCount;

-        this.file = file;

-    }

-

-    public synchronized void create() throws HyracksDataException {

-        if (isActivated) {

-            throw new HyracksDataException("Failed to create the index since it is activated.");

-        }

-

-        boolean fileIsMapped = false;

-        synchronized (fileMapProvider) {

-            fileIsMapped = fileMapProvider.isMapped(file);

-            if (!fileIsMapped) {

-                bufferCache.createFile(file);

-            }

-            fileId = fileMapProvider.lookupFileId(file);

-            try {

-                // Also creates the file if it doesn't exist yet.

-                bufferCache.openFile(fileId);

-            } catch (HyracksDataException e) {

-                // Revert state of buffer cache since file failed to open.

-                if (!fileIsMapped) {

-                    bufferCache.deleteFile(fileId, false);

-                }

-                throw e;

-            }

-        }

-

-        freePageManager.open(fileId);

-        initEmptyTree();

-        freePageManager.close();

-        bufferCache.closeFile(fileId);

-    }

-

-    private void initEmptyTree() throws HyracksDataException {

-        ITreeIndexFrame frame = leafFrameFactory.createFrame();

-        ITreeIndexMetaDataFrame metaFrame = freePageManager.getMetaDataFrameFactory().createFrame();

-        freePageManager.init(metaFrame, rootPage);

-

-        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);

-        rootNode.acquireWriteLatch();

-        try {

-            frame.setPage(rootNode);

-            frame.initBuffer((byte) 0);

-        } finally {

-            rootNode.releaseWriteLatch();

-            bufferCache.unpin(rootNode);

-        }

-    }

-

-    public synchronized void activate() throws HyracksDataException {

-        if (isActivated) {

-            return;

-        }

-

-        boolean fileIsMapped = false;

-        synchronized (fileMapProvider) {

-            fileIsMapped = fileMapProvider.isMapped(file);

-            if (!fileIsMapped) {

-                bufferCache.createFile(file);

-            }

-            fileId = fileMapProvider.lookupFileId(file);

-            try {

-                // Also creates the file if it doesn't exist yet.

-                bufferCache.openFile(fileId);

-            } catch (HyracksDataException e) {

-                // Revert state of buffer cache since file failed to open.

-                if (!fileIsMapped) {

-                    bufferCache.deleteFile(fileId, false);

-                }

-                throw e;

-            }

-        }

-        freePageManager.open(fileId);

-

-        // TODO: Should probably have some way to check that the tree is physically consistent

-        // or that the file we just opened actually is a tree

-

-        isActivated = true;

-    }

-

-    public synchronized void deactivate() throws HyracksDataException {

-        if (!isActivated) {

-            return;

-        }

-

-        bufferCache.closeFile(fileId);

-        freePageManager.close();

-

-        isActivated = false;

-    }

-

-    public synchronized void destroy() throws HyracksDataException {

-        if (isActivated) {

-            throw new HyracksDataException("Failed to destroy the index since it is activated.");

-        }

-

-        file.delete();

-        if (fileId == -1) {

-            return;

-        }

-

-        bufferCache.deleteFile(fileId, false);

-        fileId = -1;

-    }

-

-    public synchronized void clear() throws HyracksDataException {

-        if (!isActivated) {

-            throw new HyracksDataException("Failed to clear the index since it is not activated.");

-        }

-        initEmptyTree();

-    }

-

-    public boolean isEmptyTree(ITreeIndexFrame frame) throws HyracksDataException {

-        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);

-        rootNode.acquireReadLatch();

-        try {

-            frame.setPage(rootNode);

-            if (frame.getLevel() == 0 && frame.getTupleCount() == 0) {

-                return true;

-            } else {

-                return false;

-            }

-        } finally {

-            rootNode.releaseReadLatch();

-            bufferCache.unpin(rootNode);

-        }

-    }

-

-    public byte getTreeHeight(ITreeIndexFrame frame) throws HyracksDataException {

-        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);

-        rootNode.acquireReadLatch();

-        try {

-            frame.setPage(rootNode);

-            return frame.getLevel();

-        } finally {

-            rootNode.releaseReadLatch();

-            bufferCache.unpin(rootNode);

-        }

-    }

-

-    public int getFileId() {

-        return fileId;

-    }

-

-    public FileReference getFileReference() {

-        return file;

-    }

-

-    public IBufferCache getBufferCache() {

-        return bufferCache;

-    }

-

-    public ITreeIndexFrameFactory getInteriorFrameFactory() {

-        return interiorFrameFactory;

-    }

-

-    public ITreeIndexFrameFactory getLeafFrameFactory() {

-        return leafFrameFactory;

-    }

-

-    public IBinaryComparatorFactory[] getComparatorFactories() {

-        return cmpFactories;

-    }

-

-    public IFreePageManager getFreePageManager() {

-        return freePageManager;

-    }

-

-    public int getRootPageId() {

-        return rootPage;

-    }

-

-    public int getFieldCount() {

-        return fieldCount;

-    }

-

-    public abstract class AbstractTreeIndexBulkLoader implements IIndexBulkLoader {

-        protected final MultiComparator cmp;

-        protected final int slotSize;

-        protected final int leafMaxBytes;

-        protected final int interiorMaxBytes;

-        protected final ArrayList<NodeFrontier> nodeFrontiers = new ArrayList<NodeFrontier>();

-        protected final ITreeIndexMetaDataFrame metaFrame;

-        protected final ITreeIndexTupleWriter tupleWriter;

-        protected ITreeIndexFrame leafFrame;

-        protected ITreeIndexFrame interiorFrame;

-

-        public AbstractTreeIndexBulkLoader(float fillFactor) throws TreeIndexException, HyracksDataException {

-            leafFrame = leafFrameFactory.createFrame();

-            interiorFrame = interiorFrameFactory.createFrame();

-            metaFrame = freePageManager.getMetaDataFrameFactory().createFrame();

-

-            if (!isEmptyTree(leafFrame)) {

-                throw new TreeIndexException("Cannot bulk-load a non-empty tree.");

-            }

-

-            this.cmp = MultiComparator.createIgnoreFieldLength(cmpFactories);

-

-            leafFrame.setMultiComparator(cmp);

-            interiorFrame.setMultiComparator(cmp);

-

-            tupleWriter = leafFrame.getTupleWriter();

-

-            NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());

-            leafFrontier.pageId = freePageManager.getFreePage(metaFrame);

-            leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId), true);

-            leafFrontier.page.acquireWriteLatch();

-

-            interiorFrame.setPage(leafFrontier.page);

-            interiorFrame.initBuffer((byte) 0);

-            interiorMaxBytes = (int) ((float) interiorFrame.getBuffer().capacity() * fillFactor);

-

-            leafFrame.setPage(leafFrontier.page);

-            leafFrame.initBuffer((byte) 0);

-            leafMaxBytes = (int) ((float) leafFrame.getBuffer().capacity() * fillFactor);

-            slotSize = leafFrame.getSlotSize();

-

-            nodeFrontiers.add(leafFrontier);

-        }

-

-        public abstract void add(ITupleReference tuple) throws IndexException, HyracksDataException;

-

-        protected void handleException() throws HyracksDataException {

-            // Unlatch and unpin pages.

-            for (NodeFrontier nodeFrontier : nodeFrontiers) {

-                nodeFrontier.page.releaseWriteLatch();

-                bufferCache.unpin(nodeFrontier.page);

-            }

-        }

-

-        @Override

-        public void end() throws HyracksDataException {

-            // copy the root generated from the bulk-load to *the* root page location

-            ICachedPage newRoot = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);

-            newRoot.acquireWriteLatch();

-            NodeFrontier lastNodeFrontier = nodeFrontiers.get(nodeFrontiers.size() - 1);

-            try {

-                System.arraycopy(lastNodeFrontier.page.getBuffer().array(), 0, newRoot.getBuffer().array(), 0,

-                        lastNodeFrontier.page.getBuffer().capacity());

-            } finally {

-                newRoot.releaseWriteLatch();

-                bufferCache.unpin(newRoot);

-

-                // register old root as a free page

-                freePageManager.addFreePage(metaFrame, lastNodeFrontier.pageId);

-

-                for (int i = 0; i < nodeFrontiers.size(); i++) {

-                    nodeFrontiers.get(i).page.releaseWriteLatch();

-                    bufferCache.unpin(nodeFrontiers.get(i).page);

-                }

-            }

-        }

-

-        protected void addLevel() throws HyracksDataException {

-            NodeFrontier frontier = new NodeFrontier(tupleWriter.createTupleReference());

-            frontier.pageId = freePageManager.getFreePage(metaFrame);

-            frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), true);

-            frontier.page.acquireWriteLatch();

-            frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());

-            interiorFrame.setPage(frontier.page);

-            interiorFrame.initBuffer((byte) nodeFrontiers.size());

-            nodeFrontiers.add(frontier);

-        }

-    }

-

-    public class TreeIndexInsertBulkLoader implements IIndexBulkLoader {

-        ITreeIndexAccessor accessor = (ITreeIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE,

-                NoOpOperationCallback.INSTANCE);

-

-        @Override

-        public void add(ITupleReference tuple) throws HyracksDataException {

-            try {

-                accessor.insert(tuple);

-            } catch (IndexException e) {

-                throw new HyracksDataException(e);

-            }

-        }

-

-        @Override

-        public void end() throws HyracksDataException {

-            // do nothing

-        }

-

-    }

-

-    @Override

-    public long getMemoryAllocationSize() {

-        return 0;

-    }

-}

diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallback.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallback.java
deleted file mode 100644
index 6fc8dcf..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallback.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-
-/**
- * Dummy operation callback that simply does nothing.
- */
-public enum NoOpOperationCallback implements IModificationOperationCallback, ISearchOperationCallback {
-    INSTANCE;
-
-    @Override
-    public boolean proceed(ITupleReference tuple) {
-        return true;
-    }
-
-    @Override
-    public void reconcile(ITupleReference tuple) {
-        // Do nothing.
-    }
-
-    @Override
-    public void before(ITupleReference tuple) {
-        // Do nothing.        
-    }
-
-    @Override
-    public void found(ITupleReference before, ITupleReference after) {
-        // Do nothing.        
-    }
-
-    @Override
-    public void cancel(ITupleReference tuple) {
-        // Do nothing.
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
deleted file mode 100644
index 4c641a6..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.impls;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-
-/**
- * Dummy NoOp callback factory used primarily for testing. Always returns the {@link NoOpOperationCallback} instance.
- * Implemented as an enum to preserve singleton model while being serializable
- */
-public enum NoOpOperationCallbackFactory implements ISearchOperationCallbackFactory,
-        IModificationOperationCallbackFactory {
-    INSTANCE;
-
-    @Override
-    public IModificationOperationCallback createModificationOperationCallback(long resourceId, Object resource, IHyracksTaskContext ctx) {
-        return NoOpOperationCallback.INSTANCE;
-    }
-
-    @Override
-    public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx) {
-        return NoOpOperationCallback.INSTANCE;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NodeFrontier.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NodeFrontier.java
deleted file mode 100644
index 56f5fdb..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NodeFrontier.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.impls;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class NodeFrontier {
-    public ICachedPage page;
-    public int pageId;
-    public ITreeIndexTupleReference lastTuple;
-
-    public NodeFrontier(ITreeIndexTupleReference lastTuple) {
-        this.lastTuple = lastTuple;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeIndexDiskOrderScanCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeIndexDiskOrderScanCursor.java
deleted file mode 100644
index 738d987..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeIndexDiskOrderScanCursor.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-
-public class TreeIndexDiskOrderScanCursor implements ITreeIndexCursor {
-
-    private int tupleIndex = 0;
-    private int fileId = -1;
-    private int currentPageId = -1;
-    private int maxPageId = -1;
-    private ICachedPage page = null;
-    private IBufferCache bufferCache = null;
-
-    private final ITreeIndexFrame frame;
-    private final ITreeIndexTupleReference frameTuple;
-
-    public TreeIndexDiskOrderScanCursor(ITreeIndexFrame frame) {
-        this.frame = frame;
-        this.frameTuple = frame.createTupleReference();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        page.releaseReadLatch();
-        bufferCache.unpin(page);
-        page = null;
-    }
-
-    @Override
-    public ITreeIndexTupleReference getTuple() {
-        return frameTuple;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    private boolean positionToNextLeaf(boolean skipCurrent) throws HyracksDataException {
-        while ((frame.getLevel() != 0 || skipCurrent || frame.getTupleCount() == 0) && (currentPageId <= maxPageId)) {
-            currentPageId++;
-
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-
-            ICachedPage nextPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
-            nextPage.acquireReadLatch();
-
-            page = nextPage;
-            frame.setPage(page);
-            tupleIndex = 0;
-            skipCurrent = false;
-        }
-        if (currentPageId <= maxPageId) {
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException {
-        if (currentPageId > maxPageId) {
-            return false;
-        }
-        if (tupleIndex >= frame.getTupleCount()) {
-            boolean nextLeafExists = positionToNextLeaf(true);
-            if (nextLeafExists) {
-                frameTuple.resetByTupleIndex(frame, tupleIndex);
-                return true;
-            } else {
-                return false;
-            }
-        }
-        frameTuple.resetByTupleIndex(frame, tupleIndex);
-        return true;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        tupleIndex++;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        // in case open is called multiple times without closing
-        if (page != null) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-        }
-        page = initialState.getPage();
-        tupleIndex = 0;
-        frame.setPage(page);
-        positionToNextLeaf(false);
-    }
-
-    @Override
-    public void reset() {
-        tupleIndex = 0;
-        currentPageId = -1;
-        maxPageId = -1;
-        page = null;
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        this.bufferCache = bufferCache;
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        this.fileId = fileId;
-    }
-
-    public void setCurrentPageId(int currentPageId) {
-        this.currentPageId = currentPageId;
-    }
-
-    public void setMaxPageId(int maxPageId) {
-        this.maxPageId = maxPageId;
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return false;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/DoubleArrayList.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/DoubleArrayList.java
deleted file mode 100644
index 318a102..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/DoubleArrayList.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-
-public class DoubleArrayList {
-    private double[] data;
-    private int size;
-    private int first;
-    private final int growth;
-
-    public DoubleArrayList(int initialCapacity, int growth) {
-        data = new double[initialCapacity];
-        size = 0;
-        first = 0;
-        this.growth = growth;
-    }
-
-    public int size() {
-        return size;
-    }
-
-    public int first() {
-        return first;
-    }
-
-    public void add(double i) {
-        if (size == data.length) {
-            double[] newData = new double[data.length + growth];
-            System.arraycopy(data, 0, newData, 0, data.length);
-            data = newData;
-        }
-
-        data[size++] = i;
-    }
-
-    public void addFirst(double i) {
-        double[] newData = new double[data.length + 1];
-        System.arraycopy(data, 0, newData, 0, first);
-        System.arraycopy(data, first, newData, first + 1, size - first);
-        data = newData;
-        data[first] = i;
-        size++;
-    }
-
-    public void removeLast() {
-        if (size > 0)
-            size--;
-    }
-
-    // WARNING: caller is responsible for checking size > 0
-    public double getLast() {
-        return data[size - 1];
-    }
-
-    public double get(int i) {
-        return data[i];
-    }
-
-    // WARNING: caller is responsible for checking i < size
-    public void set(int i, int value) {
-        data[i] = value;
-
-    }
-
-    public double getFirst() {
-        return data[first];
-    }
-
-    public void moveFirst() {
-        first++;
-    }
-
-    public void clear() {
-        size = 0;
-        first = 0;
-    }
-
-    public boolean isLast() {
-        return size == first;
-    }
-
-    public boolean isEmpty() {
-        return size == 0;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FieldLengthIgnoringMultiComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FieldLengthIgnoringMultiComparator.java
deleted file mode 100644
index 4e43a34..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FieldLengthIgnoringMultiComparator.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-/**
- * MultiComparator that always passes 0 as a tuple's field length. This may speed up comparisons.
- */
-public class FieldLengthIgnoringMultiComparator extends MultiComparator {
-
-    public FieldLengthIgnoringMultiComparator(IBinaryComparator[] cmps) {
-        super(cmps);
-    }
-
-    @Override
-    public int compare(ITupleReference tupleA, ITupleReference tupleB) {
-        for (int i = 0; i < cmps.length; i++) {
-            int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), 0, tupleB.getFieldData(i),
-                    tupleB.getFieldStart(i), 0);
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-
-    @Override
-    public int selectiveFieldCompare(ITupleReference tupleA, ITupleReference tupleB, int[] fields) {
-        for (int j = 0; j < cmps.length; j++) {
-            int i = fields[j];
-            int cmp = cmps[j].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), 0, tupleB.getFieldData(i),
-                    tupleB.getFieldStart(i), 0);
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-
-    @Override
-    public int compare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex) {
-        for (int i = 0; i < cmps.length; i++) {
-            int ix = startFieldIndex + i;
-            int cmp = cmps[i].compare(tupleA.getFieldData(ix), tupleA.getFieldStart(ix), 0, tupleB.getFieldData(ix),
-                    tupleB.getFieldStart(ix), 0);
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-
-    @Override
-    public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields) {
-        for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
-            int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), 0, tupleB.getFieldData(i),
-                    tupleB.getFieldStart(i), 0);
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FieldLengthIgnoringSingleComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FieldLengthIgnoringSingleComparator.java
deleted file mode 100644
index d35297a..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FieldLengthIgnoringSingleComparator.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-/**
- * MultiComparator optimized for the special case where there is only a single comparator.
- * Further speeds up comparisons by always passing 0 as the field's length.
- */
-public class FieldLengthIgnoringSingleComparator extends MultiComparator {
-    private final IBinaryComparator cmp;
-
-    protected FieldLengthIgnoringSingleComparator(IBinaryComparator[] cmps) {
-        super(cmps);
-        this.cmp = cmps[0];
-    }
-
-    public int compare(ITupleReference tupleA, ITupleReference tupleB) {
-        return cmp.compare(tupleA.getFieldData(0), tupleA.getFieldStart(0), 0, tupleB.getFieldData(0),
-                tupleB.getFieldStart(0), 0);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOperation.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOperation.java
deleted file mode 100644
index 1885348..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOperation.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-
-public enum IndexOperation {
-    INSERT,
-    DELETE,
-    UPDATE,
-    UPSERT,
-    SEARCH,
-    DISKORDERSCAN,
-    PHYSICALDELETE,
-    NOOP,
-    MERGE,
-    FLUSH
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
deleted file mode 100644
index 567973d..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public class MultiComparator {
-
-    protected final IBinaryComparator[] cmps;
-
-    public MultiComparator(IBinaryComparator[] cmps) {
-        this.cmps = cmps;
-    }
-
-    public int compare(ITupleReference tupleA, ITupleReference tupleB) {
-        for (int i = 0; i < cmps.length; i++) {
-            int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
-                    tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-
-    public int selectiveFieldCompare(ITupleReference tupleA, ITupleReference tupleB, int[] fields) {
-        for (int j = 0; j < cmps.length; j++) {
-            int i = fields[j];
-            int cmp = cmps[j].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
-                    tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-
-    public int compare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex) {
-        for (int i = 0; i < cmps.length; i++) {
-            int ix = startFieldIndex + i;
-            int cmp = cmps[i].compare(tupleA.getFieldData(ix), tupleA.getFieldStart(ix), tupleA.getFieldLength(ix),
-                    tupleB.getFieldData(ix), tupleB.getFieldStart(ix), tupleB.getFieldLength(ix));
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-
-    public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields) {
-        for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
-            int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
-                    tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
-            if (cmp != 0) {
-                return cmp;
-            }
-        }
-        return 0;
-    }
-
-    public IBinaryComparator[] getComparators() {
-        return cmps;
-    }
-
-    public int getKeyFieldCount() {
-        return cmps.length;
-    }
-
-    public static MultiComparator create(IBinaryComparatorFactory[] cmpFactories) {
-        IBinaryComparator[] cmps = new IBinaryComparator[cmpFactories.length];
-        for (int i = 0; i < cmpFactories.length; i++) {
-            cmps[i] = cmpFactories[i].createBinaryComparator();
-        }
-        if (cmps.length == 1) {
-            return new SingleComparator(cmps);
-        } else {
-            return new MultiComparator(cmps);
-        }
-    }
-
-    public static MultiComparator createIgnoreFieldLength(IBinaryComparatorFactory[] cmpFactories) {
-        IBinaryComparator[] cmps = new IBinaryComparator[cmpFactories.length];
-        for (int i = 0; i < cmpFactories.length; i++) {
-            cmps[i] = cmpFactories[i].createBinaryComparator();
-        }
-        if (cmps.length == 1) {
-            return new FieldLengthIgnoringSingleComparator(cmps);
-        } else {
-            return new FieldLengthIgnoringMultiComparator(cmps);
-        }
-    }
-
-    public static MultiComparator createIgnoreFieldLength(IBinaryComparatorFactory[] cmpFactories, int startIndex,
-            int numCmps) {
-        IBinaryComparator[] cmps = new IBinaryComparator[numCmps];
-        for (int i = startIndex; i < startIndex + numCmps; i++) {
-            cmps[i] = cmpFactories[i].createBinaryComparator();
-        }
-        if (cmps.length == 1) {
-            return new FieldLengthIgnoringSingleComparator(cmps);
-        } else {
-            return new FieldLengthIgnoringMultiComparator(cmps);
-        }
-    }
-
-    public static MultiComparator create(IBinaryComparatorFactory[] cmpFactories, int startIndex, int numCmps) {
-        IBinaryComparator[] cmps = new IBinaryComparator[numCmps];
-        for (int i = startIndex; i < startIndex + numCmps; i++) {
-            cmps[i] = cmpFactories[i].createBinaryComparator();
-        }
-        if (cmps.length == 1) {
-            return new SingleComparator(cmps);
-        } else {
-            return new MultiComparator(cmps);
-        }
-    }
-
-    public static MultiComparator create(IBinaryComparatorFactory[]... cmpFactories) {
-        int size = 0;
-        for (int i = 0; i < cmpFactories.length; i++) {
-            size += cmpFactories[i].length;
-        }
-        IBinaryComparator[] cmps = new IBinaryComparator[size];
-        int x = 0;
-        for (int i = 0; i < cmpFactories.length; i++) {
-            for (int j = 0; j < cmpFactories[i].length; j++) {
-                cmps[x++] = cmpFactories[i][j].createBinaryComparator();
-            }
-        }
-        if (cmps.length == 1) {
-            return new SingleComparator(cmps);
-        } else {
-            return new MultiComparator(cmps);
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SingleComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SingleComparator.java
deleted file mode 100644
index c8841f3..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SingleComparator.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-/**
- * MultiComparator optimized for the special case where there is only a single comparator.
- */
-public class SingleComparator extends MultiComparator {
-
-    private final IBinaryComparator cmp;
-
-    protected SingleComparator(IBinaryComparator[] cmps) {
-        super(cmps);
-        this.cmp = cmps[0];
-    }
-
-    public int compare(ITupleReference tupleA, ITupleReference tupleB) {
-        return cmp.compare(tupleA.getFieldData(0), tupleA.getFieldStart(0), tupleA.getFieldLength(0),
-                tupleB.getFieldData(0), tupleB.getFieldStart(0), tupleB.getFieldLength(0));
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/ConcatenatingTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/ConcatenatingTupleReference.java
deleted file mode 100644
index c5662b1..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/ConcatenatingTupleReference.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.tuples;
-
-import java.util.Arrays;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-/**
- * WARNING: getFieldData(), getFieldStart() and getFieldLength() are log and not constant time.
- */
-public class ConcatenatingTupleReference implements ITupleReference {
-
-    private final ITupleReference[] tuples;
-    private final int[] fieldCounts;
-    private int numTuples;
-    private int totalFieldCount;
-    
-    public ConcatenatingTupleReference(int maxTuples) {
-        tuples = new ITupleReference[maxTuples];
-        fieldCounts = new int[maxTuples];
-        reset();        
-    }
-    
-    public void reset() {
-        numTuples = 0;
-        totalFieldCount = 0;
-    }
-    
-    public void addTuple(ITupleReference tuple) {
-        tuples[numTuples] = tuple;
-        totalFieldCount += tuple.getFieldCount();
-        if (numTuples > 0) {
-            fieldCounts[numTuples] = fieldCounts[numTuples - 1] + tuple.getFieldCount();
-        } else {
-            fieldCounts[numTuples] = tuple.getFieldCount();
-        }
-        ++numTuples;
-    }
-    
-    public void removeLastTuple() {
-        if (numTuples > 0) {
-            ITupleReference lastTuple = tuples[--numTuples];
-            totalFieldCount -= lastTuple.getFieldCount();
-        }
-    }
-    
-    public int getNumTuples() {
-        return numTuples;
-    }
-    
-    public boolean hasMaxTuples() {
-        return numTuples == tuples.length;
-    }
-    
-    @Override
-    public int getFieldCount() {
-        return totalFieldCount;
-    }
-
-    @Override
-    public byte[] getFieldData(int fIdx) {
-        int tupleIndex = getTupleIndex(fIdx);
-        int fieldIndex = getFieldIndex(tupleIndex, fIdx);
-        return tuples[tupleIndex].getFieldData(fieldIndex);
-    }
-
-    @Override
-    public int getFieldStart(int fIdx) {
-        int tupleIndex = getTupleIndex(fIdx);
-        int fieldIndex = getFieldIndex(tupleIndex, fIdx);
-        return tuples[tupleIndex].getFieldStart(fieldIndex);
-    }
-
-    @Override
-    public int getFieldLength(int fIdx) {
-        int tupleIndex = getTupleIndex(fIdx);
-        int fieldIndex = getFieldIndex(tupleIndex, fIdx);
-        return tuples[tupleIndex].getFieldLength(fieldIndex);
-    }
-    
-    private int getTupleIndex(int fIdx) {
-        int tupleIndex = Arrays.binarySearch(fieldCounts, 0, numTuples - 1, fIdx);
-        if (tupleIndex < 0) {
-            tupleIndex = -tupleIndex - 1;
-        } else {
-            ++tupleIndex;
-        }
-        return tupleIndex;
-    }
-    
-    private int getFieldIndex(int tupleIndex, int fIdx) {
-        int fieldIndex = -1;
-        if (tupleIndex > 0) {
-            fieldIndex = fIdx - fieldCounts[tupleIndex - 1]; 
-        } else {
-            fieldIndex = fIdx;
-        }
-        return fieldIndex;
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/PermutingFrameTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/PermutingFrameTupleReference.java
deleted file mode 100644
index 5f97f04..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/PermutingFrameTupleReference.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.tuples;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-public class PermutingFrameTupleReference implements IFrameTupleReference {
-	private IFrameTupleAccessor fta;
-	private int tIndex;
-	private int[] fieldPermutation;
-
-	public void setFieldPermutation(int[] fieldPermutation) {
-		this.fieldPermutation = fieldPermutation;
-	}
-
-	public void reset(IFrameTupleAccessor fta, int tIndex) {
-		this.fta = fta;
-		this.tIndex = tIndex;
-	}
-
-	@Override
-	public IFrameTupleAccessor getFrameTupleAccessor() {
-		return fta;
-	}
-
-	@Override
-	public int getTupleIndex() {
-		return tIndex;
-	}
-
-	@Override
-	public int getFieldCount() {
-		return fieldPermutation.length;
-	}
-
-	@Override
-	public byte[] getFieldData(int fIdx) {
-		return fta.getBuffer().array();
-	}
-
-	@Override
-	public int getFieldStart(int fIdx) {
-		return fta.getTupleStartOffset(tIndex) + fta.getFieldSlotsLength()
-				+ fta.getFieldStartOffset(tIndex, fieldPermutation[fIdx]);
-	}
-
-	@Override
-	public int getFieldLength(int fIdx) {
-		return fta.getFieldLength(tIndex, fieldPermutation[fIdx]);
-	}
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/PermutingTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/PermutingTupleReference.java
deleted file mode 100644
index 0272ff6..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/PermutingTupleReference.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.tuples;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public class PermutingTupleReference implements ITupleReference {
-
-    private final int[] fieldPermutation;
-    private ITupleReference sourceTuple;
-    
-    public PermutingTupleReference(int[] fieldPermutation) {
-        this.fieldPermutation = fieldPermutation;
-    }
-    
-    public void reset(ITupleReference sourceTuple) {
-        this.sourceTuple = sourceTuple;
-    }
-
-    @Override
-    public int getFieldCount() {
-        return fieldPermutation.length;
-    }
-
-    @Override
-    public byte[] getFieldData(int fIdx) {
-        return sourceTuple.getFieldData(fieldPermutation[fIdx]);
-    }
-
-    @Override
-    public int getFieldStart(int fIdx) {
-        return sourceTuple.getFieldStart(fieldPermutation[fIdx]);
-    }
-
-    @Override
-    public int getFieldLength(int fIdx) {
-        return sourceTuple.getFieldLength(fieldPermutation[fIdx]);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
deleted file mode 100644
index 8c41dd3..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.tuples;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-
-public class SimpleTupleWriter implements ITreeIndexTupleWriter {
-
-    // Write short in little endian to target byte array at given offset.
-    private static void writeShortL(short s, byte[] buf, int targetOff) {
-        buf[targetOff] = (byte) (s >> 8);
-        buf[targetOff + 1] = (byte) (s >> 0);
-    }
-
-    @Override
-    public int bytesRequired(ITupleReference tuple) {
-        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
-
-    @Override
-    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
-        int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
-        for (int i = startField; i < startField + numFields; i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return new SimpleTupleReference();
-    }
-
-    @Override
-    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
-        return writeTuple(tuple, targetBuf.array(), targetOff);
-    }
-
-    @Override
-    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(tuple);
-        int fieldSlotsBytes = getFieldSlotsBytes(tuple);
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf[runner++] = (byte) 0;
-        }
-        runner += fieldSlotsBytes;
-        int fieldEndOff = 0;
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner, tuple.getFieldLength(i));
-            fieldEndOff += tuple.getFieldLength(i);
-            runner += tuple.getFieldLength(i);
-            writeShortL((short) fieldEndOff, targetBuf, targetOff + nullFlagsBytes + i * 2);
-        }
-        return runner - targetOff;
-    }
-
-    @Override
-    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, byte[] targetBuf, int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf[runner++] = (byte) 0;
-        }
-        runner += getFieldSlotsBytes(tuple, startField, numFields);
-
-        int fieldEndOff = 0;
-        int fieldCounter = 0;
-        for (int i = startField; i < startField + numFields; i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner, tuple.getFieldLength(i));
-            fieldEndOff += tuple.getFieldLength(i);
-            runner += tuple.getFieldLength(i);
-            writeShortL((short) fieldEndOff, targetBuf, targetOff + nullFlagsBytes + fieldCounter * 2);
-            fieldCounter++;
-        }
-
-        return runner - targetOff;
-    }
-
-    protected int getNullFlagsBytes(ITupleReference tuple) {
-        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
-    }
-
-    protected int getFieldSlotsBytes(ITupleReference tuple) {
-        return tuple.getFieldCount() * 2;
-    }
-
-    protected int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
-        return (int) Math.ceil((double) numFields / 8.0);
-    }
-
-    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
-        return numFields * 2;
-    }
-
-    @Override
-    public int getCopySpaceRequired(ITupleReference tuple) {
-        return bytesRequired(tuple);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
deleted file mode 100644
index 1e12bea..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.tuples;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-
-public class TypeAwareTupleWriter implements ITreeIndexTupleWriter {
-
-    protected ITypeTraits[] typeTraits;
-    protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
-
-    public TypeAwareTupleWriter(ITypeTraits[] typeTraits) {
-        this.typeTraits = typeTraits;
-    }
-
-    @Override
-    public int bytesRequired(ITupleReference tuple) {
-        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
-
-    @Override
-    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
-        int bytes = getNullFlagsBytes(numFields) + getFieldSlotsBytes(tuple, startField, numFields);
-        for (int i = startField; i < startField + numFields; i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return new TypeAwareTupleReference(typeTraits);
-    }
-
-    @Override
-    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
-        return writeTuple(tuple, targetBuf.array(), targetOff);
-    }
-
-    @Override
-    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(tuple);
-        // write null indicator bits
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf[runner++] = (byte) 0;
-        }
-
-        // write field slots for variable length fields
-        encDec.reset(targetBuf, runner);
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            if (!typeTraits[i].isFixedLength()) {
-                encDec.encode(tuple.getFieldLength(i));
-            }
-        }
-        runner = encDec.getPos();
-
-        // write data fields
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner, tuple.getFieldLength(i));
-            runner += tuple.getFieldLength(i);
-        }
-
-        return runner - targetOff;
-    }
-
-    @Override
-    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, byte[] targetBuf, int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(numFields);
-        // write null indicator bits
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf[runner++] = (byte) 0;
-        }
-
-        // write field slots for variable length fields
-        encDec.reset(targetBuf, runner);
-        for (int i = startField; i < startField + numFields; i++) {
-            if (!typeTraits[i].isFixedLength()) {
-                encDec.encode(tuple.getFieldLength(i));
-            }
-        }
-        runner = encDec.getPos();
-
-        for (int i = startField; i < startField + numFields; i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner, tuple.getFieldLength(i));
-            runner += tuple.getFieldLength(i);
-        }
-
-        return runner - targetOff;
-    }
-
-    protected int getNullFlagsBytes(ITupleReference tuple) {
-        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
-    }
-
-    protected int getFieldSlotsBytes(ITupleReference tuple) {
-        int fieldSlotBytes = 0;
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            if (!typeTraits[i].isFixedLength()) {
-                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
-            }
-        }
-        return fieldSlotBytes;
-    }
-
-    protected int getNullFlagsBytes(int numFields) {
-        return (int) Math.ceil((double) numFields / 8.0);
-    }
-
-    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
-        int fieldSlotBytes = 0;
-        for (int i = startField; i < startField + numFields; i++) {
-            if (!typeTraits[i].isFixedLength()) {
-                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
-            }
-        }
-        return fieldSlotBytes;
-    }
-
-    public ITypeTraits[] getTypeTraits() {
-        return typeTraits;
-    }
-
-    public void setTypeTraits(ITypeTraits[] typeTraits) {
-        this.typeTraits = typeTraits;
-    }
-
-    @Override
-    public int getCopySpaceRequired(ITupleReference tuple) {
-        return bytesRequired(tuple);
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/HashMultiSet.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/HashMultiSet.java
deleted file mode 100644
index e4ccdcb..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/HashMultiSet.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common.util;
-
-import java.util.AbstractCollection;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Quick and dirty implementation of a HashMultiSet backed by a HashMap.
- * It only implements a minimal subset of the collection interface to make our tests work.
- */
-public class HashMultiSet<E> extends AbstractCollection<E> {
-
-    private final Map<E, List<E>> map = new HashMap<E, List<E>>(); 
-    private int size = 0;
-    
-    @Override
-    public boolean add(E e) {
-        List<E> list = map.get(e);
-        if (list == null) {
-            list = new ArrayList<E>();
-            map.put(e, list);
-        }
-        list.add(e);
-        size++;
-        return true;
-    }
-    
-    @Override
-    public boolean contains(Object o) {
-        return map.containsKey(o);
-    }
-    
-    @Override
-    public boolean remove(Object o) {
-        List<E> list = map.get(o);
-        if (list == null) {
-            return false;            
-        }
-        list.remove(list.size() - 1);
-        if (list.isEmpty()) {
-            map.remove(o);
-        }
-        size--;
-        return true;
-    }
-    
-    @Override
-    public Iterator<E> iterator() {
-        return new HashMultiSetIterator();
-    }
-
-    @Override
-    public int size() {
-        return size;
-    }
-    
-    @Override
-    public void clear() {
-        map.clear();
-        size = 0;
-    }
-    
-    private class HashMultiSetIterator implements Iterator<E> {
-
-        private Iterator<Map.Entry<E, List<E>>> mapIter;
-        private Iterator<E> listIter;
-        
-        public HashMultiSetIterator() {
-            mapIter = map.entrySet().iterator();
-        }
-        
-        @Override
-        public boolean hasNext() {
-            if (mapIter.hasNext() || (listIter != null && listIter.hasNext())) {
-                return true;
-            }
-            return false;
-        }
-
-        @Override
-        public E next() {
-            if (listIter == null || (listIter != null && !listIter.hasNext())) {
-                Map.Entry<E, List<E>> entry = mapIter.next();
-                listIter = entry.getValue().iterator();
-                return listIter.next();
-            }
-            return listIter.next();
-        }
-
-        @Override
-        public void remove() {
-            throw new IllegalStateException("Not implemented");
-        }
-    }
-}
diff --git a/hyracks-storage-am-common/src/main/resources/dist.all.first.cleaned b/hyracks-storage-am-common/src/main/resources/dist.all.first.cleaned
deleted file mode 100755
index de64afa..0000000
--- a/hyracks-storage-am-common/src/main/resources/dist.all.first.cleaned
+++ /dev/null
@@ -1,5494 +0,0 @@
-Mary 
-Patricia 
-Linda 
-Barbara 
-Elizabeth 
-Jennifer 
-Maria 
-Susan 
-Margaret 
-Dorothy 
-Lisa 
-Nancy 
-Karen 
-Betty 
-Helen 
-Sandra 
-Donna 
-Carol 
-Ruth 
-Sharon 
-Michelle 
-Laura 
-Sarah 
-Kimberly 
-Deborah 
-Jessica 
-Shirley 
-Cynthia 
-Angela 
-Melissa 
-Brenda 
-Amy 
-Anna 
-Rebecca 
-Virginia 
-Kathleen 
-Pamela 
-Martha 
-Debra 
-Amanda 
-Stephanie 
-Carolyn 
-Christine 
-Marie 
-Janet 
-Catherine 
-Frances 
-Ann 
-Joyce 
-Diane 
-Alice 
-Julie 
-Heather 
-Teresa 
-Doris 
-Gloria 
-Evelyn 
-Jean 
-Cheryl 
-Mildred 
-Katherine 
-Joan 
-Ashley 
-Judith 
-Rose 
-Janice 
-Kelly 
-Nicole 
-Judy 
-Christina 
-Kathy 
-Theresa 
-Beverly 
-Denise 
-Tammy 
-Irene 
-Jane 
-Lori 
-Rachel 
-Marilyn 
-Andrea 
-Kathryn 
-Louise 
-Sara 
-Anne 
-Jacqueline 
-Wanda 
-Bonnie 
-Julia 
-Ruby 
-Lois 
-Tina 
-Phyllis 
-Norma 
-Paula 
-Diana 
-Annie 
-Lillian 
-Emily 
-Robin 
-Peggy 
-Crystal 
-Gladys 
-Rita 
-Dawn 
-Connie 
-Florence 
-Tracy 
-Edna 
-Tiffany 
-Carmen 
-Rosa 
-Cindy 
-Grace 
-Wendy 
-Victoria 
-Edith 
-Kim 
-Sherry 
-Sylvia 
-Josephine 
-Thelma 
-Shannon 
-Sheila 
-Ethel 
-Ellen 
-Elaine 
-Marjorie 
-Carrie 
-Charlotte 
-Monica 
-Esther 
-Pauline 
-Emma 
-Juanita 
-Anita 
-Rhonda 
-Hazel 
-Amber 
-Eva 
-Debbie 
-April 
-Leslie 
-Clara 
-Lucille 
-Jamie 
-Joanne 
-Eleanor 
-Valerie 
-Danielle 
-Megan 
-Alicia 
-Suzanne 
-Michele 
-Gail 
-Bertha 
-Darlene 
-Veronica 
-Jill 
-Erin 
-Geraldine 
-Lauren 
-Cathy 
-Joann 
-Lorraine 
-Lynn 
-Sally 
-Regina 
-Erica 
-Beatrice 
-Dolores 
-Bernice 
-Audrey 
-Yvonne 
-Annette 
-June 
-Samantha 
-Marion 
-Dana 
-Stacy 
-Ana 
-Renee 
-Ida 
-Vivian 
-Roberta 
-Holly 
-Brittany 
-Melanie 
-Loretta 
-Yolanda 
-Jeanette 
-Laurie 
-Katie 
-Kristen 
-Vanessa 
-Alma 
-Sue 
-Elsie 
-Beth 
-Jeanne 
-Vicki 
-Carla 
-Tara 
-Rosemary 
-Eileen 
-Terri 
-Gertrude 
-Lucy 
-Tonya 
-Ella 
-Stacey 
-Wilma 
-Gina 
-Kristin 
-Jessie 
-Natalie 
-Agnes 
-Vera 
-Willie 
-Charlene 
-Bessie 
-Delores 
-Melinda 
-Pearl 
-Arlene 
-Maureen 
-Colleen 
-Allison 
-Tamara 
-Joy 
-Georgia 
-Constance 
-Lillie 
-Claudia 
-Jackie 
-Marcia 
-Tanya 
-Nellie 
-Minnie 
-Marlene 
-Heidi 
-Glenda 
-Lydia 
-Viola 
-Courtney 
-Marian 
-Stella 
-Caroline 
-Dora 
-Jo 
-Vickie 
-Mattie 
-Terry 
-Maxine 
-Irma 
-Mabel 
-Marsha 
-Myrtle 
-Lena 
-Christy 
-Deanna 
-Patsy 
-Hilda 
-Gwendolyn 
-Jennie 
-Nora 
-Margie 
-Nina 
-Cassandra 
-Leah 
-Penny 
-Kay 
-Priscilla 
-Naomi 
-Carole 
-Brandy 
-Olga 
-Billie 
-Dianne 
-Tracey 
-Leona 
-Jenny 
-Felicia 
-Sonia 
-Miriam 
-Velma 
-Becky 
-Bobbie 
-Violet 
-Kristina 
-Toni 
-Misty 
-Mae 
-Shelly 
-Daisy 
-Ramona 
-Sherri 
-Erika 
-Katrina 
-Claire 
-Lindsey 
-Lindsay 
-Geneva 
-Guadalupe 
-Belinda 
-Margarita 
-Sheryl 
-Cora 
-Faye 
-Ada 
-Natasha 
-Sabrina 
-Isabel 
-Marguerite 
-Hattie 
-Harriet 
-Molly 
-Cecilia 
-Kristi 
-Brandi 
-Blanche 
-Sandy 
-Rosie 
-Joanna 
-Iris 
-Eunice 
-Angie 
-Inez 
-Lynda 
-Madeline 
-Amelia 
-Alberta 
-Genevieve 
-Monique 
-Jodi 
-Janie 
-Maggie 
-Kayla 
-Sonya 
-Jan 
-Lee 
-Kristine 
-Candace 
-Fannie 
-Maryann 
-Opal 
-Alison 
-Yvette 
-Melody 
-Luz 
-Susie 
-Olivia 
-Flora 
-Shelley 
-Kristy 
-Mamie 
-Lula 
-Lola 
-Verna 
-Beulah 
-Antoinette 
-Candice 
-Juana 
-Jeannette 
-Pam 
-Kelli 
-Hannah 
-Whitney 
-Bridget 
-Karla 
-Celia 
-Latoya 
-Patty 
-Shelia 
-Gayle 
-Della 
-Vicky 
-Lynne 
-Sheri 
-Marianne 
-Kara 
-Jacquelyn 
-Erma 
-Blanca 
-Myra 
-Leticia 
-Pat 
-Krista 
-Roxanne 
-Angelica 
-Johnnie 
-Robyn 
-Francis 
-Adrienne 
-Rosalie 
-Alexandra 
-Brooke 
-Bethany 
-Sadie 
-Bernadette 
-Traci 
-Jody 
-Kendra 
-Jasmine 
-Nichole 
-Rachael 
-Chelsea 
-Mable 
-Ernestine 
-Muriel 
-Marcella 
-Elena 
-Krystal 
-Angelina 
-Nadine 
-Kari 
-Estelle 
-Dianna 
-Paulette 
-Lora 
-Mona 
-Doreen 
-Rosemarie 
-Angel 
-Desiree 
-Antonia 
-Hope 
-Ginger 
-Janis 
-Betsy 
-Christie 
-Freda 
-Mercedes 
-Meredith 
-Lynette 
-Teri 
-Cristina 
-Eula 
-Leigh 
-Meghan 
-Sophia 
-Eloise 
-Rochelle 
-Gretchen 
-Cecelia 
-Raquel 
-Henrietta 
-Alyssa 
-Jana 
-Kelley 
-Gwen 
-Kerry 
-Jenna 
-Tricia 
-Laverne 
-Olive 
-Alexis 
-Tasha 
-Silvia 
-Elvira 
-Casey 
-Delia 
-Sophie 
-Kate 
-Patti 
-Lorena 
-Kellie 
-Sonja 
-Lila 
-Lana 
-Darla 
-May 
-Mindy 
-Essie 
-Mandy 
-Lorene 
-Elsa 
-Josefina 
-Jeannie 
-Miranda 
-Dixie 
-Lucia 
-Marta 
-Faith 
-Lela 
-Johanna 
-Shari 
-Camille 
-Tami 
-Shawna 
-Elisa 
-Ebony 
-Melba 
-Ora 
-Nettie 
-Tabitha 
-Ollie 
-Jaime 
-Winifred 
-Kristie 
-Marina 
-Alisha 
-Aimee 
-Rena 
-Myrna 
-Marla 
-Tammie 
-Latasha 
-Bonita 
-Patrice 
-Ronda 
-Sherrie 
-Addie 
-Francine 
-Deloris 
-Stacie 
-Adriana 
-Cheri 
-Shelby 
-Abigail 
-Celeste 
-Jewel 
-Cara 
-Adele 
-Rebekah 
-Lucinda 
-Dorthy 
-Chris 
-Effie 
-Trina 
-Reba 
-Shawn 
-Sallie 
-Aurora 
-Lenora 
-Etta 
-Lottie 
-Kerri 
-Trisha 
-Nikki 
-Estella 
-Francisca 
-Josie 
-Tracie 
-Marissa 
-Karin 
-Brittney 
-Janelle 
-Lourdes 
-Laurel 
-Helene 
-Fern 
-Elva 
-Corinne 
-Kelsey 
-Ina 
-Bettie 
-Elisabeth 
-Aida 
-Caitlin 
-Ingrid 
-Iva 
-Eugenia 
-Christa 
-Goldie 
-Cassie 
-Maude 
-Jenifer 
-Therese 
-Frankie 
-Dena 
-Lorna 
-Janette 
-Latonya 
-Candy 
-Morgan 
-Consuelo 
-Tamika 
-Rosetta 
-Debora 
-Cherie 
-Polly 
-Dina 
-Jewell 
-Fay 
-Jillian 
-Dorothea 
-Nell 
-Trudy 
-Esperanza 
-Patrica 
-Kimberley 
-Shanna 
-Helena 
-Carolina 
-Cleo 
-Stefanie 
-Rosario 
-Ola 
-Janine 
-Mollie 
-Lupe 
-Alisa 
-Lou 
-Maribel 
-Susanne 
-Bette 
-Susana 
-Elise 
-Cecile 
-Isabelle 
-Lesley 
-Jocelyn 
-Paige 
-Joni 
-Rachelle 
-Leola 
-Daphne 
-Alta 
-Ester 
-Petra 
-Graciela 
-Imogene 
-Jolene 
-Keisha 
-Lacey 
-Glenna 
-Gabriela 
-Keri 
-Ursula 
-Lizzie 
-Kirsten 
-Shana 
-Adeline 
-Mayra 
-Jayne 
-Jaclyn 
-Gracie 
-Sondra 
-Carmela 
-Marisa 
-Rosalind 
-Charity 
-Tonia 
-Beatriz 
-Marisol 
-Clarice 
-Jeanine 
-Sheena 
-Angeline 
-Frieda 
-Lily 
-Robbie 
-Shauna 
-Millie 
-Claudette 
-Cathleen 
-Angelia 
-Gabrielle 
-Autumn 
-Katharine 
-Summer 
-Jodie 
-Staci 
-Lea 
-Christi 
-Jimmie 
-Justine 
-Elma 
-Luella 
-Margret 
-Dominique 
-Socorro 
-Rene 
-Martina 
-Margo 
-Mavis 
-Callie 
-Bobbi 
-Maritza 
-Lucile 
-Leanne 
-Jeannine 
-Deana 
-Aileen 
-Lorie 
-Ladonna 
-Willa 
-Manuela 
-Gale 
-Selma 
-Dolly 
-Sybil 
-Abby 
-Lara 
-Dale 
-Ivy 
-Dee 
-Winnie 
-Marcy 
-Luisa 
-Jeri 
-Magdalena 
-Ofelia 
-Meagan 
-Audra 
-Matilda 
-Leila 
-Cornelia 
-Bianca 
-Simone 
-Bettye 
-Randi 
-Virgie 
-Latisha 
-Barbra 
-Georgina 
-Eliza 
-Leann 
-Bridgette 
-Rhoda 
-Haley 
-Adela 
-Nola 
-Bernadine 
-Flossie 
-Ila 
-Greta 
-Ruthie 
-Nelda 
-Minerva 
-Lilly 
-Terrie 
-Letha 
-Hilary 
-Estela 
-Valarie 
-Brianna 
-Rosalyn 
-Earline 
-Catalina 
-Ava 
-Mia 
-Clarissa 
-Lidia 
-Corrine 
-Alexandria 
-Concepcion 
-Tia 
-Sharron 
-Rae 
-Dona 
-Ericka 
-Jami 
-Elnora 
-Chandra 
-Lenore 
-Neva 
-Marylou 
-Melisa 
-Tabatha 
-Serena 
-Avis 
-Allie 
-Sofia 
-Jeanie 
-Odessa 
-Nannie 
-Harriett 
-Loraine 
-Penelope 
-Milagros 
-Emilia 
-Benita 
-Allyson 
-Ashlee 
-Tania 
-Tommie 
-Esmeralda 
-Karina 
-Eve 
-Pearlie 
-Zelma 
-Malinda 
-Noreen 
-Tameka 
-Saundra 
-Hillary 
-Amie 
-Althea 
-Rosalinda 
-Jordan 
-Lilia 
-Alana 
-Gay 
-Clare 
-Alejandra 
-Elinor 
-Michael 
-Lorrie 
-Jerri 
-Darcy 
-Earnestine 
-Carmella 
-Taylor 
-Noemi 
-Marcie 
-Liza 
-Annabelle 
-Louisa 
-Earlene 
-Mallory 
-Carlene 
-Nita 
-Selena 
-Tanisha 
-Katy 
-Julianne 
-John 
-Lakisha 
-Edwina 
-Maricela 
-Margery 
-Kenya 
-Dollie 
-Roxie 
-Roslyn 
-Kathrine 
-Nanette 
-Charmaine 
-Lavonne 
-Ilene 
-Kris 
-Tammi 
-Suzette 
-Corine 
-Kaye 
-Jerry 
-Merle 
-Chrystal 
-Lina 
-Deanne 
-Lilian 
-Juliana 
-Aline 
-Luann 
-Kasey 
-Maryanne 
-Evangeline 
-Colette 
-Melva 
-Lawanda 
-Yesenia 
-Nadia 
-Madge 
-Kathie 
-Eddie 
-Ophelia 
-Valeria 
-Nona 
-Mitzi 
-Mari 
-Georgette 
-Claudine 
-Fran 
-Alissa 
-Roseann 
-Lakeisha 
-Susanna 
-Reva 
-Deidre 
-Chasity 
-Sheree 
-Carly 
-James 
-Elvia 
-Alyce 
-Deirdre 
-Gena 
-Briana 
-Araceli 
-Katelyn 
-Rosanne 
-Wendi 
-Tessa 
-Berta 
-Marva 
-Imelda 
-Marietta 
-Marci 
-Leonor 
-Arline 
-Sasha 
-Madelyn 
-Janna 
-Juliette 
-Deena 
-Aurelia 
-Josefa 
-Augusta 
-Liliana 
-Young 
-Christian 
-Lessie 
-Amalia 
-Savannah 
-Anastasia 
-Vilma 
-Natalia 
-Rosella 
-Lynnette 
-Corina 
-Alfreda 
-Leanna 
-Carey 
-Amparo 
-Coleen 
-Tamra 
-Aisha 
-Wilda 
-Karyn 
-Cherry 
-Queen 
-Maura 
-Mai 
-Evangelina 
-Rosanna 
-Hallie 
-Erna 
-Enid 
-Mariana 
-Lacy 
-Juliet 
-Jacklyn 
-Freida 
-Madeleine 
-Mara 
-Hester 
-Cathryn 
-Lelia 
-Casandra 
-Bridgett 
-Angelita 
-Jannie 
-Dionne 
-Annmarie 
-Katina 
-Beryl 
-Phoebe 
-Millicent 
-Katheryn 
-Diann 
-Carissa 
-Maryellen 
-Liz 
-Lauri 
-Helga 
-Gilda 
-Adrian 
-Rhea 
-Marquita 
-Hollie 
-Tisha 
-Tamera 
-Angelique 
-Francesca 
-Britney 
-Kaitlin 
-Lolita 
-Florine 
-Rowena 
-Reyna 
-Twila 
-Fanny 
-Janell 
-Ines 
-Concetta 
-Bertie 
-Alba 
-Brigitte 
-Alyson 
-Vonda 
-Pansy 
-Elba 
-Noelle 
-Letitia 
-Kitty 
-Deann 
-Brandie 
-Louella 
-Leta 
-Felecia 
-Sharlene 
-Lesa 
-Beverley 
-Robert 
-Isabella 
-Herminia 
-Terra 
-Celina 
-Tori 
-Octavia 
-Jade 
-Denice 
-Germaine 
-Sierra 
-Michell 
-Cortney 
-Nelly 
-Doretha 
-Sydney 
-Deidra 
-Monika 
-Lashonda 
-Judi 
-Chelsey 
-Antionette 
-Margot 
-Bobby 
-Adelaide 
-Nan 
-Leeann 
-Elisha 
-Dessie 
-Libby 
-Kathi 
-Gayla 
-Latanya 
-Mina 
-Mellisa 
-Kimberlee 
-Jasmin 
-Renae 
-Zelda 
-Elda 
-Ma 
-Justina 
-Gussie 
-Emilie 
-Camilla 
-Abbie 
-Rocio 
-Kaitlyn 
-Jesse 
-Edythe 
-Ashleigh 
-Selina 
-Lakesha 
-Geri 
-Allene 
-Pamala 
-Michaela 
-Dayna 
-Caryn 
-Rosalia 
-Sun 
-Jacquline 
-Rebeca 
-Marybeth 
-Krystle 
-Iola 
-Dottie 
-Bennie 
-Belle 
-Aubrey 
-Griselda 
-Ernestina 
-Elida 
-Adrianne 
-Demetria 
-Delma 
-Chong 
-Jaqueline 
-Destiny 
-Arleen 
-Virgina 
-Retha 
-Fatima 
-Tillie 
-Eleanore 
-Cari 
-Treva 
-Birdie 
-Wilhelmina 
-Rosalee 
-Maurine 
-Latrice 
-Yong 
-Jena 
-Taryn 
-Elia 
-Debby 
-Maudie 
-Jeanna 
-Delilah 
-Catrina 
-Shonda 
-Hortencia 
-Theodora 
-Teresita 
-Robbin 
-Danette 
-Maryjane 
-Freddie 
-Delphine 
-Brianne 
-Nilda 
-Danna 
-Cindi 
-Bess 
-Iona 
-Hanna 
-Ariel 
-Winona 
-Vida 
-Rosita 
-Marianna 
-William 
-Racheal 
-Guillermina 
-Eloisa 
-Celestine 
-Caren 
-Malissa 
-Lona 
-Chantel 
-Shellie 
-Marisela 
-Leora 
-Agatha 
-Soledad 
-Migdalia 
-Ivette 
-Christen 
-Athena 
-Janel 
-Chloe 
-Veda 
-Pattie 
-Tessie 
-Tera 
-Marilynn 
-Lucretia 
-Karrie 
-Dinah 
-Daniela 
-Alecia 
-Adelina 
-Vernice 
-Shiela 
-Portia 
-Merry 
-Lashawn 
-Devon 
-Dara 
-Tawana 
-Oma 
-Verda 
-Christin 
-Alene 
-Zella 
-Sandi 
-Rafaela 
-Maya 
-Kira 
-Candida 
-Alvina 
-Suzan 
-Shayla 
-Lyn 
-Lettie 
-Alva 
-Samatha 
-Oralia 
-Matilde 
-Madonna 
-Larissa 
-Vesta 
-Renita 
-India 
-Delois 
-Shanda 
-Phillis 
-Lorri 
-Erlinda 
-Cruz 
-Cathrine 
-Barb 
-Zoe 
-Isabell 
-Ione 
-Gisela 
-Charlie 
-Valencia 
-Roxanna 
-Mayme 
-Kisha 
-Ellie 
-Mellissa 
-Dorris 
-Dalia 
-Bella 
-Annetta 
-Zoila 
-Reta 
-Reina 
-Lauretta 
-Kylie 
-Christal 
-Pilar 
-Charla 
-Elissa 
-Tiffani 
-Tana 
-Paulina 
-Leota 
-Breanna 
-Jayme 
-Carmel 
-Vernell 
-Tomasa 
-Mandi 
-Dominga 
-Santa 
-Melodie 
-Lura 
-Alexa 
-Tamela 
-Ryan 
-Mirna 
-Kerrie 
-Venus 
-Noel 
-Felicita 
-Cristy 
-Carmelita 
-Berniece 
-Annemarie 
-Tiara 
-Roseanne 
-Missy 
-Cori 
-Roxana 
-Pricilla 
-Kristal 
-Jung 
-Elyse 
-Haydee 
-Aletha 
-Bettina 
-Marge 
-Gillian 
-Filomena 
-Charles 
-Zenaida 
-Harriette 
-Caridad 
-Vada 
-Una 
-Aretha 
-Pearline 
-Marjory 
-Marcela 
-Flor 
-Evette 
-Elouise 
-Alina 
-Trinidad 
-David 
-Damaris 
-Catharine 
-Carroll 
-Belva 
-Nakia 
-Marlena 
-Luanne 
-Lorine 
-Karon 
-Dorene 
-Danita 
-Brenna 
-Tatiana 
-Sammie 
-Louann 
-Loren 
-Julianna 
-Andria 
-Philomena 
-Lucila 
-Leonora 
-Dovie 
-Romona 
-Mimi 
-Jacquelin 
-Gaye 
-Tonja 
-Misti 
-Joe 
-Gene 
-Chastity 
-Stacia 
-Roxann 
-Micaela 
-Nikita 
-Mei 
-Velda 
-Marlys 
-Johnna 
-Aura 
-Lavern 
-Ivonne 
-Hayley 
-Nicki 
-Majorie 
-Herlinda 
-George 
-Alpha 
-Yadira 
-Perla 
-Gregoria 
-Daniel 
-Antonette 
-Shelli 
-Mozelle 
-Mariah 
-Joelle 
-Cordelia 
-Josette 
-Chiquita 
-Trista 
-Louis 
-Laquita 
-Georgiana 
-Candi 
-Shanon 
-Lonnie 
-Hildegard 
-Cecil 
-Valentina 
-Stephany 
-Magda 
-Karol 
-Gerry 
-Gabriella 
-Tiana 
-Roma 
-Richelle 
-Ray 
-Princess 
-Oleta 
-Jacque 
-Idella 
-Alaina 
-Suzanna 
-Jovita 
-Blair 
-Tosha 
-Raven 
-Nereida 
-Marlyn 
-Kyla 
-Joseph 
-Delfina 
-Tena 
-Stephenie 
-Sabina 
-Nathalie 
-Marcelle 
-Gertie 
-Darleen 
-Thea 
-Sharonda 
-Shantel 
-Belen 
-Venessa 
-Rosalina 
-Ona 
-Genoveva 
-Corey 
-Clementine 
-Rosalba 
-Renate 
-Renata 
-Mi 
-Ivory 
-Georgianna 
-Floy 
-Dorcas 
-Ariana 
-Tyra 
-Theda 
-Mariam 
-Juli 
-Jesica 
-Donnie 
-Vikki 
-Verla 
-Roselyn 
-Melvina 
-Jannette 
-Ginny 
-Debrah 
-Corrie 
-Asia 
-Violeta 
-Myrtis 
-Latricia 
-Collette 
-Charleen 
-Anissa 
-Viviana 
-Twyla 
-Precious 
-Nedra 
-Latonia 
-Lan 
-Hellen 
-Fabiola 
-Annamarie 
-Adell 
-Sharyn 
-Chantal 
-Niki 
-Maud 
-Lizette 
-Lindy 
-Kia 
-Kesha 
-Jeana 
-Danelle 
-Charline 
-Chanel 
-Carrol 
-Valorie 
-Lia 
-Dortha 
-Cristal 
-Sunny 
-Leone 
-Leilani 
-Gerri 
-Debi 
-Andra 
-Keshia 
-Ima 
-Eulalia 
-Easter 
-Dulce 
-Natividad 
-Linnie 
-Kami 
-Georgie 
-Catina 
-Brook 
-Alda 
-Winnifred 
-Sharla 
-Ruthann 
-Meaghan 
-Magdalene 
-Lissette 
-Adelaida 
-Venita 
-Trena 
-Shirlene 
-Shameka 
-Elizebeth 
-Dian 
-Shanta 
-Mickey 
-Latosha 
-Carlotta 
-Windy 
-Soon 
-Rosina 
-Mariann 
-Leisa 
-Jonnie 
-Dawna 
-Cathie 
-Billy 
-Astrid 
-Sidney 
-Laureen 
-Janeen 
-Holli 
-Fawn 
-Vickey 
-Teressa 
-Shante 
-Rubye 
-Marcelina 
-Chanda 
-Cary 
-Terese 
-Scarlett 
-Marty 
-Marnie 
-Lulu 
-Lisette 
-Jeniffer 
-Elenor 
-Dorinda 
-Donita 
-Carman 
-Bernita 
-Altagracia 
-Aleta 
-Adrianna 
-Zoraida 
-Ronnie 
-Nicola 
-Lyndsey 
-Kendall 
-Janina 
-Chrissy 
-Ami 
-Starla 
-Phylis 
-Phuong 
-Kyra 
-Charisse 
-Blanch 
-Sanjuanita 
-Rona 
-Nanci 
-Marilee 
-Maranda 
-Cory 
-Brigette 
-Sanjuana 
-Marita 
-Kassandra 
-Joycelyn 
-Ira 
-Felipa 
-Chelsie 
-Bonny 
-Mireya 
-Lorenza 
-Kyong 
-Ileana 
-Candelaria 
-Tony 
-Toby 
-Sherie 
-Ok 
-Mark 
-Lucie 
-Leatrice 
-Lakeshia 
-Gerda 
-Edie 
-Bambi 
-Marylin 
-Lavon 
-Hortense 
-Garnet 
-Evie 
-Tressa 
-Shayna 
-Lavina 
-Kyung 
-Jeanetta 
-Sherrill 
-Shara 
-Phyliss 
-Mittie 
-Anabel 
-Alesia 
-Thuy 
-Tawanda 
-Richard 
-Joanie 
-Tiffanie 
-Lashanda 
-Karissa 
-Enriqueta 
-Daria 
-Daniella 
-Corinna 
-Alanna 
-Abbey 
-Roxane 
-Roseanna 
-Magnolia 
-Lida 
-Kyle 
-Joellen 
-Era 
-Coral 
-Carleen 
-Tresa 
-Peggie 
-Novella 
-Nila 
-Maybelle 
-Jenelle 
-Carina 
-Nova 
-Melina 
-Marquerite 
-Margarette 
-Josephina 
-Evonne 
-Devin 
-Cinthia 
-Albina 
-Toya 
-Tawnya 
-Sherita 
-Santos 
-Myriam 
-Lizabeth 
-Lise 
-Keely 
-Jenni 
-Giselle 
-Cheryle 
-Ardith 
-Ardis 
-Alesha 
-Adriane 
-Shaina 
-Linnea 
-Karolyn 
-Hong 
-Florida 
-Felisha 
-Dori 
-Darci 
-Artie 
-Armida 
-Zola 
-Xiomara 
-Vergie 
-Shamika 
-Nena 
-Nannette 
-Maxie 
-Lovie 
-Jeane 
-Jaimie 
-Inge 
-Farrah 
-Elaina 
-Caitlyn 
-Starr 
-Felicitas 
-Cherly 
-Caryl 
-Yolonda 
-Yasmin 
-Teena 
-Prudence 
-Pennie 
-Nydia 
-Mackenzie 
-Orpha 
-Marvel 
-Lizbeth 
-Laurette 
-Jerrie 
-Hermelinda 
-Carolee 
-Tierra 
-Mirian 
-Meta 
-Melony 
-Kori 
-Jennette 
-Jamila 
-Ena 
-Anh 
-Yoshiko 
-Susannah 
-Salina 
-Rhiannon 
-Joleen 
-Cristine 
-Ashton 
-Aracely 
-Tomeka 
-Shalonda 
-Marti 
-Lacie 
-Kala 
-Jada 
-Ilse 
-Hailey 
-Brittani 
-Zona 
-Syble 
-Sherryl 
-Randy 
-Nidia 
-Marlo 
-Kandice 
-Kandi 
-Deb 
-Dean 
-America 
-Alycia 
-Tommy 
-Ronna 
-Norene 
-Mercy 
-Jose 
-Ingeborg 
-Giovanna 
-Gemma 
-Christel 
-Audry 
-Zora 
-Vita 
-Van 
-Trish 
-Stephaine 
-Shirlee 
-Shanika 
-Melonie 
-Mazie 
-Jazmin 
-Inga 
-Hoa 
-Hettie 
-Geralyn 
-Fonda 
-Estrella 
-Adella 
-Su 
-Sarita 
-Rina 
-Milissa 
-Maribeth 
-Golda 
-Evon 
-Ethelyn 
-Enedina 
-Cherise 
-Chana 
-Velva 
-Tawanna 
-Sade 
-Mirta 
-Li 
-Karie 
-Jacinta 
-Elna 
-Davina 
-Cierra 
-Ashlie 
-Albertha 
-Tanesha 
-Stephani 
-Nelle 
-Mindi 
-Lu 
-Lorinda 
-Larue 
-Florene 
-Demetra 
-Dedra 
-Ciara 
-Chantelle 
-Ashly 
-Suzy 
-Rosalva 
-Noelia 
-Lyda 
-Leatha 
-Krystyna 
-Kristan 
-Karri 
-Darline 
-Darcie 
-Cinda 
-Cheyenne 
-Cherrie 
-Awilda 
-Almeda 
-Rolanda 
-Lanette 
-Jerilyn 
-Gisele 
-Evalyn 
-Cyndi 
-Cleta 
-Carin 
-Zina 
-Zena 
-Velia 
-Tanika 
-Paul 
-Charissa 
-Thomas 
-Talia 
-Margarete 
-Lavonda 
-Kaylee 
-Kathlene 
-Jonna 
-Irena 
-Ilona 
-Idalia 
-Candis 
-Candance 
-Brandee 
-Anitra 
-Alida 
-Sigrid 
-Nicolette 
-Maryjo 
-Linette 
-Hedwig 
-Christiana 
-Cassidy 
-Alexia 
-Tressie 
-Modesta 
-Lupita 
-Lita 
-Gladis 
-Evelia 
-Davida 
-Cherri 
-Cecily 
-Ashely 
-Annabel 
-Agustina 
-Wanita 
-Shirly 
-Rosaura 
-Hulda 
-Eun 
-Bailey 
-Yetta 
-Verona 
-Thomasina 
-Sibyl 
-Shannan 
-Mechelle 
-Lue 
-Leandra 
-Lani 
-Kylee 
-Kandy 
-Jolynn 
-Ferne 
-Eboni 
-Corene 
-Alysia 
-Zula 
-Nada 
-Moira 
-Lyndsay 
-Lorretta 
-Juan 
-Jammie 
-Hortensia 
-Gaynell 
-Cameron 
-Adria 
-Vina 
-Vicenta 
-Tangela 
-Stephine 
-Norine 
-Nella 
-Liana 
-Leslee 
-Kimberely 
-Iliana 
-Glory 
-Felica 
-Emogene 
-Elfriede 
-Eden 
-Eartha 
-Carma 
-Bea 
-Ocie 
-Marry 
-Lennie 
-Kiara 
-Jacalyn 
-Carlota 
-Arielle 
-Yu 
-Star 
-Otilia 
-Kirstin 
-Kacey 
-Johnetta 
-Joey 
-Joetta 
-Jeraldine 
-Jaunita 
-Elana 
-Dorthea 
-Cami 
-Amada 
-Adelia 
-Vernita 
-Tamar 
-Siobhan 
-Renea 
-Rashida 
-Ouida 
-Odell 
-Nilsa 
-Meryl 
-Kristyn 
-Julieta 
-Danica 
-Breanne 
-Aurea 
-Anglea 
-Sherron 
-Odette 
-Malia 
-Lorelei 
-Lin 
-Leesa 
-Kenna 
-Kathlyn 
-Fiona 
-Charlette 
-Suzie 
-Shantell 
-Sabra 
-Racquel 
-Myong 
-Mira 
-Martine 
-Lucienne 
-Lavada 
-Juliann 
-Johnie 
-Elvera 
-Delphia 
-Clair 
-Christiane 
-Charolette 
-Carri 
-Augustine 
-Asha 
-Angella 
-Paola 
-Ninfa 
-Leda 
-Lai 
-Eda 
-Sunshine 
-Stefani 
-Shanell 
-Palma 
-Machelle 
-Lissa 
-Kecia 
-Kathryne 
-Karlene 
-Julissa 
-Jettie 
-Jenniffer 
-Hui 
-Corrina 
-Christopher 
-Carolann 
-Alena 
-Tess 
-Rosaria 
-Myrtice 
-Marylee 
-Liane 
-Kenyatta 
-Judie 
-Janey 
-In 
-Elmira 
-Eldora 
-Denna 
-Cristi 
-Cathi 
-Zaida 
-Vonnie 
-Viva 
-Vernie 
-Rosaline 
-Mariela 
-Luciana 
-Lesli 
-Karan 
-Felice 
-Deneen 
-Adina 
-Wynona 
-Tarsha 
-Sheron 
-Shasta 
-Shanita 
-Shani 
-Shandra 
-Randa 
-Pinkie 
-Paris 
-Nelida 
-Marilou 
-Lyla 
-Laurene 
-Laci 
-Joi 
-Janene 
-Dorotha 
-Daniele 
-Dani 
-Carolynn 
-Carlyn 
-Berenice 
-Ayesha 
-Anneliese 
-Alethea 
-Thersa 
-Tamiko 
-Rufina 
-Oliva 
-Mozell 
-Marylyn 
-Madison 
-Kristian 
-Kathyrn 
-Kasandra 
-Kandace 
-Janae 
-Gabriel 
-Domenica 
-Debbra 
-Dannielle 
-Chun 
-Buffy 
-Barbie 
-Arcelia 
-Aja 
-Zenobia 
-Sharen 
-Sharee 
-Patrick 
-Page 
-My 
-Lavinia 
-Kum 
-Kacie 
-Jackeline 
-Huong 
-Felisa 
-Emelia 
-Eleanora 
-Cythia 
-Cristin 
-Clyde 
-Claribel 
-Caron 
-Anastacia 
-Zulma 
-Zandra 
-Yoko 
-Tenisha 
-Susann 
-Sherilyn 
-Shay 
-Shawanda 
-Sabine 
-Romana 
-Mathilda 
-Linsey 
-Keiko 
-Joana 
-Isela 
-Gretta 
-Georgetta 
-Eugenie 
-Dusty 
-Desirae 
-Delora 
-Corazon 
-Antonina 
-Anika 
-Willene 
-Tracee 
-Tamatha 
-Regan 
-Nichelle 
-Mickie 
-Maegan 
-Luana 
-Lanita 
-Kelsie 
-Edelmira 
-Bree 
-Afton 
-Teodora 
-Tamie 
-Shena 
-Meg 
-Linh 
-Keli 
-Kaci 
-Danyelle 
-Britt 
-Arlette 
-Albertine 
-Adelle 
-Tiffiny 
-Stormy 
-Simona 
-Numbers 
-Nicolasa 
-Nichol 
-Nia 
-Nakisha 
-Mee 
-Maira 
-Loreen 
-Kizzy 
-Johnny 
-Jay 
-Fallon 
-Christene 
-Bobbye 
-Anthony 
-Ying 
-Vincenza 
-Tanja 
-Rubie 
-Roni 
-Queenie 
-Margarett 
-Kimberli 
-Irmgard 
-Idell 
-Hilma 
-Evelina 
-Esta 
-Emilee 
-Dennise 
-Dania 
-Carl 
-Carie 
-Antonio 
-Wai 
-Sang 
-Risa 
-Rikki 
-Particia 
-Mui 
-Masako 
-Mario 
-Luvenia 
-Loree 
-Loni 
-Lien 
-Kevin 
-Gigi 
-Florencia 
-Dorian 
-Denita 
-Dallas 
-Chi 
-Billye 
-Alexander 
-Tomika 
-Sharita 
-Rana 
-Nikole 
-Neoma 
-Margarite 
-Madalyn 
-Lucina 
-Laila 
-Kali 
-Jenette 
-Gabriele 
-Evelyne 
-Elenora 
-Clementina 
-Alejandrina 
-Zulema 
-Violette 
-Vannessa 
-Thresa 
-Retta 
-Pia 
-Patience 
-Noella 
-Nickie 
-Jonell 
-Delta 
-Chung 
-Chaya 
-Camelia 
-Bethel 
-Anya 
-Andrew 
-Thanh 
-Suzann 
-Spring 
-Shu 
-Mila 
-Lilla 
-Laverna 
-Keesha 
-Kattie 
-Gia 
-Georgene 
-Eveline 
-Estell 
-Elizbeth 
-Vivienne 
-Vallie 
-Trudie 
-Stephane 
-Michel 
-Magaly 
-Madie 
-Kenyetta 
-Karren 
-Janetta 
-Hermine 
-Harmony 
-Drucilla 
-Debbi 
-Celestina 
-Candie 
-Britni 
-Beckie 
-Amina 
-Zita 
-Yun 
-Yolande 
-Vivien 
-Vernetta 
-Trudi 
-Sommer 
-Pearle 
-Patrina 
-Ossie 
-Nicolle 
-Loyce 
-Letty 
-Larisa 
-Katharina 
-Joselyn 
-Jonelle 
-Jenell 
-Iesha 
-Heide 
-Florinda 
-Florentina 
-Flo 
-Elodia 
-Dorine 
-Brunilda 
-Brigid 
-Ashli 
-Ardella 
-Twana 
-Thu 
-Tarah 
-Sung 
-Shea 
-Shavon 
-Shane 
-Serina 
-Rayna 
-Ramonita 
-Nga 
-Margurite 
-Lucrecia 
-Kourtney 
-Kati 
-Jesus 
-Jesenia 
-Diamond 
-Crista 
-Ayana 
-Alica 
-Alia 
-Vinnie 
-Suellen 
-Romelia 
-Rachell 
-Piper 
-Olympia 
-Michiko 
-Kathaleen 
-Jolie 
-Jessi 
-Janessa 
-Hana 
-Ha 
-Elease 
-Carletta 
-Britany 
-Shona 
-Salome 
-Rosamond 
-Regena 
-Raina 
-Ngoc 
-Nelia 
-Louvenia 
-Lesia 
-Latrina 
-Laticia 
-Larhonda 
-Jina 
-Jacki 
-Hollis 
-Holley 
-Emmy 
-Deeann 
-Coretta 
-Arnetta 
-Velvet 
-Thalia 
-Shanice 
-Neta 
-Mikki 
-Micki 
-Lonna 
-Leana 
-Lashunda 
-Kiley 
-Joye 
-Jacqulyn 
-Ignacia 
-Hyun 
-Hiroko 
-Henry 
-Henriette 
-Elayne 
-Delinda 
-Darnell 
-Dahlia 
-Coreen 
-Consuela 
-Conchita 
-Celine 
-Babette 
-Ayanna 
-Anette 
-Albertina 
-Skye 
-Shawnee 
-Shaneka 
-Quiana 
-Pamelia 
-Min 
-Merri 
-Merlene 
-Margit 
-Kiesha 
-Kiera 
-Kaylene 
-Jodee 
-Jenise 
-Erlene 
-Emmie 
-Else 
-Daryl 
-Dalila 
-Daisey 
-Cody 
-Casie 
-Belia 
-Babara 
-Versie 
-Vanesa 
-Shelba 
-Shawnda 
-Sam 
-Norman 
-Nikia 
-Naoma 
-Marna 
-Margeret 
-Madaline 
-Lawana 
-Kindra 
-Jutta 
-Jazmine 
-Janett 
-Hannelore 
-Glendora 
-Gertrud 
-Garnett 
-Freeda 
-Frederica 
-Florance 
-Flavia 
-Dennis 
-Carline 
-Beverlee 
-Anjanette 
-Valda 
-Trinity 
-Tamala 
-Stevie 
-Shonna 
-Sha 
-Sarina 
-Oneida 
-Micah 
-Merilyn 
-Marleen 
-Lurline 
-Lenna 
-Katherin 
-Jin 
-Jeni 
-Hae 
-Gracia 
-Glady 
-Farah 
-Eric 
-Enola 
-Ema 
-Dominque 
-Devona 
-Delana 
-Cecila 
-Caprice 
-Alysha 
-Ali 
-Alethia 
-Vena 
-Theresia 
-Tawny 
-Song 
-Shakira 
-Samara 
-Sachiko 
-Rachele 
-Pamella 
-Nicky 
-Marni 
-Mariel 
-Maren 
-Malisa 
-Ligia 
-Lera 
-Latoria 
-Larae 
-Kimber 
-Kathern 
-Karey 
-Jennefer 
-Janeth 
-Halina 
-Fredia 
-Delisa 
-Debroah 
-Ciera 
-Chin 
-Angelika 
-Andree 
-Altha 
-Yen 
-Vivan 
-Terresa 
-Tanna 
-Suk 
-Sudie 
-Soo 
-Signe 
-Salena 
-Ronni 
-Rebbecca 
-Myrtie 
-Mckenzie 
-Malika 
-Maida 
-Loan 
-Leonarda 
-Kayleigh 
-France 
-Ethyl 
-Ellyn 
-Dayle 
-Cammie 
-Brittni 
-Birgit 
-Avelina 
-Asuncion 
-Arianna 
-Akiko 
-Venice 
-Tyesha 
-Tonie 
-Tiesha 
-Takisha 
-Steffanie 
-Sindy 
-Santana 
-Meghann 
-Manda 
-Macie 
-Lady 
-Kellye 
-Kellee 
-Joslyn 
-Jason 
-Inger 
-Indira 
-Glinda 
-Glennis 
-Fernanda 
-Faustina 
-Eneida 
-Elicia 
-Dot 
-Digna 
-Dell 
-Arletta 
-Andre 
-Willia 
-Tammara 
-Tabetha 
-Sherrell 
-Sari 
-Refugio 
-Rebbeca 
-Pauletta 
-Nieves 
-Natosha 
-Nakita 
-Mammie 
-Kenisha 
-Kazuko 
-Kassie 
-Gary 
-Earlean 
-Daphine 
-Corliss 
-Clotilde 
-Carolyne 
-Bernetta 
-Augustina 
-Audrea 
-Annis 
-Annabell 
-Yan 
-Tennille 
-Tamica 
-Selene 
-Sean 
-Rosana 
-Regenia 
-Qiana 
-Markita 
-Macy 
-Leeanne 
-Laurine 
-Kym 
-Jessenia 
-Janita 
-Georgine 
-Genie 
-Emiko 
-Elvie 
-Deandra 
-Dagmar 
-Corie 
-Collen 
-Cherish 
-Romaine 
-Porsha 
-Pearlene 
-Micheline 
-Merna 
-Margorie 
-Margaretta 
-Lore 
-Kenneth 
-Jenine 
-Hermina 
-Fredericka 
-Elke 
-Drusilla 
-Dorathy 
-Dione 
-Desire 
-Celena 
-Brigida 
-Angeles 
-Allegra 
-Theo 
-Tamekia 
-Synthia 
-Stephen 
-Sook 
-Slyvia 
-Rosann 
-Reatha 
-Raye 
-Marquetta 
-Margart 
-Ling 
-Layla 
-Kymberly 
-Kiana 
-Kayleen 
-Katlyn 
-Karmen 
-Joella 
-Irina 
-Emelda 
-Eleni 
-Detra 
-Clemmie 
-Cheryll 
-Chantell 
-Cathey 
-Arnita 
-Arla 
-Angle 
-Angelic 
-Alyse 
-Zofia 
-Thomasine 
-Tennie 
-Son 
-Sherly 
-Sherley 
-Sharyl 
-Remedios 
-Petrina 
-Nickole 
-Myung 
-Myrle 
-Mozella 
-Louanne 
-Lisha 
-Latia 
-Lane 
-Krysta 
-Julienne 
-Joel 
-Jeanene 
-Jacqualine 
-Isaura 
-Gwenda 
-Earleen 
-Donald 
-Cleopatra 
-Carlie 
-Audie 
-Antonietta 
-Alise 
-Alex 
-Verdell 
-Val 
-Tyler 
-Tomoko 
-Thao 
-Talisha 
-Steven 
-So 
-Shemika 
-Shaun 
-Scarlet 
-Savanna 
-Santina 
-Rosia 
-Raeann 
-Odilia 
-Nana 
-Minna 
-Magan 
-Lynelle 
-Le 
-Karma 
-Joeann 
-Ivana 
-Inell 
-Ilana 
-Hye 
-Honey 
-Hee 
-Gudrun 
-Frank 
-Dreama 
-Crissy 
-Chante 
-Carmelina 
-Arvilla 
-Arthur 
-Annamae 
-Alvera 
-Aleida 
-Aaron 
-Yee 
-Yanira 
-Vanda 
-Tianna 
-Tam 
-Stefania 
-Shira 
-Perry 
-Nicol 
-Nancie 
-Monserrate 
-Minh 
-Melynda 
-Melany 
-Matthew 
-Lovella 
-Laure 
-Kirby 
-Kacy 
-Jacquelynn 
-Hyon 
-Gertha 
-Francisco 
-Eliana 
-Christena 
-Christeen 
-Charise 
-Caterina 
-Carley 
-Candyce 
-Arlena 
-Ammie 
-Yang 
-Willette 
-Vanita 
-Tuyet 
-Tiny 
-Syreeta 
-Silva 
-Scott 
-Ronald 
-Penney 
-Nyla 
-Michal 
-Maurice 
-Maryam 
-Marya 
-Magen 
-Ludie 
-Loma 
-Livia 
-Lanell 
-Kimberlie 
-Julee 
-Donetta 
-Diedra 
-Denisha 
-Deane 
-Dawne 
-Clarine 
-Cherryl 
-Bronwyn 
-Brandon 
-Alla 
-Valery 
-Tonda 
-Sueann 
-Soraya 
-Shoshana 
-Shela 
-Sharleen 
-Shanelle 
-Nerissa 
-Micheal 
-Meridith 
-Mellie 
-Maye 
-Maple 
-Magaret 
-Luis 
-Lili 
-Leonila 
-Leonie 
-Leeanna 
-Lavonia 
-Lavera 
-Kristel 
-Kathey 
-Kathe 
-Justin 
-Julian 
-Jimmy 
-Jann 
-Ilda 
-Hildred 
-Hildegarde 
-Genia 
-Fumiko 
-Evelin 
-Ermelinda 
-Elly 
-Dung 
-Doloris 
-Dionna 
-Danae 
-Berneice 
-Annice 
-Alix 
-Verena 
-Verdie 
-Tristan 
-Shawnna 
-Shawana 
-Shaunna 
-Rozella 
-Randee 
-Ranae 
-Milagro 
-Lynell 
-Luise 
-Louie 
-Loida 
-Lisbeth 
-Karleen 
-Junita 
-Jona 
-Isis 
-Hyacinth 
-Hedy 
-Gwenn 
-Ethelene 
-Erline 
-Edward 
-Donya 
-Domonique 
-Delicia 
-Dannette 
-Cicely 
-Branda 
-Blythe 
-Bethann 
-Ashlyn 
-Annalee 
-Alline 
-Yuko 
-Vella 
-Trang 
-Towanda 
-Tesha 
-Sherlyn 
-Narcisa 
-Miguelina 
-Meri 
-Maybell 
-Marlana 
-Marguerita 
-Madlyn 
-Luna 
-Lory 
-Loriann 
-Liberty 
-Leonore 
-Leighann 
-Laurice 
-Latesha 
-Laronda 
-Katrice 
-Kasie 
-Karl 
-Kaley 
-Jadwiga 
-Glennie 
-Gearldine 
-Francina 
-Epifania 
-Dyan 
-Dorie 
-Diedre 
-Denese 
-Demetrice 
-Delena 
-Darby 
-Cristie 
-Cleora 
-Catarina 
-Carisa 
-Bernie 
-Barbera 
-Almeta 
-Trula 
-Tereasa 
-Solange 
-Sheilah 
-Shavonne 
-Sanora 
-Rochell 
-Mathilde 
-Margareta 
-Maia 
-Lynsey 
-Lawanna 
-Launa 
-Kena 
-Keena 
-Katia 
-Jamey 
-Glynda 
-Gaylene 
-Elvina 
-Elanor 
-Danuta 
-Danika 
-Cristen 
-Cordie 
-Coletta 
-Clarita 
-Carmon 
-Brynn 
-Azucena 
-Aundrea 
-Angele 
-Yi 
-Walter 
-Verlie 
-Verlene 
-Tamesha 
-Silvana 
-Sebrina 
-Samira 
-Reda 
-Raylene 
-Penni 
-Pandora 
-Norah 
-Noma 
-Mireille 
-Melissia 
-Maryalice 
-Laraine 
-Kimbery 
-Karyl 
-Karine 
-Kam 
-Jolanda 
-Johana 
-Jesusa 
-Jaleesa 
-Jae 
-Jacquelyne 
-Irish 
-Iluminada 
-Hilaria 
-Hanh 
-Gennie 
-Francie 
-Floretta 
-Exie 
-Edda 
-Drema 
-Delpha 
-Bev 
-Barbar 
-Assunta 
-Ardell 
-Annalisa 
-Alisia 
-Yukiko 
-Yolando 
-Wonda 
-Wei 
-Waltraud 
-Veta 
-Tequila 
-Temeka 
-Tameika 
-Shirleen 
-Shenita 
-Piedad 
-Ozella 
-Mirtha 
-Marilu 
-Kimiko 
-Juliane 
-Jenice 
-Jen 
-Janay 
-Jacquiline 
-Hilde 
-Fe 
-Fae 
-Evan 
-Eugene 
-Elois 
-Echo 
-Devorah 
-Chau 
-Brinda 
-Betsey 
-Arminda 
-Aracelis 
-Apryl 
-Annett 
-Alishia 
-Veola 
-Usha 
-Toshiko 
-Theola 
-Tashia 
-Talitha 
-Shery 
-Rudy 
-Renetta 
-Reiko 
-Rasheeda 
-Omega 
-Obdulia 
-Mika 
-Melaine 
-Meggan 
-Martin 
-Marlen 
-Marget 
-Marceline 
-Mana 
-Magdalen 
-Librada 
-Lezlie 
-Lexie 
-Latashia 
-Lasandra 
-Kelle 
-Isidra 
-Isa 
-Inocencia 
-Gwyn 
-Francoise 
-Erminia 
-Erinn 
-Dimple 
-Devora 
-Criselda 
-Armanda 
-Arie 
-Ariane 
-Angelo 
-Angelena 
-Allen 
-Aliza 
-Adriene 
-Adaline 
-Xochitl 
-Twanna 
-Tran 
-Tomiko 
-Tamisha 
-Taisha 
-Susy 
-Siu 
-Rutha 
-Roxy 
-Rhona 
-Raymond 
-Otha 
-Noriko 
-Natashia 
-Merrie 
-Melvin 
-Marinda 
-Mariko 
-Margert 
-Loris 
-Lizzette 
-Leisha 
-Kaila 
-Ka 
-Joannie 
-Jerrica 
-Jene 
-Jannet 
-Janee 
-Jacinda 
-Herta 
-Elenore 
-Doretta 
-Delaine 
-Daniell 
-Claudie 
-China 
-Britta 
-Apolonia 
-Amberly 
-Alease 
-Yuri 
-Yuk 
-Wen 
-Waneta 
-Ute 
-Tomi 
-Sharri 
-Sandie 
-Roselle 
-Reynalda 
-Raguel 
-Phylicia 
-Patria 
-Olimpia 
-Odelia 
-Mitzie 
-Mitchell 
-Miss 
-Minda 
-Mignon 
-Mica 
-Mendy 
-Marivel 
-Maile 
-Lynetta 
-Lavette 
-Lauryn 
-Latrisha 
-Lakiesha 
-Kiersten 
-Kary 
-Josphine 
-Jolyn 
-Jetta 
-Janise 
-Jacquie 
-Ivelisse 
-Glynis 
-Gianna 
-Gaynelle 
-Emerald 
-Demetrius 
-Danyell 
-Danille 
-Dacia 
-Coralee 
-Cher 
-Ceola 
-Brett 
-Bell 
-Arianne 
-Aleshia 
-Yung 
-Williemae 
-Troy 
-Trinh 
-Thora 
-Tai 
-Svetlana 
-Sherika 
-Shemeka 
-Shaunda 
-Roseline 
-Ricki 
-Melda 
-Mallie 
-Lavonna 
-Latina 
-Larry 
-Laquanda 
-Lala 
-Lachelle 
-Klara 
-Kandis 
-Johna 
-Jeanmarie 
-Jaye 
-Hang 
-Grayce 
-Gertude 
-Emerita 
-Ebonie 
-Clorinda 
-Ching 
-Chery 
-Carola 
-Breann 
-Blossom 
-Bernardine 
-Becki 
-Arletha 
-Argelia 
-Ara 
-Alita 
-Yulanda 
-Yon 
-Yessenia 
-Tobi 
-Tasia 
-Sylvie 
-Shirl 
-Shirely 
-Sheridan 
-Shella 
-Shantelle 
-Sacha 
-Royce 
-Rebecka 
-Reagan 
-Providencia 
-Paulene 
-Misha 
-Miki 
-Marline 
-Marica 
-Lorita 
-Latoyia 
-Lasonya 
-Kerstin 
-Kenda 
-Keitha 
-Kathrin 
-Jaymie 
-Jack 
-Gricelda 
-Ginette 
-Eryn 
-Elina 
-Elfrieda 
-Danyel 
-Cheree 
-Chanelle 
-Barrie 
-Avery 
-Aurore 
-Annamaria 
-Alleen 
-Ailene 
-Aide 
-Yasmine 
-Vashti 
-Valentine 
-Treasa 
-Tory 
-Tiffaney 
-Sheryll 
-Sharie 
-Shanae 
-Sau 
-Raisa 
-Pa 
-Neda 
-Mitsuko 
-Mirella 
-Milda 
-Maryanna 
-Maragret 
-Mabelle 
-Luetta 
-Lorina 
-Letisha 
-Latarsha 
-Lanelle 
-Lajuana 
-Krissy 
-Karly 
-Karena 
-Jon 
-Jessika 
-Jerica 
-Jeanelle 
-January 
-Jalisa 
-Jacelyn 
-Izola 
-Ivey 
-Gregory 
-Euna 
-Etha 
-Drew 
-Domitila 
-Dominica 
-Daina 
-Creola 
-Carli 
-Camie 
-Bunny 
-Brittny 
-Ashanti 
-Anisha 
-Aleen 
-Adah 
-Yasuko 
-Winter 
-Viki 
-Valrie 
-Tona 
-Tinisha 
-Thi 
-Terisa 
-Tatum 
-Taneka 
-Simonne 
-Shalanda 
-Serita 
-Ressie 
-Refugia 
-Paz 
-Olene 
-Na 
-Merrill 
-Margherita 
-Mandie 
-Man 
-Maire 
-Lyndia 
-Luci 
-Lorriane 
-Loreta 
-Leonia 
-Lavona 
-Lashawnda 
-Lakia 
-Kyoko 
-Krystina 
-Krysten 
-Kenia 
-Kelsi 
-Jude 
-Jeanice 
-Isobel 
-Georgiann 
-Genny 
-Felicidad 
-Eilene 
-Deon 
-Deloise 
-Deedee 
-Dannie 
-Conception 
-Clora 
-Cherilyn 
-Chang 
-Calandra 
-Berry 
-Armandina 
-Anisa 
-Ula 
-Timothy 
-Tiera 
-Theressa 
-Stephania 
-Sima 
-Shyla 
-Shonta 
-Shera 
-Shaquita 
-Shala 
-Sammy 
-Rossana 
-Nohemi 
-Nery 
-Moriah 
-Melita 
-Melida 
-Melani 
-Marylynn 
-Marisha 
-Mariette 
-Malorie 
-Madelene 
-Ludivina 
-Loria 
-Lorette 
-Loralee 
-Lianne 
-Leon 
-Lavenia 
-Laurinda 
-Lashon 
-Kit 
-Kimi 
-Keila 
-Katelynn 
-Kai 
-Jone 
-Joane 
-Ji 
-Jayna 
-Janella 
-Ja 
-Hue 
-Hertha 
-Francene 
-Elinore 
-Despina 
-Delsie 
-Deedra 
-Clemencia 
-Carry 
-Carolin 
-Carlos 
-Bulah 
-Brittanie 
-Bok 
-Blondell 
-Bibi 
-Beaulah 
-Beata 
-Annita 
-Agripina 
-Virgen 
-Valene 
-Un 
-Twanda 
-Tommye 
-Toi 
-Tarra 
-Tari 
-Tammera 
-Shakia 
-Sadye 
-Ruthanne 
-Rochel 
-Rivka 
-Pura 
-Nenita 
-Natisha 
-Ming 
-Merrilee 
-Melodee 
-Marvis 
-Lucilla 
-Leena 
-Laveta 
-Larita 
-Lanie 
-Keren 
-Ileen 
-Georgeann 
-Genna 
-Genesis 
-Frida 
-Ewa 
-Eufemia 
-Emely 
-Ela 
-Edyth 
-Deonna 
-Deadra 
-Darlena 
-Chanell 
-Chan 
-Cathern 
-Cassondra 
-Cassaundra 
-Bernarda 
-Berna 
-Arlinda 
-Anamaria 
-Albert 
-Wesley 
-Vertie 
-Valeri 
-Torri 
-Tatyana 
-Stasia 
-Sherise 
-Sherill 
-Season 
-Scottie 
-Sanda 
-Ruthe 
-Rosy 
-Roberto 
-Robbi 
-Ranee 
-Quyen 
-Pearly 
-Palmira 
-Onita 
-Nisha 
-Niesha 
-Nida 
-Nevada 
-Nam 
-Merlyn 
-Mayola 
-Marylouise 
-Maryland 
-Marx 
-Marth 
-Margene 
-Madelaine 
-Londa 
-Leontine 
-Leoma 
-Leia 
-Lawrence 
-Lauralee 
-Lanora 
-Lakita 
-Kiyoko 
-Keturah 
-Katelin 
-Kareen 
-Jonie 
-Johnette 
-Jenee 
-Jeanett 
-Izetta 
-Hiedi 
-Heike 
-Hassie 
-Harold 
-Giuseppina 
-Georgann 
-Fidela 
-Fernande 
-Elwanda 
-Ellamae 
-Eliz 
-Dusti 
-Dotty 
-Cyndy 
-Coralie 
-Celesta 
-Argentina 
-Alverta 
-Xenia 
-Wava 
-Vanetta 
-Torrie 
-Tashina 
-Tandy 
-Tambra 
-Tama 
-Stepanie 
-Shila 
-Shaunta 
-Sharan 
-Shaniqua 
-Shae 
-Setsuko 
-Serafina 
-Sandee 
-Rosamaria 
-Priscila 
-Olinda 
-Nadene 
-Muoi 
-Michelina 
-Mercedez 
-Maryrose 
-Marin 
-Marcene 
-Mao 
-Magali 
-Mafalda 
-Logan 
-Linn 
-Lannie 
-Kayce 
-Karoline 
-Kamilah 
-Kamala 
-Justa 
-Joline 
-Jennine 
-Jacquetta 
-Iraida 
-Gerald 
-Georgeanna 
-Franchesca 
-Fairy 
-Emeline 
-Elane 
-Ehtel 
-Earlie 
-Dulcie 
-Dalene 
-Cris 
-Classie 
-Chere 
-Charis 
-Caroyln 
-Carmina 
-Carita 
-Brian 
-Bethanie 
-Ayako 
-Arica 
-An 
-Alysa 
-Alessandra 
-Akilah 
-Adrien 
-Zetta 
-Youlanda 
-Yelena 
-Yahaira 
-Xuan 
-Wendolyn 
-Victor 
-Tijuana 
-Terrell 
-Terina 
-Teresia 
-Suzi 
-Sunday 
-Sherell 
-Shavonda 
-Shaunte 
-Sharda 
-Shakita 
-Sena 
-Ryann 
-Rubi 
-Riva 
-Reginia 
-Rea 
-Rachal 
-Parthenia 
-Pamula 
-Monnie 
-Monet 
-Michaele 
-Melia 
-Marine 
-Malka 
-Maisha 
-Lisandra 
-Leo 
-Lekisha 
-Lean 
-Laurence 
-Lakendra 
-Krystin 
-Kortney 
-Kizzie 
-Kittie 
-Kera 
-Kendal 
-Kemberly 
-Kanisha 
-Julene 
-Jule 
-Joshua 
-Johanne 
-Jeffrey 
-Jamee 
-Han 
-Halley 
-Gidget 
-Galina 
-Fredricka 
-Fleta 
-Fatimah 
-Eusebia 
-Elza 
-Eleonore 
-Dorthey 
-Doria 
-Donella 
-Dinorah 
-Delorse 
-Claretha 
-Christinia 
-Charlyn 
-Bong 
-Belkis 
-Azzie 
-Andera 
-Aiko 
-Adena 
-Yer 
-Yajaira 
-Wan 
-Vania 
-Ulrike 
-Toshia 
-Tifany 
-Stefany 
-Shizue 
-Shenika 
-Shawanna 
-Sharolyn 
-Sharilyn 
-Shaquana 
-Shantay 
-See 
-Rozanne 
-Roselee 
-Rickie 
-Remona 
-Reanna 
-Raelene 
-Quinn 
-Phung 
-Petronila 
-Natacha 
-Nancey 
-Myrl 
-Miyoko 
-Miesha 
-Merideth 
-Marvella 
-Marquitta 
-Marhta 
-Marchelle 
-Lizeth 
-Libbie 
-Lahoma 
-Ladawn 
-Kina 
-Katheleen 
-Katharyn 
-Karisa 
-Kaleigh 
-Junie 
-Julieann 
-Johnsie 
-Janean 
-Jaimee 
-Jackqueline 
-Hisako 
-Herma 
-Helaine 
-Gwyneth 
-Glenn 
-Gita 
-Eustolia 
-Emelina 
-Elin 
-Edris 
-Donnette 
-Donnetta 
-Dierdre 
-Denae 
-Darcel 
-Claude 
-Clarisa 
-Cinderella 
-Chia 
-Charlesetta 
-Charita 
-Celsa 
-Cassy 
-Cassi 
-Carlee 
-Bruna 
-Brittaney 
-Brande 
-Billi 
-Bao 
-Antonetta 
-Angla 
-Angelyn 
-Analisa 
-Alane 
-Wenona 
-Wendie 
-Veronique 
-Vannesa 
-Tobie 
-Tempie 
-Sumiko 
-Sulema 
-Sparkle 
-Somer 
-Sheba 
-Shayne 
-Sharice 
-Shanel 
-Shalon 
-Sage 
-Roy 
-Rosio 
-Roselia 
-Renay 
-Rema 
-Reena 
-Porsche 
-Ping 
-Peg 
-Ozie 
-Oretha 
-Oralee 
-Oda 
-Nu 
-Ngan 
-Nakesha 
-Milly 
-Marybelle 
-Marlin 
-Maris 
-Margrett 
-Maragaret 
-Manie 
-Lurlene 
-Lillia 
-Lieselotte 
-Lavelle 
-Lashaunda 
-Lakeesha 
-Keith 
-Kaycee 
-Kalyn 
-Joya 
-Joette 
-Jenae 
-Janiece 
-Illa 
-Grisel 
-Glayds 
-Genevie 
-Gala 
-Fredda 
-Fred 
-Elmer 
-Eleonor 
-Debera 
-Deandrea 
-Dan 
-Corrinne 
-Cordia 
-Contessa 
-Colene 
-Cleotilde 
-Charlott 
-Chantay 
-Cecille 
-Beatris 
-Azalee 
-Arlean 
-Ardath 
-Anjelica 
-Anja 
-Alfredia 
-Aleisha 
-Adam 
-Zada 
-Yuonne 
-Xiao 
-Willodean 
-Whitley 
-Vennie 
-Vanna 
-Tyisha 
-Tova 
-Torie 
-Tonisha 
-Tilda 
-Tien 
-Temple 
-Sirena 
-Sherril 
-Shanti 
-Shan 
-Senaida 
-Samella 
-Robbyn 
-Renda 
-Reita 
-Phebe 
-Paulita 
-Nobuko 
-Nguyet 
-Neomi 
-Moon 
-Mikaela 
-Melania 
-Maximina 
-Marg 
-Maisie 
-Lynna 
-Lilli 
-Layne 
-Lashaun 
-Lakenya 
-Lael 
-Kirstie 
-Kathline 
-Kasha 
-Karlyn 
-Karima 
-Jovan 
-Josefine 
-Jennell 
-Jacqui 
-Jackelyn 
-Hyo 
-Hien 
-Grazyna 
-Florrie 
-Floria 
-Eleonora 
-Dwana 
-Dorla 
-Dong 
-Delmy 
-Deja 
-Dede 
-Dann 
-Crysta 
-Clelia 
-Claris 
-Clarence 
-Chieko 
-Cherlyn 
-Cherelle 
-Charmain 
-Chara 
-Cammy 
-Bee 
-Arnette 
-Ardelle 
-Annika 
-Amiee 
-Amee 
-Allena 
-Yvone 
-Yuki 
-Yoshie 
-Yevette 
-Yael 
-Willetta 
-Voncile 
-Venetta 
-Tula 
-Tonette 
-Timika 
-Temika 
-Telma 
-Teisha 
-Taren 
-Ta 
-Stacee 
-Shin 
-Shawnta 
-Saturnina 
-Ricarda 
-Pok 
-Pasty 
-Onie 
-Nubia 
-Mora 
-Mike 
-Marielle 
-Mariella 
-Marianela 
-Mardell 
-Many 
-Luanna 
-Loise 
-Lisabeth 
-Lindsy 
-Lilliana 
-Lilliam 
-Lelah 
-Leigha 
-Leanora 
-Lang 
-Kristeen 
-Khalilah 
-Keeley 
-Kandra 
-Junko 
-Joaquina 
-Jerlene 
-Jani 
-Jamika 
-Jame 
-Hsiu 
-Hermila 
-Golden 
-Genevive 
-Evia 
-Eugena 
-Emmaline 
-Elfreda 
-Elene 
-Donette 
-Delcie 
-Deeanna 
-Darcey 
-Cuc 
-Clarinda 
-Cira 
-Chae 
-Celinda 
-Catheryn 
-Catherin 
-Casimira 
-Carmelia 
-Camellia 
-Breana 
-Bobette 
-Bernardina 
-Bebe 
-Basilia 
-Arlyne 
-Amal 
-Alayna 
-Zonia 
-Zenia 
-Yuriko 
-Yaeko 
-Wynell 
-Willow 
-Willena 
-Vernia 
-Tu 
-Travis 
-Tora 
-Terrilyn 
-Terica 
-Tenesha 
-Tawna 
-Tajuana 
-Taina 
-Stephnie 
-Sona 
-Sol 
-Sina 
-Shondra 
-Shizuko 
-Sherlene 
-Sherice 
-Sharika 
-Rossie 
-Rosena 
-Rory 
-Rima 
-Ria 
-Rheba 
-Renna 
-Peter 
-Natalya 
-Nancee 
-Melodi 
-Meda 
-Maxima 
-Matha 
-Marketta 
-Maricruz 
-Marcelene 
-Malvina 
-Luba 
-Louetta 
-Leida 
-Lecia 
-Lauran 
-Lashawna 
-Laine 
-Khadijah 
-Katerine 
-Kasi 
-Kallie 
-Julietta 
-Jesusita 
-Jestine 
-Jessia 
-Jeremy 
-Jeffie 
-Janyce 
-Isadora 
-Georgianne 
-Fidelia 
-Evita 
-Eura 
-Eulah 
-Estefana 
-Elsy 
-Elizabet 
-Eladia 
-Dodie 
-Dion 
-Dia 
-Denisse 
-Deloras 
-Delila 
-Daysi 
-Dakota 
-Curtis 
-Crystle 
-Concha 
-Colby 
-Claretta 
-Chu 
-Christia 
-Charlsie 
-Charlena 
-Carylon 
-Bettyann 
-Asley 
-Ashlea 
-Amira 
-Ai 
-Agueda 
-Agnus 
-Yuette 
-Vinita 
-Victorina 
-Tynisha 
-Treena 
-Toccara 
-Tish 
-Thomasena 
-Tegan 
-Soila 
-Shiloh 
-Shenna 
-Sharmaine 
-Shantae 
-Shandi 
-September 
-Saran 
-Sarai 
-Sana 
-Samuel 
-Salley 
-Rosette 
-Rolande 
-Regine 
-Otelia 
-Oscar 
-Olevia 
-Nicholle 
-Necole 
-Naida 
-Myrta 
-Myesha 
-Mitsue 
-Minta 
-Mertie 
-Margy 
-Mahalia 
-Madalene 
-Love 
-Loura 
-Lorean 
-Lewis 
-Lesha 
-Leonida 
-Lenita 
-Lavone 
-Lashell 
-Lashandra 
-Lamonica 
-Kimbra 
-Katherina 
-Karry 
-Kanesha 
-Julio 
-Jong 
-Jeneva 
-Jaquelyn 
-Hwa 
-Gilma 
-Ghislaine 
-Gertrudis 
-Fransisca 
-Fermina 
-Ettie 
-Etsuko 
-Ellis 
-Ellan 
-Elidia 
-Edra 
-Dorethea 
-Doreatha 
-Denyse 
-Denny 
-Deetta 
-Daine 
-Cyrstal 
-Corrin 
-Cayla 
-Carlita 
-Camila 
-Burma 
-Bula 
-Buena 
-Blake 
-Barabara 
-Avril 
-Austin 
-Alaine 
-Zana 
-Wilhemina 
-Wanetta 
-Virgil 
-Vi 
-Veronika 
-Vernon 
-Verline 
-Vasiliki 
-Tonita 
-Tisa 
-Teofila 
-Tayna 
-Taunya 
-Tandra 
-Takako 
-Sunni 
-Suanne 
-Sixta 
-Sharell 
-Seema 
-Russell 
-Rosenda 
-Robena 
-Raymonde 
-Pei 
-Pamila 
-Ozell 
-Neida 
-Neely 
-Mistie 
-Micha 
-Merissa 
-Maurita 
-Maryln 
-Maryetta 
-Marshall 
-Marcell 
-Malena 
-Makeda 
-Maddie 
-Lovetta 
-Lourie 
-Lorrine 
-Lorilee 
-Lester 
-Laurena 
-Lashay 
-Larraine 
-Laree 
-Lacresha 
-Kristle 
-Krishna 
-Keva 
-Keira 
-Karole 
-Joie 
-Jinny 
-Jeannetta 
-Jama 
-Heidy 
-Gilberte 
-Gema 
-Faviola 
-Evelynn 
-Enda 
-Elli 
-Ellena 
-Divina 
-Dagny 
-Collene 
-Codi 
-Cindie 
-Chassidy 
-Chasidy 
-Catrice 
-Catherina 
-Cassey 
-Caroll 
-Carlena 
-Candra 
-Calista 
-Bryanna 
-Britteny 
-Beula 
-Bari 
-Audrie 
-Audria 
-Ardelia 
-Annelle 
-Angila 
-Alona 
-Allyn 
-James 
-John 
-Robert 
-Michael 
-William 
-David 
-Richard 
-Charles 
-Joseph 
-Thomas 
-Christopher 
-Daniel 
-Paul 
-Mark 
-Donald 
-George 
-Kenneth 
-Steven 
-Edward 
-Brian 
-Ronald 
-Anthony 
-Kevin 
-Jason 
-Matthew 
-Gary 
-Timothy 
-Jose 
-Larry 
-Jeffrey 
-Frank 
-Scott 
-Eric 
-Stephen 
-Andrew 
-Raymond 
-Gregory 
-Joshua 
-Jerry 
-Dennis 
-Walter 
-Patrick 
-Peter 
-Harold 
-Douglas 
-Henry 
-Carl 
-Arthur 
-Ryan 
-Roger 
-Joe 
-Juan 
-Jack 
-Albert 
-Jonathan 
-Justin 
-Terry 
-Gerald 
-Keith 
-Samuel 
-Willie 
-Ralph 
-Lawrence 
-Nicholas 
-Roy 
-Benjamin 
-Bruce 
-Brandon 
-Adam 
-Harry 
-Fred 
-Wayne 
-Billy 
-Steve 
-Louis 
-Jeremy 
-Aaron 
-Randy 
-Howard 
-Eugene 
-Carlos 
-Russell 
-Bobby 
-Victor 
-Martin 
-Ernest 
-Phillip 
-Todd 
-Jesse 
-Craig 
-Alan 
-Shawn 
-Clarence 
-Sean 
-Philip 
-Chris 
-Johnny 
-Earl 
-Jimmy 
-Antonio 
-Danny 
-Bryan 
-Tony 
-Luis 
-Mike 
-Stanley 
-Leonard 
-Nathan 
-Dale 
-Manuel 
-Rodney 
-Curtis 
-Norman 
-Allen 
-Marvin 
-Vincent 
-Glenn 
-Jeffery 
-Travis 
-Jeff 
-Chad 
-Jacob 
-Lee 
-Melvin 
-Alfred 
-Kyle 
-Francis 
-Bradley 
-Jesus 
-Herbert 
-Frederick 
-Ray 
-Joel 
-Edwin 
-Don 
-Eddie 
-Ricky 
-Troy 
-Randall 
-Barry 
-Alexander 
-Bernard 
-Mario 
-Leroy 
-Francisco 
-Marcus 
-Micheal 
-Theodore 
-Clifford 
-Miguel 
-Oscar 
-Jay 
-Jim 
-Tom 
-Calvin 
-Alex 
-Jon 
-Ronnie 
-Bill 
-Lloyd 
-Tommy 
-Leon 
-Derek 
-Warren 
-Darrell 
-Jerome 
-Floyd 
-Leo 
-Alvin 
-Tim 
-Wesley 
-Gordon 
-Dean 
-Greg 
-Jorge 
-Dustin 
-Pedro 
-Derrick 
-Dan 
-Lewis 
-Zachary 
-Corey 
-Herman 
-Maurice 
-Vernon 
-Roberto 
-Clyde 
-Glen 
-Hector 
-Shane 
-Ricardo 
-Sam 
-Rick 
-Lester 
-Brent 
-Ramon 
-Charlie 
-Tyler 
-Gilbert 
-Gene 
-Marc 
-Reginald 
-Ruben 
-Brett 
-Angel 
-Nathaniel 
-Rafael 
-Leslie 
-Edgar 
-Milton 
-Raul 
-Ben 
-Chester 
-Cecil 
-Duane 
-Franklin 
-Andre 
-Elmer 
-Brad 
-Gabriel 
-Ron 
-Mitchell 
-Roland 
-Arnold 
-Harvey 
-Jared 
-Adrian 
-Karl 
-Cory 
-Claude 
-Erik 
-Darryl 
-Jamie 
-Neil 
-Jessie 
-Christian 
-Javier 
-Fernando 
-Clinton 
-Ted 
-Mathew 
-Tyrone 
-Darren 
-Lonnie 
-Lance 
-Cody 
-Julio 
-Kelly 
-Kurt 
-Allan 
-Nelson 
-Guy 
-Clayton 
-Hugh 
-Max 
-Dwayne 
-Dwight 
-Armando 
-Felix 
-Jimmie 
-Everett 
-Jordan 
-Ian 
-Wallace 
-Ken 
-Bob 
-Jaime 
-Casey 
-Alfredo 
-Alberto 
-Dave 
-Ivan 
-Johnnie 
-Sidney 
-Byron 
-Julian 
-Isaac 
-Morris 
-Clifton 
-Willard 
-Daryl 
-Ross 
-Virgil 
-Andy 
-Marshall 
-Salvador 
-Perry 
-Kirk 
-Sergio 
-Marion 
-Tracy 
-Seth 
-Kent 
-Terrance 
-Rene 
-Eduardo 
-Terrence 
-Enrique 
-Freddie 
-Wade 
-Austin 
-Stuart 
-Fredrick 
-Arturo 
-Alejandro 
-Jackie 
-Joey 
-Nick 
-Luther 
-Wendell 
-Jeremiah 
-Evan 
-Julius 
-Dana 
-Donnie 
-Otis 
-Shannon 
-Trevor 
-Oliver 
-Luke 
-Homer 
-Gerard 
-Doug 
-Kenny 
-Hubert 
-Angelo 
-Shaun 
-Lyle 
-Matt 
-Lynn 
-Alfonso 
-Orlando 
-Rex 
-Carlton 
-Ernesto 
-Cameron 
-Neal 
-Pablo 
-Lorenzo 
-Omar 
-Wilbur 
-Blake 
-Grant 
-Horace 
-Roderick 
-Kerry 
-Abraham 
-Willis 
-Rickey 
-Jean 
-Ira 
-Andres 
-Cesar 
-Johnathan 
-Malcolm 
-Rudolph 
-Damon 
-Kelvin 
-Rudy 
-Preston 
-Alton 
-Archie 
-Marco 
-Wm 
-Pete 
-Randolph 
-Garry 
-Geoffrey 
-Jonathon 
-Felipe 
-Bennie 
-Gerardo 
-Ed 
-Dominic 
-Robin 
-Loren 
-Delbert 
-Colin 
-Guillermo 
-Earnest 
-Lucas 
-Benny 
-Noel 
-Spencer 
-Rodolfo 
-Myron 
-Edmund 
-Garrett 
-Salvatore 
-Cedric 
-Lowell 
-Gregg 
-Sherman 
-Wilson 
-Devin 
-Sylvester 
-Kim 
-Roosevelt 
-Israel 
-Jermaine 
-Forrest 
-Wilbert 
-Leland 
-Simon 
-Guadalupe 
-Clark 
-Irving 
-Carroll 
-Bryant 
-Owen 
-Rufus 
-Woodrow 
-Sammy 
-Kristopher 
-Mack 
-Levi 
-Marcos 
-Gustavo 
-Jake 
-Lionel 
-Marty 
-Taylor 
-Ellis 
-Dallas 
-Gilberto 
-Clint 
-Nicolas 
-Laurence 
-Ismael 
-Orville 
-Drew 
-Jody 
-Ervin 
-Dewey 
-Al 
-Wilfred 
-Josh 
-Hugo 
-Ignacio 
-Caleb 
-Tomas 
-Sheldon 
-Erick 
-Frankie 
-Stewart 
-Doyle 
-Darrel 
-Rogelio 
-Terence 
-Santiago 
-Alonzo 
-Elias 
-Bert 
-Elbert 
-Ramiro 
-Conrad 
-Pat 
-Noah 
-Grady 
-Phil 
-Cornelius 
-Lamar 
-Rolando 
-Clay 
-Percy 
-Dexter 
-Bradford 
-Merle 
-Darin 
-Amos 
-Terrell 
-Moses 
-Irvin 
-Saul 
-Roman 
-Darnell 
-Randal 
-Tommie 
-Timmy 
-Darrin 
-Winston 
-Brendan 
-Toby 
-Van 
-Abel 
-Dominick 
-Boyd 
-Courtney 
-Jan 
-Emilio 
-Elijah 
-Cary 
-Domingo 
-Santos 
-Aubrey 
-Emmett 
-Marlon 
-Emanuel 
-Jerald 
-Edmond 
-Emil 
-Dewayne 
-Will 
-Otto 
-Teddy 
-Reynaldo 
-Bret 
-Morgan 
-Jess 
-Trent 
-Humberto 
-Emmanuel 
-Stephan 
-Louie 
-Vicente 
-Lamont 
-Stacy 
-Garland 
-Miles 
-Micah 
-Efrain 
-Billie 
-Logan 
-Heath 
-Rodger 
-Harley 
-Demetrius 
-Ethan 
-Eldon 
-Rocky 
-Pierre 
-Junior 
-Freddy 
-Eli 
-Bryce 
-Antoine 
-Robbie 
-Kendall 
-Royce 
-Sterling 
-Mickey 
-Chase 
-Grover 
-Elton 
-Cleveland 
-Dylan 
-Chuck 
-Damian 
-Reuben 
-Stan 
-August 
-Leonardo 
-Jasper 
-Russel 
-Erwin 
-Benito 
-Hans 
-Monte 
-Blaine 
-Ernie 
-Curt 
-Quentin 
-Agustin 
-Murray 
-Jamal 
-Devon 
-Adolfo 
-Harrison 
-Tyson 
-Burton 
-Brady 
-Elliott 
-Wilfredo 
-Bart 
-Jarrod 
-Vance 
-Denis 
-Damien 
-Joaquin 
-Harlan 
-Desmond 
-Elliot 
-Darwin 
-Ashley 
-Gregorio 
-Buddy 
-Xavier 
-Kermit 
-Roscoe 
-Esteban 
-Anton 
-Solomon 
-Scotty 
-Norbert 
-Elvin 
-Williams 
-Nolan 
-Carey 
-Rod 
-Quinton 
-Hal 
-Brain 
-Rob 
-Elwood 
-Kendrick 
-Darius 
-Moises 
-Son 
-Marlin 
-Fidel 
-Thaddeus 
-Cliff 
-Marcel 
-Ali 
-Jackson 
-Raphael 
-Bryon 
-Armand 
-Alvaro 
-Jeffry 
-Dane 
-Joesph 
-Thurman 
-Ned 
-Sammie 
-Rusty 
-Michel 
-Monty 
-Rory 
-Fabian 
-Reggie 
-Mason 
-Graham 
-Kris 
-Isaiah 
-Vaughn 
-Gus 
-Avery 
-Loyd 
-Diego 
-Alexis 
-Adolph 
-Norris 
-Millard 
-Rocco 
-Gonzalo 
-Derick 
-Rodrigo 
-Gerry 
-Stacey 
-Carmen 
-Wiley 
-Rigoberto 
-Alphonso 
-Ty 
-Shelby 
-Rickie 
-Noe 
-Vern 
-Bobbie 
-Reed 
-Jefferson 
-Elvis 
-Bernardo 
-Mauricio 
-Hiram 
-Donovan 
-Basil 
-Riley 
-Ollie 
-Nickolas 
-Maynard 
-Scot 
-Vince 
-Quincy 
-Eddy 
-Sebastian 
-Federico 
-Ulysses 
-Heriberto 
-Donnell 
-Cole 
-Denny 
-Davis 
-Gavin 
-Emery 
-Ward 
-Romeo 
-Jayson 
-Dion 
-Dante 
-Clement 
-Coy 
-Odell 
-Maxwell 
-Jarvis 
-Bruno 
-Issac 
-Mary 
-Dudley 
-Brock 
-Sanford 
-Colby 
-Carmelo 
-Barney 
-Nestor 
-Hollis 
-Stefan 
-Donny 
-Art 
-Linwood 
-Beau 
-Weldon 
-Galen 
-Isidro 
-Truman 
-Delmar 
-Johnathon 
-Silas 
-Frederic 
-Dick 
-Kirby 
-Irwin 
-Cruz 
-Merlin 
-Merrill 
-Charley 
-Marcelino 
-Lane 
-Harris 
-Cleo 
-Carlo 
-Trenton 
-Kurtis 
-Hunter 
-Aurelio 
-Winfred 
-Vito 
-Collin 
-Denver 
-Carter 
-Leonel 
-Emory 
-Pasquale 
-Mohammad 
-Mariano 
-Danial 
-Blair 
-Landon 
-Dirk 
-Branden 
-Adan 
-Numbers 
-Clair 
-Buford 
-German 
-Bernie 
-Wilmer 
-Joan 
-Emerson 
-Zachery 
-Fletcher 
-Jacques 
-Errol 
-Dalton 
-Monroe 
-Josue 
-Dominique 
-Edwardo 
-Booker 
-Wilford 
-Sonny 
-Shelton 
-Carson 
-Theron 
-Raymundo 
-Daren 
-Tristan 
-Houston 
-Robby 
-Lincoln 
-Jame 
-Genaro 
-Gale 
-Bennett 
-Octavio 
-Cornell 
-Laverne 
-Hung 
-Arron 
-Antony 
-Herschel 
-Alva 
-Giovanni 
-Garth 
-Cyrus 
-Cyril 
-Ronny 
-Stevie 
-Lon 
-Freeman 
-Erin 
-Duncan 
-Kennith 
-Carmine 
-Augustine 
-Young 
-Erich 
-Chadwick 
-Wilburn 
-Russ 
-Reid 
-Myles 
-Anderson 
-Morton 
-Jonas 
-Forest 
-Mitchel 
-Mervin 
-Zane 
-Rich 
-Jamel 
-Lazaro 
-Alphonse 
-Randell 
-Major 
-Johnie 
-Jarrett 
-Brooks 
-Ariel 
-Abdul 
-Dusty 
-Luciano 
-Lindsey 
-Tracey 
-Seymour 
-Scottie 
-Eugenio 
-Mohammed 
-Sandy 
-Valentin 
-Chance 
-Arnulfo 
-Lucien 
-Ferdinand 
-Thad 
-Ezra 
-Sydney 
-Aldo 
-Rubin 
-Royal 
-Mitch 
-Earle 
-Abe 
-Wyatt 
-Marquis 
-Lanny 
-Kareem 
-Jamar 
-Boris 
-Isiah 
-Emile 
-Elmo 
-Aron 
-Leopoldo 
-Everette 
-Josef 
-Gail 
-Eloy 
-Dorian 
-Rodrick 
-Reinaldo 
-Lucio 
-Jerrod 
-Weston 
-Hershel 
-Barton 
-Parker 
-Lemuel 
-Lavern 
-Burt 
-Jules 
-Gil 
-Eliseo 
-Ahmad 
-Nigel 
-Efren 
-Antwan 
-Alden 
-Margarito 
-Coleman 
-Refugio 
-Dino 
-Osvaldo 
-Les 
-Deandre 
-Normand 
-Kieth 
-Ivory 
-Andrea 
-Trey 
-Norberto 
-Napoleon 
-Jerold 
-Fritz 
-Rosendo 
-Milford 
-Sang 
-Deon 
-Christoper 
-Alfonzo 
-Lyman 
-Josiah 
-Brant 
-Wilton 
-Rico 
-Jamaal 
-Dewitt 
-Carol 
-Brenton 
-Yong 
-Olin 
-Foster 
-Faustino 
-Claudio 
-Judson 
-Gino 
-Edgardo 
-Berry 
-Alec 
-Tanner 
-Jarred 
-Donn 
-Trinidad 
-Tad 
-Shirley 
-Prince 
-Porfirio 
-Odis 
-Maria 
-Lenard 
-Chauncey 
-Chang 
-Tod 
-Mel 
-Marcelo 
-Kory 
-Augustus 
-Keven 
-Hilario 
-Bud 
-Sal 
-Rosario 
-Orval 
-Mauro 
-Dannie 
-Zachariah 
-Olen 
-Anibal 
-Milo 
-Jed 
-Frances 
-Thanh 
-Dillon 
-Amado 
-Newton 
-Connie 
-Lenny 
-Tory 
-Richie 
-Lupe 
-Horacio 
-Brice 
-Mohamed 
-Delmer 
-Dario 
-Reyes 
-Dee 
-Mac 
-Jonah 
-Jerrold 
-Robt 
-Hank 
-Sung 
-Rupert 
-Rolland 
-Kenton 
-Damion 
-Chi 
-Antone 
-Waldo 
-Fredric 
-Bradly 
-Quinn 
-Kip 
-Burl 
-Walker 
-Tyree 
-Jefferey 
-Ahmed 
-Willy 
-Stanford 
-Oren 
-Noble 
-Moshe 
-Mikel 
-Enoch 
-Brendon 
-Quintin 
-Jamison 
-Florencio 
-Darrick 
-Tobias 
-Minh 
-Hassan 
-Giuseppe 
-Demarcus 
-Cletus 
-Tyrell 
-Lyndon 
-Keenan 
-Werner 
-Theo 
-Geraldo 
-Lou 
-Columbus 
-Chet 
-Bertram 
-Markus 
-Huey 
-Hilton 
-Dwain 
-Donte 
-Tyron 
-Omer 
-Isaias 
-Hipolito 
-Fermin 
-Chung 
-Adalberto 
-Valentine 
-Jamey 
-Bo 
-Barrett 
-Whitney 
-Teodoro 
-Mckinley 
-Maximo 
-Garfield 
-Sol 
-Raleigh 
-Lawerence 
-Abram 
-Rashad 
-King 
-Emmitt 
-Daron 
-Chong 
-Samual 
-Paris 
-Otha 
-Miquel 
-Lacy 
-Eusebio 
-Dong 
-Domenic 
-Darron 
-Buster 
-Antonia 
-Wilber 
-Renato 
-Jc 
-Hoyt 
-Haywood 
-Ezekiel 
-Chas 
-Florentino 
-Elroy 
-Clemente 
-Arden 
-Neville 
-Kelley 
-Edison 
-Deshawn 
-Carrol 
-Shayne 
-Nathanial 
-Jordon 
-Danilo 
-Claud 
-Val 
-Sherwood 
-Raymon 
-Rayford 
-Cristobal 
-Ambrose 
-Titus 
-Hyman 
-Felton 
-Ezequiel 
-Erasmo 
-Stanton 
-Lonny 
-Len 
-Ike 
-Milan 
-Lino 
-Jarod 
-Herb 
-Andreas 
-Walton 
-Rhett 
-Palmer 
-Jude 
-Douglass 
-Cordell 
-Oswaldo 
-Ellsworth 
-Virgilio 
-Toney 
-Nathanael 
-Del 
-Britt 
-Benedict 
-Mose 
-Hong 
-Leigh 
-Johnson 
-Isreal 
-Gayle 
-Garret 
-Fausto 
-Asa 
-Arlen 
-Zack 
-Warner 
-Modesto 
-Francesco 
-Manual 
-Jae 
-Gaylord 
-Gaston 
-Filiberto 
-Deangelo 
-Michale 
-Granville 
-Wes 
-Malik 
-Zackary 
-Tuan 
-Nicky 
-Eldridge 
-Cristopher 
-Cortez 
-Antione 
-Malcom 
-Long 
-Korey 
-Jospeh 
-Colton 
-Waylon 
-Von 
-Hosea 
-Shad 
-Santo 
-Rudolf 
-Rolf 
-Rey 
-Renaldo 
-Marcellus 
-Lucius 
-Lesley 
-Kristofer 
-Boyce 
-Benton 
-Man 
-Kasey 
-Jewell 
-Hayden 
-Harland 
-Arnoldo 
-Rueben 
-Leandro 
-Kraig 
-Jerrell 
-Jeromy 
-Hobert 
-Cedrick 
-Arlie 
-Winford 
-Wally 
-Patricia 
-Luigi 
-Keneth 
-Jacinto 
-Graig 
-Franklyn 
-Edmundo 
-Sid 
-Porter 
-Leif 
-Lauren 
-Jeramy 
-Elisha 
-Buck 
-Willian 
-Vincenzo 
-Shon 
-Michal 
-Lynwood 
-Lindsay 
-Jewel 
-Jere 
-Hai 
-Elden 
-Dorsey 
-Darell 
-Broderick 
-Alonso 
diff --git a/hyracks-storage-am-common/src/main/resources/dist.all.last.cleaned b/hyracks-storage-am-common/src/main/resources/dist.all.last.cleaned
deleted file mode 100755
index c9ec798..0000000
--- a/hyracks-storage-am-common/src/main/resources/dist.all.last.cleaned
+++ /dev/null
@@ -1,88799 +0,0 @@
-Smith
-Johnson
-Williams
-Jones
-Brown
-Davis
-Miller
-Wilson
-Moore
-Taylor
-Anderson
-Thomas
-Jackson
-White
-Harris
-Martin
-Thompson
-Garcia
-Martinez
-Robinson
-Clark
-Rodriguez
-Lewis
-Lee
-Walker
-Hall
-Allen
-Young
-Hernandez
-King
-Wright
-Lopez
-Hill
-Scott
-Green
-Adams
-Baker
-Gonzalez
-Nelson
-Carter
-Mitchell
-Perez
-Roberts
-Turner
-Phillips
-Campbell
-Parker
-Evans
-Edwards
-Collins
-Stewart
-Sanchez
-Morris
-Rogers
-Reed
-Cook
-Morgan
-Bell
-Murphy
-Bailey
-Rivera
-Cooper
-Richardson
-Cox
-Howard
-Ward
-Torres
-Peterson
-Gray
-Ramirez
-James
-Watson
-Brooks
-Kelly
-Sanders
-Price
-Bennett
-Wood
-Barnes
-Ross
-Henderson
-Coleman
-Jenkins
-Perry
-Powell
-Long
-Patterson
-Hughes
-Flores
-Washington
-Butler
-Simmons
-Foster
-Gonzales
-Bryant
-Alexander
-Russell
-Griffin
-Diaz
-Hayes
-Myers
-Ford
-Hamilton
-Graham
-Sullivan
-Wallace
-Woods
-Cole
-West
-Jordan
-Owens
-Reynolds
-Fisher
-Ellis
-Harrison
-Gibson
-Mcdonald
-Cruz
-Marshall
-Ortiz
-Gomez
-Murray
-Freeman
-Wells
-Webb
-Simpson
-Stevens
-Tucker
-Porter
-Hunter
-Hicks
-Crawford
-Henry
-Boyd
-Mason
-Morales
-Kennedy
-Warren
-Dixon
-Ramos
-Reyes
-Burns
-Gordon
-Shaw
-Holmes
-Rice
-Robertson
-Hunt
-Black
-Daniels
-Palmer
-Mills
-Nichols
-Grant
-Knight
-Ferguson
-Rose
-Stone
-Hawkins
-Dunn
-Perkins
-Hudson
-Spencer
-Gardner
-Stephens
-Payne
-Pierce
-Berry
-Matthews
-Arnold
-Wagner
-Willis
-Ray
-Watkins
-Olson
-Carroll
-Duncan
-Snyder
-Hart
-Cunningham
-Bradley
-Lane
-Andrews
-Ruiz
-Harper
-Fox
-Riley
-Armstrong
-Carpenter
-Weaver
-Greene
-Lawrence
-Elliott
-Chavez
-Sims
-Austin
-Peters
-Kelley
-Franklin
-Lawson
-Fields
-Gutierrez
-Ryan
-Schmidt
-Carr
-Vasquez
-Castillo
-Wheeler
-Chapman
-Oliver
-Montgomery
-Richards
-Williamson
-Johnston
-Banks
-Meyer
-Bishop
-Mccoy
-Howell
-Alvarez
-Morrison
-Hansen
-Fernandez
-Garza
-Harvey
-Little
-Burton
-Stanley
-Nguyen
-George
-Jacobs
-Reid
-Kim
-Fuller
-Lynch
-Dean
-Gilbert
-Garrett
-Romero
-Welch
-Larson
-Frazier
-Burke
-Hanson
-Day
-Mendoza
-Moreno
-Bowman
-Medina
-Fowler
-Brewer
-Hoffman
-Carlson
-Silva
-Pearson
-Holland
-Douglas
-Fleming
-Jensen
-Vargas
-Byrd
-Davidson
-Hopkins
-May
-Terry
-Herrera
-Wade
-Soto
-Walters
-Curtis
-Neal
-Caldwell
-Lowe
-Jennings
-Barnett
-Graves
-Jimenez
-Horton
-Shelton
-Barrett
-Obrien
-Castro
-Sutton
-Gregory
-Mckinney
-Lucas
-Miles
-Craig
-Rodriquez
-Chambers
-Holt
-Lambert
-Fletcher
-Watts
-Bates
-Hale
-Rhodes
-Pena
-Beck
-Newman
-Haynes
-Mcdaniel
-Mendez
-Bush
-Vaughn
-Parks
-Dawson
-Santiago
-Norris
-Hardy
-Love
-Steele
-Curry
-Powers
-Schultz
-Barker
-Guzman
-Page
-Munoz
-Ball
-Keller
-Chandler
-Weber
-Leonard
-Walsh
-Lyons
-Ramsey
-Wolfe
-Schneider
-Mullins
-Benson
-Sharp
-Bowen
-Daniel
-Barber
-Cummings
-Hines
-Baldwin
-Griffith
-Valdez
-Hubbard
-Salazar
-Reeves
-Warner
-Stevenson
-Burgess
-Santos
-Tate
-Cross
-Garner
-Mann
-Mack
-Moss
-Thornton
-Dennis
-Mcgee
-Farmer
-Delgado
-Aguilar
-Vega
-Glover
-Manning
-Cohen
-Harmon
-Rodgers
-Robbins
-Newton
-Todd
-Blair
-Higgins
-Ingram
-Reese
-Cannon
-Strickland
-Townsend
-Potter
-Goodwin
-Walton
-Rowe
-Hampton
-Ortega
-Patton
-Swanson
-Joseph
-Francis
-Goodman
-Maldonado
-Yates
-Becker
-Erickson
-Hodges
-Rios
-Conner
-Adkins
-Webster
-Norman
-Malone
-Hammond
-Flowers
-Cobb
-Moody
-Quinn
-Blake
-Maxwell
-Pope
-Floyd
-Osborne
-Paul
-Mccarthy
-Guerrero
-Lindsey
-Estrada
-Sandoval
-Gibbs
-Tyler
-Gross
-Fitzgerald
-Stokes
-Doyle
-Sherman
-Saunders
-Wise
-Colon
-Gill
-Alvarado
-Greer
-Padilla
-Simon
-Waters
-Nunez
-Ballard
-Schwartz
-Mcbride
-Houston
-Christensen
-Klein
-Pratt
-Briggs
-Parsons
-Mclaughlin
-Zimmerman
-French
-Buchanan
-Moran
-Copeland
-Roy
-Pittman
-Brady
-Mccormick
-Holloway
-Brock
-Poole
-Frank
-Logan
-Owen
-Bass
-Marsh
-Drake
-Wong
-Jefferson
-Park
-Morton
-Abbott
-Sparks
-Patrick
-Norton
-Huff
-Clayton
-Massey
-Lloyd
-Figueroa
-Carson
-Bowers
-Roberson
-Barton
-Tran
-Lamb
-Harrington
-Casey
-Boone
-Cortez
-Clarke
-Mathis
-Singleton
-Wilkins
-Cain
-Bryan
-Underwood
-Hogan
-Mckenzie
-Collier
-Luna
-Phelps
-Mcguire
-Allison
-Bridges
-Wilkerson
-Nash
-Summers
-Atkins
-Wilcox
-Pitts
-Conley
-Marquez
-Burnett
-Richard
-Cochran
-Chase
-Davenport
-Hood
-Gates
-Clay
-Ayala
-Sawyer
-Roman
-Vazquez
-Dickerson
-Hodge
-Acosta
-Flynn
-Espinoza
-Nicholson
-Monroe
-Wolf
-Morrow
-Kirk
-Randall
-Anthony
-Whitaker
-Oconnor
-Skinner
-Ware
-Molina
-Kirby
-Huffman
-Bradford
-Charles
-Gilmore
-Dominguez
-Oneal
-Bruce
-Lang
-Combs
-Kramer
-Heath
-Hancock
-Gallagher
-Gaines
-Shaffer
-Short
-Wiggins
-Mathews
-Mcclain
-Fischer
-Wall
-Small
-Melton
-Hensley
-Bond
-Dyer
-Cameron
-Grimes
-Contreras
-Christian
-Wyatt
-Baxter
-Snow
-Mosley
-Shepherd
-Larsen
-Hoover
-Beasley
-Glenn
-Petersen
-Whitehead
-Meyers
-Keith
-Garrison
-Vincent
-Shields
-Horn
-Savage
-Olsen
-Schroeder
-Hartman
-Woodard
-Mueller
-Kemp
-Deleon
-Booth
-Patel
-Calhoun
-Wiley
-Eaton
-Cline
-Navarro
-Harrell
-Lester
-Humphrey
-Parrish
-Duran
-Hutchinson
-Hess
-Dorsey
-Bullock
-Robles
-Beard
-Dalton
-Avila
-Vance
-Rich
-Blackwell
-York
-Johns
-Blankenship
-Trevino
-Salinas
-Campos
-Pruitt
-Moses
-Callahan
-Golden
-Montoya
-Hardin
-Guerra
-Mcdowell
-Carey
-Stafford
-Gallegos
-Henson
-Wilkinson
-Booker
-Merritt
-Miranda
-Atkinson
-Orr
-Decker
-Hobbs
-Preston
-Tanner
-Knox
-Pacheco
-Stephenson
-Glass
-Rojas
-Serrano
-Marks
-Hickman
-English
-Sweeney
-Strong
-Prince
-Mcclure
-Conway
-Walter
-Roth
-Maynard
-Farrell
-Lowery
-Hurst
-Nixon
-Weiss
-Trujillo
-Ellison
-Sloan
-Juarez
-Winters
-Mclean
-Randolph
-Leon
-Boyer
-Villarreal
-Mccall
-Gentry
-Carrillo
-Kent
-Ayers
-Lara
-Shannon
-Sexton
-Pace
-Hull
-Leblanc
-Browning
-Velasquez
-Leach
-Chang
-House
-Sellers
-Herring
-Noble
-Foley
-Bartlett
-Mercado
-Landry
-Durham
-Walls
-Barr
-Mckee
-Bauer
-Rivers
-Everett
-Bradshaw
-Pugh
-Velez
-Rush
-Estes
-Dodson
-Morse
-Sheppard
-Weeks
-Camacho
-Bean
-Barron
-Livingston
-Middleton
-Spears
-Branch
-Blevins
-Chen
-Kerr
-Mcconnell
-Hatfield
-Harding
-Ashley
-Solis
-Herman
-Frost
-Giles
-Blackburn
-William
-Pennington
-Woodward
-Finley
-Mcintosh
-Koch
-Best
-Solomon
-Mccullough
-Dudley
-Nolan
-Blanchard
-Rivas
-Brennan
-Mejia
-Kane
-Benton
-Joyce
-Buckley
-Haley
-Valentine
-Maddox
-Russo
-Mcknight
-Buck
-Moon
-Mcmillan
-Crosby
-Berg
-Dotson
-Mays
-Roach
-Church
-Chan
-Richmond
-Meadows
-Faulkner
-Oneill
-Knapp
-Kline
-Barry
-Ochoa
-Jacobson
-Gay
-Avery
-Hendricks
-Horne
-Shepard
-Hebert
-Cherry
-Cardenas
-Mcintyre
-Whitney
-Waller
-Holman
-Donaldson
-Cantu
-Terrell
-Morin
-Gillespie
-Fuentes
-Tillman
-Sanford
-Bentley
-Peck
-Key
-Salas
-Rollins
-Gamble
-Dickson
-Battle
-Santana
-Cabrera
-Cervantes
-Howe
-Hinton
-Hurley
-Spence
-Zamora
-Yang
-Mcneil
-Suarez
-Case
-Petty
-Gould
-Mcfarland
-Sampson
-Carver
-Bray
-Rosario
-Macdonald
-Stout
-Hester
-Melendez
-Dillon
-Farley
-Hopper
-Galloway
-Potts
-Bernard
-Joyner
-Stein
-Aguirre
-Osborn
-Mercer
-Bender
-Franco
-Rowland
-Sykes
-Benjamin
-Travis
-Pickett
-Crane
-Sears
-Mayo
-Dunlap
-Hayden
-Wilder
-Mckay
-Coffey
-Mccarty
-Ewing
-Cooley
-Vaughan
-Bonner
-Cotton
-Holder
-Stark
-Ferrell
-Cantrell
-Fulton
-Lynn
-Lott
-Calderon
-Rosa
-Pollard
-Hooper
-Burch
-Mullen
-Fry
-Riddle
-Levy
-David
-Duke
-Odonnell
-Guy
-Michael
-Britt
-Frederick
-Daugherty
-Berger
-Dillard
-Alston
-Jarvis
-Frye
-Riggs
-Chaney
-Odom
-Duffy
-Fitzpatrick
-Valenzuela
-Merrill
-Mayer
-Alford
-Mcpherson
-Acevedo
-Donovan
-Barrera
-Albert
-Cote
-Reilly
-Compton
-Raymond
-Mooney
-Mcgowan
-Craft
-Cleveland
-Clemons
-Wynn
-Nielsen
-Baird
-Stanton
-Snider
-Rosales
-Bright
-Witt
-Stuart
-Hays
-Holden
-Rutledge
-Kinney
-Clements
-Castaneda
-Slater
-Hahn
-Emerson
-Conrad
-Burks
-Delaney
-Pate
-Lancaster
-Sweet
-Justice
-Tyson
-Sharpe
-Whitfield
-Talley
-Macias
-Irwin
-Burris
-Ratliff
-Mccray
-Madden
-Kaufman
-Beach
-Goff
-Cash
-Bolton
-Mcfadden
-Levine
-Good
-Byers
-Kirkland
-Kidd
-Workman
-Carney
-Dale
-Mcleod
-Holcomb
-England
-Finch
-Head
-Burt
-Hendrix
-Sosa
-Haney
-Franks
-Sargent
-Nieves
-Downs
-Rasmussen
-Bird
-Hewitt
-Lindsay
-Le
-Foreman
-Valencia
-Oneil
-Delacruz
-Vinson
-Dejesus
-Hyde
-Forbes
-Gilliam
-Guthrie
-Wooten
-Huber
-Barlow
-Boyle
-Mcmahon
-Buckner
-Rocha
-Puckett
-Langley
-Knowles
-Cooke
-Velazquez
-Whitley
-Noel
-Vang
-Shea
-Rouse
-Hartley
-Mayfield
-Elder
-Rankin
-Hanna
-Cowan
-Lucero
-Arroyo
-Slaughter
-Haas
-Oconnell
-Minor
-Kendrick
-Shirley
-Kendall
-Boucher
-Archer
-Boggs
-Odell
-Dougherty
-Andersen
-Newell
-Crowe
-Wang
-Friedman
-Bland
-Swain
-Holley
-Felix
-Pearce
-Childs
-Yarbrough
-Galvan
-Proctor
-Meeks
-Lozano
-Mora
-Rangel
-Bacon
-Villanueva
-Schaefer
-Rosado
-Helms
-Boyce
-Goss
-Stinson
-Smart
-Lake
-Ibarra
-Hutchins
-Covington
-Reyna
-Gregg
-Werner
-Crowley
-Hatcher
-Mackey
-Bunch
-Womack
-Polk
-Jamison
-Dodd
-Childress
-Childers
-Camp
-Villa
-Dye
-Springer
-Mahoney
-Dailey
-Belcher
-Lockhart
-Griggs
-Costa
-Connor
-Brandt
-Winter
-Walden
-Moser
-Tracy
-Tatum
-Mccann
-Akers
-Lutz
-Pryor
-Law
-Orozco
-Mcallister
-Lugo
-Davies
-Shoemaker
-Madison
-Rutherford
-Newsome
-Magee
-Chamberlain
-Blanton
-Simms
-Godfrey
-Flanagan
-Crum
-Cordova
-Escobar
-Downing
-Sinclair
-Donahue
-Krueger
-Mcginnis
-Gore
-Farris
-Webber
-Corbett
-Andrade
-Starr
-Lyon
-Yoder
-Hastings
-Mcgrath
-Spivey
-Krause
-Harden
-Crabtree
-Kirkpatrick
-Hollis
-Brandon
-Arrington
-Ervin
-Clifton
-Ritter
-Mcghee
-Bolden
-Maloney
-Gagnon
-Dunbar
-Ponce
-Pike
-Mayes
-Heard
-Beatty
-Mobley
-Kimball
-Butts
-Montes
-Herbert
-Grady
-Eldridge
-Braun
-Hamm
-Gibbons
-Seymour
-Moyer
-Manley
-Herron
-Plummer
-Elmore
-Cramer
-Gary
-Rucker
-Hilton
-Blue
-Pierson
-Fontenot
-Field
-Rubio
-Grace
-Goldstein
-Elkins
-Wills
-Novak
-John
-Hickey
-Worley
-Gorman
-Katz
-Dickinson
-Broussard
-Fritz
-Woodruff
-Crow
-Christopher
-Britton
-Forrest
-Nance
-Lehman
-Bingham
-Zuniga
-Whaley
-Shafer
-Coffman
-Steward
-Delarosa
-Nix
-Neely
-Numbers
-Mata
-Manuel
-Davila
-Mccabe
-Kessler
-Emery
-Bowling
-Hinkle
-Welsh
-Pagan
-Goldberg
-Goins
-Crouch
-Cuevas
-Quinones
-Mcdermott
-Hendrickson
-Samuels
-Denton
-Bergeron
-Lam
-Ivey
-Locke
-Haines
-Thurman
-Snell
-Hoskins
-Byrne
-Milton
-Winston
-Arthur
-Arias
-Stanford
-Roe
-Corbin
-Beltran
-Chappell
-Hurt
-Downey
-Dooley
-Tuttle
-Couch
-Payton
-Mcelroy
-Crockett
-Groves
-Clement
-Leslie
-Cartwright
-Dickey
-Mcgill
-Dubois
-Muniz
-Erwin
-Self
-Tolbert
-Dempsey
-Cisneros
-Sewell
-Latham
-Garland
-Vigil
-Tapia
-Sterling
-Rainey
-Norwood
-Lacy
-Stroud
-Meade
-Amos
-Tipton
-Lord
-Kuhn
-Hilliard
-Bonilla
-Teague
-Courtney
-Gunn
-Ho
-Greenwood
-Correa
-Reece
-Weston
-Poe
-Trent
-Pineda
-Phipps
-Frey
-Kaiser
-Ames
-Paige
-Gunter
-Schmitt
-Milligan
-Espinosa
-Carlton
-Bowden
-Vickers
-Lowry
-Pritchard
-Costello
-Piper
-Mcclellan
-Lovell
-Drew
-Sheehan
-Quick
-Hatch
-Dobson
-Singh
-Jeffries
-Hollingsworth
-Sorensen
-Meza
-Fink
-Donnelly
-Burrell
-Bruno
-Tomlinson
-Colbert
-Billings
-Ritchie
-Helton
-Sutherland
-Peoples
-Mcqueen
-Gaston
-Thomason
-Mckinley
-Givens
-Crocker
-Vogel
-Robison
-Dunham
-Coker
-Swartz
-Keys
-Lilly
-Ladner
-Hannah
-Willard
-Richter
-Hargrove
-Edmonds
-Brantley
-Albright
-Murdock
-Boswell
-Muller
-Quintero
-Padgett
-Kenney
-Daly
-Connolly
-Pierre
-Inman
-Quintana
-Lund
-Barnard
-Villegas
-Simons
-Land
-Huggins
-Tidwell
-Sanderson
-Bullard
-Mcclendon
-Duarte
-Draper
-Meredith
-Marrero
-Dwyer
-Abrams
-Stover
-Goode
-Fraser
-Crews
-Bernal
-Smiley
-Godwin
-Fish
-Conklin
-Mcneal
-Baca
-Esparza
-Crowder
-Bower
-Nicholas
-Chung
-Brewster
-Mcneill
-Dick
-Rodrigues
-Leal
-Coates
-Raines
-Mccain
-Mccord
-Miner
-Holbrook
-Swift
-Dukes
-Carlisle
-Aldridge
-Ackerman
-Starks
-Ricks
-Holliday
-Ferris
-Hairston
-Sheffield
-Lange
-Fountain
-Marino
-Doss
-Betts
-Kaplan
-Carmichael
-Bloom
-Ruffin
-Penn
-Kern
-Bowles
-Sizemore
-Larkin
-Dupree
-Jewell
-Silver
-Seals
-Metcalf
-Hutchison
-Henley
-Farr
-Castle
-Mccauley
-Hankins
-Gustafson
-Deal
-Curran
-Ash
-Waddell
-Ramey
-Cates
-Pollock
-Major
-Irvin
-Cummins
-Messer
-Heller
-Dewitt
-Lin
-Funk
-Cornett
-Palacios
-Galindo
-Cano
-Hathaway
-Singer
-Pham
-Enriquez
-Aaron
-Salgado
-Pelletier
-Painter
-Wiseman
-Blount
-Hand
-Feliciano
-Temple
-Houser
-Doherty
-Mead
-Mcgraw
-Toney
-Swan
-Melvin
-Capps
-Blanco
-Blackmon
-Wesley
-Thomson
-Mcmanus
-Fair
-Burkett
-Post
-Gleason
-Rudolph
-Ott
-Dickens
-Cormier
-Voss
-Rushing
-Rosenberg
-Hurd
-Dumas
-Benitez
-Arellano
-Story
-Marin
-Caudill
-Bragg
-Jaramillo
-Huerta
-Gipson
-Colvin
-Biggs
-Vela
-Platt
-Cassidy
-Tompkins
-Mccollum
-Kay
-Gabriel
-Dolan
-Daley
-Crump
-Street
-Sneed
-Kilgore
-Grove
-Grimm
-Davison
-Brunson
-Prater
-Marcum
-Devine
-Kyle
-Dodge
-Stratton
-Rosas
-Choi
-Tripp
-Ledbetter
-Lay
-Hightower
-Haywood
-Feldman
-Epps
-Yeager
-Posey
-Sylvester
-Scruggs
-Cope
-Stubbs
-Richey
-Overton
-Trotter
-Sprague
-Cordero
-Butcher
-Burger
-Stiles
-Burgos
-Woodson
-Horner
-Bassett
-Purcell
-Haskins
-Gee
-Akins
-Abraham
-Hoyt
-Ziegler
-Spaulding
-Hadley
-Grubbs
-Sumner
-Murillo
-Zavala
-Shook
-Lockwood
-Jarrett
-Driscoll
-Dahl
-Thorpe
-Sheridan
-Redmond
-Putnam
-Mcwilliams
-Mcrae
-Cornell
-Felton
-Romano
-Joiner
-Sadler
-Hedrick
-Hager
-Hagen
-Fitch
-Coulter
-Thacker
-Mansfield
-Langston
-Guidry
-Ferreira
-Corley
-Conn
-Rossi
-Lackey
-Cody
-Baez
-Saenz
-Mcnamara
-Darnell
-Michel
-Mcmullen
-Mckenna
-Mcdonough
-Link
-Engel
-Browne
-Roper
-Peacock
-Eubanks
-Drummond
-Stringer
-Pritchett
-Parham
-Mims
-Landers
-Ham
-Grayson
-Stacy
-Schafer
-Egan
-Timmons
-Ohara
-Keen
-Hamlin
-Finn
-Cortes
-Mcnair
-Louis
-Clifford
-Nadeau
-Moseley
-Michaud
-Rosen
-Oakes
-Kurtz
-Jeffers
-Calloway
-Beal
-Bautista
-Winn
-Suggs
-Stern
-Stapleton
-Lyles
-Laird
-Montano
-Diamond
-Dawkins
-Roland
-Hagan
-Goldman
-Bryson
-Barajas
-Lovett
-Segura
-Metz
-Lockett
-Langford
-Hinson
-Eastman
-Rock
-Hooks
-Woody
-Smallwood
-Shapiro
-Crowell
-Whalen
-Triplett
-Hooker
-Chatman
-Aldrich
-Cahill
-Youngblood
-Ybarra
-Stallings
-Sheets
-Samuel
-Reeder
-Person
-Pack
-Lacey
-Connelly
-Bateman
-Abernathy
-Winkler
-Wilkes
-Masters
-Hackett
-Granger
-Gillis
-Schmitz
-Sapp
-Napier
-Souza
-Lanier
-Gomes
-Weir
-Otero
-Ledford
-Burroughs
-Babcock
-Ventura
-Siegel
-Dugan
-Clinton
-Christie
-Bledsoe
-Atwood
-Wray
-Varner
-Spangler
-Otto
-Anaya
-Staley
-Kraft
-Fournier
-Eddy
-Belanger
-Wolff
-Thorne
-Bynum
-Burnette
-Boykin
-Swenson
-Purvis
-Pina
-Khan
-Duvall
-Darby
-Xiong
-Kauffman
-Ali
-Yu
-Healy
-Engle
-Corona
-Benoit
-Valle
-Steiner
-Spicer
-Shaver
-Randle
-Lundy
-Dow
-Chin
-Calvert
-Staton
-Neff
-Kearney
-Darden
-Oakley
-Medeiros
-Mccracken
-Crenshaw
-Block
-Beaver
-Perdue
-Dill
-Whittaker
-Tobin
-Cornelius
-Washburn
-Hogue
-Goodrich
-Easley
-Bravo
-Dennison
-Vera
-Shipley
-Kerns
-Jorgensen
-Crain
-Abel
-Villalobos
-Maurer
-Longoria
-Keene
-Coon
-Sierra
-Witherspoon
-Staples
-Pettit
-Kincaid
-Eason
-Madrid
-Echols
-Lusk
-Wu
-Stahl
-Currie
-Thayer
-Shultz
-Sherwood
-Mcnally
-Seay
-North
-Maher
-Kenny
-Hope
-Gagne
-Barrow
-Nava
-Myles
-Moreland
-Honeycutt
-Hearn
-Diggs
-Caron
-Whitten
-Westbrook
-Stovall
-Ragland
-Queen
-Munson
-Meier
-Looney
-Kimble
-Jolly
-Hobson
-London
-Goddard
-Culver
-Burr
-Presley
-Negron
-Connell
-Tovar
-Marcus
-Huddleston
-Hammer
-Ashby
-Salter
-Root
-Pendleton
-Oleary
-Nickerson
-Myrick
-Judd
-Jacobsen
-Elliot
-Bain
-Adair
-Starnes
-Sheldon
-Matos
-Light
-Busby
-Herndon
-Hanley
-Bellamy
-Jack
-Doty
-Bartley
-Yazzie
-Rowell
-Parson
-Gifford
-Cullen
-Christiansen
-Benavides
-Barnhart
-Talbot
-Mock
-Crandall
-Connors
-Bonds
-Whitt
-Gage
-Bergman
-Arredondo
-Addison
-Marion
-Lujan
-Dowdy
-Jernigan
-Huynh
-Bouchard
-Dutton
-Rhoades
-Ouellette
-Kiser
-Rubin
-Herrington
-Hare
-Denny
-Blackman
-Babb
-Allred
-Rudd
-Paulson
-Ogden
-Koenig
-Jacob
-Irving
-Geiger
-Begay
-Parra
-Champion
-Lassiter
-Hawk
-Esposito
-Cho
-Waldron
-Vernon
-Ransom
-Prather
-Keenan
-Jean
-Grover
-Chacon
-Vick
-Sands
-Roark
-Parr
-Mayberry
-Greenberg
-Coley
-Bruner
-Whitman
-Skaggs
-Shipman
-Means
-Leary
-Hutton
-Romo
-Medrano
-Ladd
-Kruse
-Friend
-Darling
-Askew
-Valentin
-Schulz
-Alfaro
-Tabor
-Mohr
-Gallo
-Bermudez
-Pereira
-Isaac
-Bliss
-Reaves
-Flint
-Comer
-Boston
-Woodall
-Naquin
-Guevara
-Earl
-Delong
-Carrier
-Pickens
-Brand
-Tilley
-Schaffer
-Read
-Lim
-Knutson
-Fenton
-Doran
-Chu
-Vogt
-Vann
-Prescott
-Mclain
-Landis
-Corcoran
-Ambrose
-Zapata
-Hyatt
-Hemphill
-Faulk
-Call
-Dove
-Boudreaux
-Aragon
-Whitlock
-Trejo
-Tackett
-Shearer
-Saldana
-Hanks
-Gold
-Driver
-Mckinnon
-Koehler
-Champagne
-Bourgeois
-Pool
-Keyes
-Goodson
-Foote
-Early
-Lunsford
-Goldsmith
-Flood
-Winslow
-Sams
-Reagan
-Mccloud
-Hough
-Esquivel
-Naylor
-Loomis
-Coronado
-Ludwig
-Braswell
-Bearden
-Sherrill
-Huang
-Fagan
-Ezell
-Edmondson
-Cyr
-Cronin
-Nunn
-Lemon
-Guillory
-Grier
-Dubose
-Traylor
-Ryder
-Dobbins
-Coyle
-Aponte
-Whitmore
-Smalls
-Rowan
-Malloy
-Cardona
-Braxton
-Borden
-Humphries
-Carrasco
-Ruff
-Metzger
-Huntley
-Hinojosa
-Finney
-Madsen
-Hong
-Hills
-Ernst
-Dozier
-Burkhart
-Bowser
-Peralta
-Daigle
-Whittington
-Sorenson
-Saucedo
-Roche
-Redding
-Loyd
-Fugate
-Avalos
-Waite
-Lind
-Huston
-Hay
-Benedict
-Hawthorne
-Hamby
-Boyles
-Boles
-Regan
-Faust
-Crook
-Beam
-Barger
-Hinds
-Gallardo
-Elias
-Willoughby
-Willingham
-Wilburn
-Eckert
-Busch
-Zepeda
-Worthington
-Tinsley
-Russ
-Li
-Hoff
-Hawley
-Carmona
-Varela
-Rector
-Newcomb
-Mallory
-Kinsey
-Dube
-Whatley
-Strange
-Ragsdale
-Ivy
-Bernstein
-Becerra
-Yost
-Mattson
-Ly
-Felder
-Cheek
-Luke
-Handy
-Grossman
-Gauthier
-Escobedo
-Braden
-Beckman
-Mott
-Hillman
-Gil
-Flaherty
-Dykes
-Doe
-Stockton
-Stearns
-Lofton
-Kitchen
-Coats
-Cavazos
-Beavers
-Barrios
-Tang
-Parish
-Mosher
-Lincoln
-Cardwell
-Coles
-Burnham
-Weller
-Lemons
-Beebe
-Aguilera
-Ring
-Parnell
-Harman
-Couture
-Alley
-Schumacher
-Redd
-Dobbs
-Blum
-Blalock
-Merchant
-Ennis
-Denson
-Cottrell
-Chester
-Brannon
-Bagley
-Aviles
-Watt
-Sousa
-Rosenthal
-Rooney
-Dietz
-Blank
-Paquette
-Mcclelland
-Duff
-Velasco
-Lentz
-Grubb
-Burrows
-Barbour
-Ulrich
-Shockley
-Rader
-German
-Beyer
-Mixon
-Layton
-Altman
-Alonzo
-Weathers
-Titus
-Stoner
-Squires
-Shipp
-Priest
-Lipscomb
-Cutler
-Caballero
-Zimmer
-Willett
-Thurston
-Storey
-Medley
-Lyle
-Epperson
-Shah
-Mcmillian
-Baggett
-Torrez
-Laws
-Hirsch
-Dent
-Corey
-Poirier
-Peachey
-Jacques
-Farrar
-Creech
-Barth
-Trimble
-France
-Dupre
-Albrecht
-Sample
-Lawler
-Crisp
-Conroy
-Chadwick
-Wetzel
-Nesbitt
-Murry
-Jameson
-Wilhelm
-Patten
-Minton
-Matson
-Kimbrough
-Iverson
-Guinn
-Gale
-Fortune
-Croft
-Toth
-Pulliam
-Nugent
-Newby
-Littlejohn
-Dias
-Canales
-Bernier
-Baron
-Barney
-Singletary
-Renteria
-Pruett
-Mchugh
-Mabry
-Landrum
-Brower
-Weldon
-Stoddard
-Ruth
-Cagle
-Stjohn
-Scales
-Kohler
-Kellogg
-Hopson
-Gant
-Tharp
-Gann
-Zeigler
-Pringle
-Hammons
-Fairchild
-Deaton
-Chavis
-Carnes
-Rowley
-Matlock
-Libby
-Kearns
-Irizarry
-Carrington
-Starkey
-Pepper
-Lopes
-Jarrell
-Fay
-Craven
-Beverly
-Baum
-Spain
-Littlefield
-Linn
-Humphreys
-Hook
-High
-Etheridge
-Cuellar
-Chastain
-Chance
-Bundy
-Speer
-Skelton
-Quiroz
-Pyle
-Portillo
-Ponder
-Moulton
-Machado
-Liu
-Killian
-Hutson
-Hitchcock
-Ellsworth
-Dowling
-Cloud
-Burdick
-Spann
-Pedersen
-Levin
-Leggett
-Hayward
-Hacker
-Dietrich
-Beaulieu
-Barksdale
-Wakefield
-Snowden
-Paris
-Briscoe
-Bowie
-Berman
-Ogle
-Mcgregor
-Laughlin
-Helm
-Burden
-Wheatley
-Schreiber
-Pressley
-Parris
-Ng
-Alaniz
-Agee
-Urban
-Swann
-Snodgrass
-Schuster
-Radford
-Monk
-Mattingly
-Main
-Lamar
-Harp
-Girard
-Cheney
-Yancey
-Wagoner
-Ridley
-Lombardo
-Lau
-Hudgins
-Gaskins
-Duckworth
-Coe
-Coburn
-Willey
-Prado
-Newberry
-Magana
-Hammonds
-Elam
-Whipple
-Slade
-Serna
-Ojeda
-Liles
-Dorman
-Diehl
-Angel
-Upton
-Reardon
-Michaels
-Kelsey
-Goetz
-Eller
-Bauman
-Baer
-Augustine
-Layne
-Hummel
-Brenner
-Amaya
-Adamson
-Ornelas
-Dowell
-Cloutier
-Christy
-Castellanos
-Wing
-Wellman
-Saylor
-Orourke
-Moya
-Montalvo
-Kilpatrick
-Harley
-Durbin
-Shell
-Oldham
-Kang
-Garvin
-Foss
-Branham
-Bartholomew
-Templeton
-Maguire
-Holton
-Alonso
-Rider
-Monahan
-Mccormack
-Beaty
-Anders
-Streeter
-Nieto
-Nielson
-Moffett
-Lankford
-Keating
-Heck
-Gatlin
-Delatorre
-Callaway
-Adcock
-Worrell
-Unger
-Robinette
-Nowak
-Jeter
-Brunner
-Ashton
-Steen
-Parrott
-Overstreet
-Nobles
-Montanez
-Luther
-Clevenger
-Brinkley
-Trahan
-Quarles
-Pickering
-Pederson
-Jansen
-Grantham
-Gilchrist
-Crespo
-Aiken
-Schell
-Schaeffer
-Lorenz
-Leyva
-Harms
-Dyson
-Wallis
-Pease
-Leavitt
-Hyman
-Cheng
-Cavanaugh
-Batts
-Warden
-Seaman
-Rockwell
-Quezada
-Paxton
-Linder
-Houck
-Fontaine
-Durant
-Caruso
-Adler
-Pimentel
-Mize
-Lytle
-Donald
-Cleary
-Cason
-Acker
-Switzer
-Salmon
-Isaacs
-Higginbotham
-Han
-Waterman
-Vandyke
-Stamper
-Sisk
-Shuler
-Riddick
-Redman
-Mcmahan
-Levesque
-Hatton
-Bronson
-Bollinger
-Arnett
-Okeefe
-Gerber
-Gannon
-Farnsworth
-Baughman
-Silverman
-Satterfield
-Royal
-Mccrary
-Kowalski
-Joy
-Grigsby
-Greco
-Cabral
-Trout
-Rinehart
-Mahon
-Linton
-Gooden
-Curley
-Baugh
-Wyman
-Weiner
-Schwab
-Schuler
-Morrissey
-Mahan
-Coy
-Bunn
-Andrew
-Thrasher
-Spear
-Waggoner
-Shelley
-Robert
-Qualls
-Purdy
-Mcwhorter
-Mauldin
-Mark
-Jordon
-Gilman
-Perryman
-Newsom
-Menard
-Martino
-Graf
-Billingsley
-Artis
-Simpkins
-Salisbury
-Quintanilla
-Gilliland
-Fraley
-Foust
-Crouse
-Scarborough
-Ngo
-Grissom
-Fultz
-Rico
-Marlow
-Markham
-Madrigal
-Lawton
-Barfield
-Whiting
-Varney
-Schwarz
-Huey
-Gooch
-Arce
-Wheat
-Truong
-Poulin
-Mackenzie
-Leone
-Hurtado
-Selby
-Gaither
-Fortner
-Culpepper
-Coughlin
-Brinson
-Boudreau
-Barkley
-Bales
-Stepp
-Holm
-Tan
-Schilling
-Morrell
-Kahn
-Heaton
-Gamez
-Douglass
-Causey
-Brothers
-Turpin
-Shanks
-Schrader
-Meek
-Isom
-Hardison
-Carranza
-Yanez
-Way
-Scroggins
-Schofield
-Runyon
-Ratcliff
-Murrell
-Moeller
-Irby
-Currier
-Butterfield
-Yee
-Ralston
-Pullen
-Pinson
-Estep
-East
-Carbone
-Lance
-Hawks
-Ellington
-Casillas
-Spurlock
-Sikes
-Motley
-Mccartney
-Kruger
-Isbell
-Houle
-Francisco
-Burk
-Bone
-Tomlin
-Shelby
-Quigley
-Neumann
-Lovelace
-Fennell
-Colby
-Cheatham
-Bustamante
-Skidmore
-Hidalgo
-Forman
-Culp
-Bowens
-Betancourt
-Aquino
-Robb
-Rea
-Milner
-Martel
-Gresham
-Wiles
-Ricketts
-Gavin
-Dowd
-Collazo
-Bostic
-Blakely
-Sherrod
-Power
-Kenyon
-Gandy
-Ebert
-Deloach
-Cary
-Bull
-Allard
-Sauer
-Robins
-Olivares
-Gillette
-Chestnut
-Bourque
-Paine
-Lyman
-Hite
-Hauser
-Devore
-Crawley
-Chapa
-Vu
-Tobias
-Talbert
-Poindexter
-Millard
-Meador
-Mcduffie
-Mattox
-Kraus
-Harkins
-Choate
-Bess
-Wren
-Sledge
-Sanborn
-Outlaw
-Kinder
-Geary
-Cornwell
-Barclay
-Adam
-Abney
-Seward
-Rhoads
-Howland
-Fortier
-Easter
-Benner
-Vines
-Tubbs
-Troutman
-Rapp
-Noe
-Mccurdy
-Harder
-Deluca
-Westmoreland
-South
-Havens
-Guajardo
-Ely
-Clary
-Seal
-Meehan
-Herzog
-Guillen
-Ashcraft
-Waugh
-Renner
-Milam
-Jung
-Elrod
-Churchill
-Buford
-Breaux
-Bolin
-Asher
-Windham
-Tirado
-Pemberton
-Nolen
-Noland
-Knott
-Emmons
-Cornish
-Christenson
-Brownlee
-Barbee
-Waldrop
-Pitt
-Olvera
-Lombardi
-Gruber
-Gaffney
-Eggleston
-Banda
-Archuleta
-Still
-Slone
-Prewitt
-Pfeiffer
-Nettles
-Mena
-Mcadams
-Henning
-Gardiner
-Cromwell
-Chisholm
-Burleson
-Box
-Vest
-Oglesby
-Mccarter
-Malcolm
-Lumpkin
-Larue
-Grey
-Wofford
-Vanhorn
-Thorn
-Teel
-Swafford
-Stclair
-Stanfield
-Ocampo
-Herrmann
-Hannon
-Arsenault
-Roush
-Mcalister
-Hiatt
-Gunderson
-Forsythe
-Duggan
-Delvalle
-Cintron
-Wilks
-Weinstein
-Uribe
-Rizzo
-Noyes
-Mclendon
-Gurley
-Bethea
-Winstead
-Maples
-Harry
-Guyton
-Giordano
-Alderman
-Valdes
-Polanco
-Pappas
-Lively
-Grogan
-Griffiths
-Bobo
-Arevalo
-Whitson
-Sowell
-Rendon
-Matthew
-Julian
-Fernandes
-Farrow
-Edmond
-Benavidez
-Ayres
-Alicea
-Stump
-Smalley
-Seitz
-Schulte
-Gilley
-Gallant
-Dewey
-Casper
-Canfield
-Wolford
-Omalley
-Mcnutt
-Mcnulty
-Mcgovern
-Hardman
-Harbin
-Cowart
-Chavarria
-Brink
-Beckett
-Bagwell
-Armstead
-Anglin
-Abreu
-Reynoso
-Krebs
-Jett
-Hoffmann
-Greenfield
-Forte
-Burney
-Broome
-Sisson
-Parent
-Jude
-Younger
-Trammell
-Partridge
-Marvin
-Mace
-Lomax
-Lemieux
-Gossett
-Frantz
-Fogle
-Cooney
-Broughton
-Pence
-Paulsen
-Neil
-Muncy
-Mcarthur
-Hollins
-Edward
-Beauchamp
-Withers
-Osorio
-Mulligan
-Hoyle
-Foy
-Dockery
-Cockrell
-Begley
-Amador
-Roby
-Rains
-Lindquist
-Gentile
-Everhart
-Bohannon
-Wylie
-Thao
-Sommers
-Purnell
-Palma
-Fortin
-Dunning
-Breeden
-Vail
-Phelan
-Phan
-Marx
-Cosby
-Colburn
-Chong
-Boling
-Biddle
-Ledesma
-Gaddis
-Denney
-Chow
-Bueno
-Berrios
-Wicker
-Tolliver
-Thibodeaux
-Nagle
-Lavoie
-Fisk
-Do
-Crist
-Barbosa
-Reedy
-March
-Locklear
-Kolb
-Himes
-Behrens
-Beckwith
-Beckham
-Weems
-Wahl
-Shorter
-Shackelford
-Rees
-Muse
-Free
-Cerda
-Valadez
-Thibodeau
-Saavedra
-Ridgeway
-Reiter
-Mchenry
-Majors
-Lachance
-Keaton
-Israel
-Ferrara
-Falcon
-Clemens
-Blocker
-Applegate
-Paz
-Needham
-Mojica
-Kuykendall
-Hamel
-Escamilla
-Doughty
-Burchett
-Ainsworth
-Wilbur
-Vidal
-Upchurch
-Thigpen
-Strauss
-Spruill
-Sowers
-Riggins
-Ricker
-Mccombs
-Harlow
-Garnett
-Buffington
-Yi
-Sotelo
-Olivas
-Negrete
-Morey
-Macon
-Logsdon
-Lapointe
-Florence
-Cathey
-Bigelow
-Bello
-Westfall
-Stubblefield
-Peak
-Lindley
-Jeffrey
-Hein
-Hawes
-Farrington
-Edge
-Breen
-Birch
-Wilde
-Steed
-Sepulveda
-Reinhardt
-Proffitt
-Minter
-Messina
-Mcnabb
-Maier
-Keeler
-Gamboa
-Donohue
-Dexter
-Basham
-Shinn
-Orlando
-Crooks
-Cota
-Borders
-Bills
-Bachman
-Tisdale
-Tavares
-Schmid
-Pickard
-Jasper
-Gulley
-Fonseca
-Delossantos
-Condon
-Clancy
-Batista
-Wicks
-Wadsworth
-New
-Martell
-Lo
-Littleton
-Ison
-Haag
-Folsom
-Brumfield
-Broyles
-Brito
-Mireles
-Mcdonnell
-Leclair
-Hamblin
-Gough
-Fanning
-Binder
-Winfield
-Whitworth
-Soriano
-Palumbo
-Newkirk
-Mangum
-Hutcherson
-Comstock
-Cecil
-Carlin
-Beall
-Bair
-Wendt
-Watters
-Walling
-Putman
-Otoole
-Oliva
-Morley
-Mares
-Lemus
-Keener
-Jeffery
-Hundley
-Dial
-Damico
-Billups
-Strother
-Mcfarlane
-Lamm
-Eaves
-Crutcher
-Caraballo
-Canty
-Atwell
-Taft
-Siler
-Rust
-Rawls
-Rawlings
-Prieto
-Niles
-Mcneely
-Mcafee
-Hulsey
-Harlan
-Hackney
-Galvez
-Escalante
-Delagarza
-Crider
-Charlton
-Bandy
-Wilbanks
-Stowe
-Steinberg
-Samson
-Renfro
-Masterson
-Massie
-Lanham
-Haskell
-Hamrick
-Fort
-Dehart
-Card
-Burdette
-Branson
-Bourne
-Babin
-Aleman
-Worthy
-Tibbs
-Sweat
-Smoot
-Slack
-Paradis
-Packard
-Mull
-Luce
-Houghton
-Gantt
-Furman
-Danner
-Christianson
-Burge
-Broderick
-Ashford
-Arndt
-Almeida
-Stallworth
-Shade
-Searcy
-Sager
-Noonan
-Mclemore
-Mcintire
-Maxey
-Lavigne
-Jobe
-Ireland
-Ferrer
-Falk
-Edgar
-Coffin
-Byrnes
-Aranda
-Apodaca
-Stamps
-Rounds
-Peek
-Olmstead
-Lewandowski
-Kaminski
-Her
-Dunaway
-Bruns
-Brackett
-Amato
-Reich
-Mcclung
-Lacroix
-Koontz
-Herrick
-Hardesty
-Flanders
-Cousins
-Close
-Cato
-Cade
-Vickery
-Shank
-Nagel
-Dupuis
-Croteau
-Cotter
-Cable
-Stuckey
-Stine
-Porterfield
-Pauley
-Nye
-Moffitt
-Lu
-Knudsen
-Hardwick
-Goforth
-Dupont
-Blunt
-Barrows
-Barnhill
-Shull
-Rash
-Ralph
-Penny
-Lorenzo
-Loftis
-Lemay
-Kitchens
-Horvath
-Grenier
-Fuchs
-Fairbanks
-Culbertson
-Calkins
-Burnside
-Beattie
-Ashworth
-Albertson
-Wertz
-Vo
-Vaught
-Vallejo
-Tyree
-Turk
-Tuck
-Tijerina
-Sage
-Picard
-Peterman
-Otis
-Marroquin
-Marr
-Lantz
-Hoang
-Demarco
-Daily
-Cone
-Berube
-Barnette
-Wharton
-Stinnett
-Slocum
-Scanlon
-Sander
-Pinto
-Mancuso
-Lima
-Judge
-Headley
-Epstein
-Counts
-Clarkson
-Carnahan
-Brice
-Boren
-Arteaga
-Adame
-Zook
-Whittle
-Whitehurst
-Wenzel
-Saxton
-Rhea
-Reddick
-Puente
-Hazel
-Handley
-Haggerty
-Earley
-Devlin
-Dallas
-Chaffin
-Cady
-Ahmed
-Acuna
-Solano
-Sigler
-Pollack
-Pendergrass
-Ostrander
-Janes
-Francois
-Fine
-Crutchfield
-Cordell
-Chamberlin
-Brubaker
-Baptiste
-Willson
-Reis
-Neeley
-Mullin
-Mercier
-Lira
-Layman
-Keeling
-Higdon
-Guest
-Forrester
-Espinal
-Dion
-Chapin
-Carl
-Warfield
-Toledo
-Pulido
-Peebles
-Nagy
-Montague
-Mello
-Lear
-Jaeger
-Hogg
-Graff
-Furr
-Derrick
-Cave
-Canada
-Soliz
-Poore
-Mendenhall
-Mclaurin
-Maestas
-Low
-Gable
-Belt
-Barraza
-Tillery
-Snead
-Pond
-Neill
-Mcculloch
-Mccorkle
-Lightfoot
-Hutchings
-Holloman
-Harness
-Dorn
-Council
-Bock
-Zielinski
-Turley
-Treadwell
-Stpierre
-Starling
-Somers
-Oswald
-Merrick
-Marquis
-Ivory
-Easterling
-Bivens
-Truitt
-Poston
-Parry
-Ontiveros
-Olivarez
-Neville
-Moreau
-Medlin
-Ma
-Lenz
-Knowlton
-Fairley
-Cobbs
-Chisolm
-Bannister
-Woodworth
-Toler
-Ocasio
-Noriega
-Neuman
-Moye
-Milburn
-Mcclanahan
-Lilley
-Hanes
-Flannery
-Dellinger
-Danielson
-Conti
-Blodgett
-Beers
-Weatherford
-Strain
-Karr
-Hitt
-Denham
-Custer
-Coble
-Clough
-Casteel
-Bolduc
-Batchelor
-Ammons
-Whitlow
-Tierney
-Staten
-Sibley
-Seifert
-Schubert
-Salcedo
-Mattison
-Laney
-Haggard
-Grooms
-Dix
-Dees
-Cromer
-Cooks
-Colson
-Caswell
-Zarate
-Swisher
-Stacey
-Shin
-Ragan
-Pridgen
-Mcvey
-Matheny
-Leigh
-Lafleur
-Franz
-Ferraro
-Dugger
-Whiteside
-Rigsby
-Mcmurray
-Lehmann
-Large
-Jacoby
-Hildebrand
-Hendrick
-Headrick
-Goad
-Fincher
-Drury
-Borges
-Archibald
-Albers
-Woodcock
-Trapp
-Soares
-Seaton
-Richie
-Monson
-Luckett
-Lindberg
-Kopp
-Keeton
-Hsu
-Healey
-Garvey
-Gaddy
-Fain
-Burchfield
-Badger
-Wentworth
-Strand
-Stack
-Spooner
-Saucier
-Sales
-Ruby
-Ricci
-Plunkett
-Pannell
-Ness
-Leger
-Hoy
-Freitas
-Fong
-Elizondo
-Duval
-Chun
-Calvin
-Beaudoin
-Urbina
-Stock
-Rickard
-Partin
-Moe
-Mcgrew
-Mcclintock
-Ledoux
-Forsyth
-Faison
-Devries
-Bertrand
-Wasson
-Tilton
-Scarbrough
-Pride
-Oh
-Leung
-Larry
-Irvine
-Garber
-Denning
-Corral
-Colley
-Castleberry
-Bowlin
-Bogan
-Beale
-Baines
-True
-Trice
-Rayburn
-Parkinson
-Pak
-Nunes
-Mcmillen
-Leahy
-Lea
-Kimmel
-Higgs
-Fulmer
-Carden
-Bedford
-Taggart
-Spearman
-Register
-Prichard
-Morrill
-Koonce
-Heinz
-Hedges
-Guenther
-Grice
-Findley
-Earle
-Dover
-Creighton
-Boothe
-Bayer
-Arreola
-Vitale
-Valles
-See
-Raney
-Peter
-Osgood
-Lowell
-Hanlon
-Burley
-Bounds
-Worden
-Weatherly
-Vetter
-Tanaka
-Stiltner
-Sell
-Nevarez
-Mosby
-Montero
-Melancon
-Harter
-Hamer
-Goble
-Gladden
-Gist
-Ginn
-Akin
-Zaragoza
-Towns
-Tarver
-Sammons
-Royster
-Oreilly
-Muir
-Morehead
-Luster
-Kingsley
-Kelso
-Grisham
-Glynn
-Baumann
-Alves
-Yount
-Tamayo
-Tam
-Paterson
-Oates
-Menendez
-Longo
-Hargis
-Greenlee
-Gillen
-Desantis
-Conover
-Breedlove
-Wayne
-Sumpter
-Scherer
-Rupp
-Reichert
-Heredia
-Fallon
-Creel
-Cohn
-Clemmons
-Casas
-Bickford
-Belton
-Bach
-Williford
-Whitcomb
-Tennant
-Sutter
-Stull
-Sessions
-Mccallum
-Manson
-Langlois
-Keel
-Keegan
-Emanuel
-Dangelo
-Dancy
-Damron
-Clapp
-Clanton
-Bankston
-Trinidad
-Oliveira
-Mintz
-Mcinnis
-Martens
-Mabe
-Laster
-Jolley
-Irish
-Hildreth
-Hefner
-Glaser
-Duckett
-Demers
-Brockman
-Blais
-Back
-Alcorn
-Agnew
-Toliver
-Tice
-Song
-Seeley
-Najera
-Musser
-Mcfall
-Laplante
-Galvin
-Fajardo
-Doan
-Coyne
-Copley
-Clawson
-Cheung
-Barone
-Wynne
-Woodley
-Tremblay
-Stoll
-Sparrow
-Sparkman
-Schweitzer
-Sasser
-Samples
-Roney
-Ramon
-Legg
-Lai
-Joe
-Heim
-Farias
-Concepcion
-Colwell
-Christman
-Bratcher
-Alba
-Winchester
-Upshaw
-Southerland
-Sorrell
-Shay
-Sells
-Mount
-Mccloskey
-Martindale
-Luttrell
-Loveless
-Lovejoy
-Linares
-Latimer
-Holly
-Embry
-Coombs
-Bratton
-Bostick
-Boss
-Venable
-Tuggle
-Toro
-Staggs
-Sandlin
-Jefferies
-Heckman
-Griffis
-Crayton
-Clem
-Button
-Browder
-Allan
-Thorton
-Sturgill
-Sprouse
-Royer
-Rousseau
-Ridenour
-Pogue
-Perales
-Peeples
-Metzler
-Mesa
-Mccutcheon
-Mcbee
-Jay
-Hornsby
-Heffner
-Corrigan
-Armijo
-Vue
-Romeo
-Plante
-Peyton
-Paredes
-Macklin
-Hussey
-Hodgson
-Granados
-Frias
-Carman
-Brent
-Becnel
-Batten
-Almanza
-Turney
-Teal
-Sturgeon
-Meeker
-Mcdaniels
-Limon
-Keeney
-Kee
-Hutto
-Holguin
-Gorham
-Fishman
-Fierro
-Blanchette
-Rodrigue
-Reddy
-Osburn
-Oden
-Lerma
-Kirkwood
-Keefer
-Haugen
-Hammett
-Chalmers
-Carlos
-Brinkman
-Baumgartner
-Zhang
-Valerio
-Tellez
-Steffen
-Shumate
-Sauls
-Ripley
-Kemper
-Jacks
-Guffey
-Evers
-Craddock
-Carvalho
-Blaylock
-Banuelos
-Balderas
-Wooden
-Wheaton
-Turnbull
-Shuman
-Pointer
-Mosier
-Mccue
-Ligon
-Kozlowski
-Johansen
-Ingle
-Herr
-Briones
-Southern
-Snipes
-Rickman
-Pipkin
-Peace
-Pantoja
-Orosco
-Moniz
-Lawless
-Kunkel
-Hibbard
-Galarza
-Enos
-Bussey
-Settle
-Schott
-Salcido
-Perreault
-Mcdougal
-Mccool
-Haight
-Garris
-Ferry
-Easton
-Conyers
-Atherton
-Wimberly
-Utley
-Stephen
-Spellman
-Smithson
-Slagle
-Skipper
-Ritchey
-Rand
-Petit
-Osullivan
-Oaks
-Nutt
-Mcvay
-Mccreary
-Mayhew
-Knoll
-Jewett
-Harwood
-Hailey
-Cardoza
-Ashe
-Arriaga
-Andres
-Zeller
-Wirth
-Whitmire
-Stauffer
-Spring
-Rountree
-Redden
-Mccaffrey
-Martz
-Loving
-Larose
-Langdon
-Humes
-Gaskin
-Faber
-Doll
-Devito
-Cass
-Almond
-Wingfield
-Wingate
-Villareal
-Tyner
-Smothers
-Severson
-Reno
-Pennell
-Maupin
-Leighton
-Janssen
-Hassell
-Hallman
-Halcomb
-Folse
-Fitzsimmons
-Fahey
-Cranford
-Bolen
-Battles
-Battaglia
-Wooldridge
-Weed
-Trask
-Rosser
-Regalado
-Mcewen
-Keefe
-Fuqua
-Echevarria
-Domingo
-Dang
-Caro
-Boynton
-Andrus
-Wild
-Viera
-Vanmeter
-Taber
-Spradlin
-Seibert
-Provost
-Prentice
-Oliphant
-Laporte
-Hwang
-Hatchett
-Hass
-Greiner
-Freedman
-Covert
-Chilton
-Byars
-Wiese
-Venegas
-Swank
-Shrader
-Roderick
-Roberge
-Mullis
-Mortensen
-Mccune
-Marlowe
-Kirchner
-Keck
-Isaacson
-Hostetler
-Halverson
-Gunther
-Griswold
-Gerard
-Fenner
-Durden
-Blackwood
-Bertram
-Ahrens
-Sawyers
-Savoy
-Nabors
-Mcswain
-Mackay
-Loy
-Lavender
-Lash
-Labbe
-Jessup
-Hubert
-Fullerton
-Donnell
-Cruse
-Crittenden
-Correia
-Centeno
-Caudle
-Canady
-Callender
-Alarcon
-Ahern
-Winfrey
-Tribble
-Tom
-Styles
-Salley
-Roden
-Musgrove
-Minnick
-Fortenberry
-Carrion
-Bunting
-Bethel
-Batiste
-Woo
-Whited
-Underhill
-Stillwell
-Silvia
-Rauch
-Pippin
-Perrin
-Messenger
-Mancini
-Lister
-Kinard
-Hartmann
-Fleck
-Broadway
-Wilt
-Treadway
-Thornhill
-Speed
-Spalding
-Sam
-Rafferty
-Pitre
-Patino
-Ordonez
-Linkous
-Kelleher
-Homan
-Holiday
-Galbraith
-Feeney
-Dorris
-Curtin
-Coward
-Camarillo
-Buss
-Bunnell
-Bolt
-Beeler
-Autry
-Alcala
-Witte
-Wentz
-Stidham
-Shively
-Nunley
-Meacham
-Martins
-Lemke
-Lefebvre
-Kaye
-Hynes
-Horowitz
-Hoppe
-Holcombe
-Estrella
-Dunne
-Derr
-Cochrane
-Brittain
-Bedard
-Beauregard
-Torrence
-Strunk
-Soria
-Simonson
-Shumaker
-Scoggins
-Packer
-Oconner
-Moriarty
-Leroy
-Kuntz
-Ives
-Hutcheson
-Horan
-Hales
-Garmon
-Fitts
-Dell
-Bohn
-Atchison
-Worth
-Wisniewski
-Will
-Vanwinkle
-Sturm
-Sallee
-Prosser
-Moen
-Lundberg
-Kunz
-Kohl
-Keane
-Jorgenson
-Jaynes
-Funderburk
-Freed
-Frame
-Durr
-Creamer
-Cosgrove
-Candelaria
-Berlin
-Batson
-Vanhoose
-Thomsen
-Teeter
-Sommer
-Smyth
-Sena
-Redmon
-Orellana
-Maness
-Lennon
-Heflin
-Goulet
-Frick
-Forney
-Dollar
-Bunker
-Asbury
-Aguiar
-Talbott
-Southard
-Pleasant
-Mowery
-Mears
-Lemmon
-Krieger
-Hickson
-Gracia
-Elston
-Duong
-Delgadillo
-Dayton
-Dasilva
-Conaway
-Catron
-Bruton
-Bradbury
-Bordelon
-Bivins
-Bittner
-Bergstrom
-Beals
-Abell
-Whelan
-Travers
-Tejada
-Pulley
-Pino
-Norfleet
-Nealy
-Maes
-Loper
-Held
-Gerald
-Gatewood
-Frierson
-Freund
-Finnegan
-Cupp
-Covey
-Catalano
-Boehm
-Bader
-Yoon
-Walston
-Tenney
-Sipes
-Roller
-Rawlins
-Medlock
-Mccaskill
-Mccallister
-Marcotte
-Maclean
-Hughey
-Henke
-Harwell
-Gladney
-Gilson
-Dew
-Chism
-Caskey
-Brandenburg
-Baylor
-Villasenor
-Veal
-Van
-Thatcher
-Stegall
-Shore
-Petrie
-Nowlin
-Navarrete
-Muhammad
-Lombard
-Loftin
-Lemaster
-Kroll
-Kovach
-Kimbrell
-Kidwell
-Hershberger
-Fulcher
-Eng
-Cantwell
-Bustos
-Boland
-Bobbitt
-Binkley
-Wester
-Weis
-Verdin
-Tong
-Tiller
-Sisco
-Sharkey
-Seymore
-Rosenbaum
-Rohr
-Quinonez
-Pinkston
-Nation
-Malley
-Logue
-Lessard
-Lerner
-Lebron
-Krauss
-Klinger
-Halstead
-Haller
-Getz
-Burrow
-Brant
-Alger
-Victor
-Shores
-Scully
-Pounds
-Pfeifer
-Perron
-Nelms
-Munn
-Mcmaster
-Mckenney
-Manns
-Knudson
-Hutchens
-Huskey
-Goebel
-Flagg
-Cushman
-Click
-Castellano
-Carder
-Bumgarner
-Blaine
-Bible
-Wampler
-Spinks
-Robson
-Neel
-Mcreynolds
-Mathias
-Maas
-Loera
-Kasper
-Jose
-Jenson
-Florez
-Coons
-Buckingham
-Brogan
-Berryman
-Wilmoth
-Wilhite
-Thrash
-Shephard
-Seidel
-Schulze
-Roldan
-Pettis
-Obryan
-Maki
-Mackie
-Hatley
-Frazer
-Fiore
-Falls
-Chesser
-Bui
-Bottoms
-Bisson
-Benefield
-Allman
-Wilke
-Trudeau
-Timm
-Shifflett
-Rau
-Mundy
-Milliken
-Mayers
-Leake
-Kohn
-Huntington
-Horsley
-Hermann
-Guerin
-Fryer
-Frizzell
-Foret
-Flemming
-Fife
-Criswell
-Carbajal
-Bozeman
-Boisvert
-Archie
-Antonio
-Angulo
-Wallen
-Tapp
-Silvers
-Ramsay
-Oshea
-Orta
-Moll
-Mckeever
-Mcgehee
-Luciano
-Linville
-Kiefer
-Ketchum
-Howerton
-Groce
-Gaylord
-Gass
-Fusco
-Corbitt
-Blythe
-Betz
-Bartels
-Amaral
-Aiello
-Yoo
-Weddle
-Troy
-Sun
-Sperry
-Seiler
-Runyan
-Raley
-Overby
-Osteen
-Olds
-Mckeown
-Mauro
-Matney
-Lauer
-Lattimore
-Hindman
-Hartwell
-Fredrickson
-Fredericks
-Espino
-Clegg
-Carswell
-Cambell
-Burkholder
-August
-Woodbury
-Welker
-Totten
-Thornburg
-Theriault
-Stitt
-Stamm
-Stackhouse
-Simone
-Scholl
-Saxon
-Rife
-Razo
-Quinlan
-Pinkerton
-Olivo
-Nesmith
-Nall
-Mattos
-Leak
-Lafferty
-Justus
-Giron
-Geer
-Fielder
-Eagle
-Drayton
-Dortch
-Conners
-Conger
-Chau
-Boatwright
-Billiot
-Barden
-Armenta
-Antoine
-Tibbetts
-Steadman
-Slattery
-Sides
-Rinaldi
-Raynor
-Rayford
-Pinckney
-Pettigrew
-Nickel
-Milne
-Matteson
-Halsey
-Gonsalves
-Fellows
-Durand
-Desimone
-Cowley
-Cowles
-Brill
-Barham
-Barela
-Barba
-Ashmore
-Withrow
-Valenti
-Tejeda
-Spriggs
-Sayre
-Salerno
-Place
-Peltier
-Peel
-Merriman
-Matheson
-Lowman
-Lindstrom
-Hyland
-Homer
-Ha
-Giroux
-Fries
-Frasier
-Earls
-Dugas
-Damon
-Dabney
-Collado
-Briseno
-Baxley
-Andre
-Word
-Whyte
-Wenger
-Vanover
-Vanburen
-Thiel
-Schindler
-Schiller
-Rigby
-Pomeroy
-Passmore
-Marble
-Manzo
-Mahaffey
-Lindgren
-Laflamme
-Greathouse
-Fite
-Ferrari
-Calabrese
-Bayne
-Yamamoto
-Wick
-Townes
-Thames
-Steel
-Reinhart
-Peeler
-Naranjo
-Montez
-Mcdade
-Mast
-Markley
-Marchand
-Leeper
-Kong
-Kellum
-Hudgens
-Hennessey
-Hadden
-Guess
-Gainey
-Coppola
-Borrego
-Bolling
-Beane
-Ault
-Slaton
-Poland
-Pape
-Null
-Mulkey
-Lightner
-Langer
-Hillard
-Glasgow
-Fabian
-Ethridge
-Enright
-Derosa
-Baskin
-Alfred
-Weinberg
-Turman
-Tinker
-Somerville
-Pardo
-Noll
-Lashley
-Ingraham
-Hiller
-Hendon
-Glaze
-Flora
-Cothran
-Cooksey
-Conte
-Carrico
-Apple
-Abner
-Wooley
-Swope
-Summerlin
-Sturgis
-Sturdivant
-Stott
-Spurgeon
-Spillman
-Speight
-Roussel
-Popp
-Nutter
-Mckeon
-Mazza
-Magnuson
-Lanning
-Kozak
-Jankowski
-Heyward
-Forster
-Corwin
-Callaghan
-Bays
-Wortham
-Usher
-Theriot
-Sayers
-Sabo
-Rupert
-Poling
-Nathan
-Loya
-Lieberman
-Levi
-Laroche
-Labelle
-Howes
-Harr
-Garay
-Fogarty
-Everson
-Durkin
-Dominquez
-Chaves
-Chambliss
-Alfonso
-Witcher
-Wilber
-Vieira
-Vandiver
-Terrill
-Stoker
-Schreiner
-Nestor
-Moorman
-Liddell
-Lew
-Lawhorn
-Krug
-Irons
-Hylton
-Hollenbeck
-Herrin
-Hembree
-Hair
-Goolsby
-Goodin
-Gilmer
-Foltz
-Dinkins
-Daughtry
-Caban
-Brim
-Briley
-Bilodeau
-Bear
-Wyant
-Vergara
-Tallent
-Swearingen
-Stroup
-Sherry
-Scribner
-Roger
-Quillen
-Pitman
-Monaco
-Mccants
-Maxfield
-Martinson
-Landon
-Holtz
-Flournoy
-Brookins
-Brody
-Baumgardner
-Angelo
-Straub
-Sills
-Roybal
-Roundtree
-Oswalt
-Money
-Mcgriff
-Mcdougall
-Mccleary
-Maggard
-Gragg
-Gooding
-Godinez
-Doolittle
-Donato
-Cowell
-Cassell
-Bracken
-Appel
-Ahmad
-Zambrano
-Reuter
-Perea
-Olive
-Nakamura
-Monaghan
-Mickens
-Mcclinton
-Mcclary
-Marler
-Kish
-Judkins
-Gilbreath
-Freese
-Flanigan
-Felts
-Erdmann
-Dodds
-Chew
-Brownell
-Brazil
-Boatright
-Barreto
-Slayton
-Sandberg
-Saldivar
-Pettway
-Odum
-Narvaez
-Moultrie
-Montemayor
-Merrell
-Lees
-Keyser
-Hoke
-Hardaway
-Hannan
-Gilbertson
-Fogg
-Dumont
-Deberry
-Coggins
-Carrera
-Buxton
-Bucher
-Broadnax
-Beeson
-Araujo
-Appleton
-Amundson
-Aguayo
-Ackley
-Yocum
-Worsham
-Shivers
-Shelly
-Sanches
-Sacco
-Robey
-Rhoden
-Pender
-Ochs
-Mccurry
-Madera
-Luong
-Luis
-Knotts
-Jackman
-Heinrich
-Hargrave
-Gault
-Forest
-Comeaux
-Chitwood
-Child
-Caraway
-Boettcher
-Bernhardt
-Barrientos
-Zink
-Wickham
-Whiteman
-Thorp
-Stillman
-Settles
-Schoonover
-Roque
-Riddell
-Rey
-Pilcher
-Phifer
-Novotny
-Maple
-Macleod
-Hardee
-Haase
-Grider
-Fredrick
-Earnest
-Doucette
-Clausen
-Christmas
-Bevins
-Beamon
-Badillo
-Tolley
-Tindall
-Soule
-Snook
-Sebastian
-Seale
-Pitcher
-Pinkney
-Pellegrino
-Nowell
-Nemeth
-Nail
-Mondragon
-Mclane
-Lundgren
-Ingalls
-Hudspeth
-Hixson
-Gearhart
-Furlong
-Downes
-Dionne
-Dibble
-Deyoung
-Cornejo
-Camara
-Brookshire
-Boyette
-Wolcott
-Tracey
-Surratt
-Sellars
-Segal
-Salyer
-Reeve
-Rausch
-Philips
-Labonte
-Haro
-Gower
-Freeland
-Fawcett
-Eads
-Driggers
-Donley
-Collett
-Cage
-Bromley
-Boatman
-Ballinger
-Baldridge
-Volz
-Trombley
-Stonge
-Silas
-Shanahan
-Rivard
-Rhyne
-Pedroza
-Matias
-Mallard
-Jamieson
-Hedgepeth
-Hartnett
-Estevez
-Eskridge
-Denman
-Chiu
-Chinn
-Catlett
-Carmack
-Buie
-Book
-Bechtel
-Beardsley
-Bard
-Ballou
-Windsor
-Ulmer
-Storm
-Skeen
-Robledo
-Rincon
-Reitz
-Piazza
-Pearl
-Munger
-Moten
-Mcmichael
-Loftus
-Ledet
-Kersey
-Groff
-Fowlkes
-Folk
-Crumpton
-Collette
-Clouse
-Bettis
-Villagomez
-Timmerman
-Strom
-Saul
-Santoro
-Roddy
-Phillip
-Penrod
-Musselman
-Macpherson
-Leboeuf
-Harless
-Haddad
-Guido
-Golding
-Fulkerson
-Fannin
-Dulaney
-Dowdell
-Deane
-Cottle
-Ceja
-Cate
-Bosley
-Benge
-Albritton
-Voigt
-Trowbridge
-Soileau
-Seely
-Rome
-Rohde
-Pearsall
-Paulk
-Orth
-Nason
-Mota
-Mcmullin
-Marquardt
-Madigan
-Hoag
-Gillum
-Gayle
-Gabbard
-Fenwick
-Fender
-Eck
-Danforth
-Cushing
-Cress
-Creed
-Cazares
-Casanova
-Bey
-Bettencourt
-Barringer
-Baber
-Stansberry
-Schramm
-Rutter
-Rivero
-Race
-Oquendo
-Necaise
-Mouton
-Montenegro
-Miley
-Mcgough
-Marra
-Macmillan
-Lock
-Lamontagne
-Jasso
-Jaime
-Horst
-Hetrick
-Heilman
-Gaytan
-Gall
-Fried
-Fortney
-Eden
-Dingle
-Desjardins
-Dabbs
-Burbank
-Brigham
-Breland
-Beaman
-Banner
-Arriola
-Yarborough
-Wallin
-Treat
-Toscano
-Stowers
-Reiss
-Pichardo
-Orton
-Mitchel
-Michels
-Mcnamee
-Mccrory
-Leatherman
-Kell
-Keister
-Jerome
-Horning
-Hargett
-Guay
-Friday
-Ferro
-Deboer
-Dagostino
-Clemente
-Christ
-Carper
-Bowler
-Blanks
-Beaudry
-Willie
-Towle
-Tafoya
-Stricklin
-Strader
-Soper
-Sonnier
-Sigmon
-Schenk
-Saddler
-Rodman
-Pedigo
-Mendes
-Lunn
-Lohr
-Lahr
-Kingsbury
-Jarman
-Hume
-Holliman
-Hofmann
-Haworth
-Harrelson
-Hambrick
-Flick
-Edmunds
-Dacosta
-Crossman
-Colston
-Chaplin
-Carrell
-Budd
-Weiler
-Waits
-Viola
-Valentino
-Trantham
-Tarr
-Straight
-Solorio
-Roebuck
-Powe
-Plank
-Pettus
-Palm
-Pagano
-Mink
-Luker
-Leathers
-Joslin
-Hartzell
-Gambrell
-Fears
-Deutsch
-Cepeda
-Carty
-Caputo
-Brewington
-Bedell
-Ballew
-Applewhite
-Warnock
-Walz
-Urena
-Tudor
-Reel
-Pigg
-Parton
-Mickelson
-Meagher
-Mclellan
-Mcculley
-Mandel
-Leech
-Lavallee
-Kraemer
-Kling
-Kipp
-Kingston
-Kehoe
-Hochstetler
-Harriman
-Gregoire
-Grabowski
-Gosselin
-Gammon
-Fancher
-Edens
-Desai
-Butt
-Brannan
-Armendariz
-Woolsey
-Whitehouse
-Whetstone
-Ussery
-Towne
-Tower
-Testa
-Tallman
-Studer
-Strait
-Steinmetz
-Sorrells
-Sauceda
-Rolfe
-Rae
-Paddock
-Mitchem
-Mcginn
-Mccrea
-Luck
-Lovato
-Ling
-Hazen
-Gilpin
-Gaynor
-Fike
-Devoe
-Delrio
-Curiel
-Burkhardt
-Bristol
-Bode
-Backus
-Alton
-Zinn
-Watanabe
-Wachter
-Vanpelt
-Turnage
-Shaner
-Schroder
-Sato
-Riordan
-Quimby
-Portis
-Natale
-Mckoy
-Mccown
-Marker
-Lucio
-Kilmer
-Karl
-Hotchkiss
-Hesse
-Halbert
-Gwinn
-Godsey
-Desmond
-Delisle
-Chrisman
-Canter
-Brook
-Arbogast
-Angell
-Acree
-Yancy
-Woolley
-Wesson
-Weatherspoon
-Trainor
-Stockman
-Spiller
-Sipe
-Rooks
-Reavis
-Propst
-Porras
-Neilson
-Mullens
-Loucks
-Llewellyn
-Lamont
-Kumar
-Koester
-Klingensmith
-Kirsch
-Kester
-Honaker
-Hodson
-Hennessy
-Helmick
-Garrity
-Garibay
-Fee
-Drain
-Casarez
-Callis
-Botello
-Bay
-Aycock
-Avant
-Angle
-Wingard
-Wayman
-Tully
-Theisen
-Szymanski
-Stansbury
-Segovia
-Rudy
-Rainwater
-Preece
-Pirtle
-Padron
-Mincey
-Mckelvey
-Mathes
-Marty
-Larrabee
-Kornegay
-Klug
-Judy
-Ingersoll
-Hecht
-Germain
-Eggers
-Dykstra
-Denis
-Deering
-Decoteau
-Deason
-Dearing
-Cofield
-Carrigan
-Brush
-Bonham
-Bahr
-Aucoin
-Appleby
-Almonte
-Yager
-Womble
-Wimmer
-Weimer
-Vanderpool
-Stancil
-Sprinkle
-Romine
-Remington
-Pfaff
-Peckham
-Olivera
-Meraz
-Maze
-Lathrop
-Koehn
-Jonas
-Hazelton
-Halvorson
-Hallock
-Haddock
-Ducharme
-Dehaven
-Colton
-Caruthers
-Brehm
-Bosworth
-Bost
-Blow
-Bias
-Beeman
-Basile
-Bane
-Aikens
-Zachary
-Wold
-Walther
-Tabb
-Suber
-Strawn
-Stocks
-Stocker
-Shirey
-Schlosser
-Salvador
-Riedel
-Rembert
-Reimer
-Pyles
-Pickle
-Peele
-Merriweather
-Letourneau
-Latta
-Kidder
-Hixon
-Hillis
-Hight
-Herbst
-Henriquez
-Haygood
-Hamill
-Gabel
-Fritts
-Eubank
-Duty
-Dawes
-Correll
-Coffee
-Cha
-Bushey
-Buchholz
-Brotherton
-Bridge
-Botts
-Barnwell
-Auger
-Atchley
-Westphal
-Veilleux
-Ulloa
-Truman
-Stutzman
-Shriver
-Ryals
-Prior
-Pilkington
-Newport
-Moyers
-Miracle
-Marrs
-Mangrum
-Maddux
-Lockard
-Laing
-Kuhl
-Harney
-Hammock
-Hamlett
-Felker
-Doerr
-Depriest
-Carrasquillo
-Carothers
-Bogle
-Blood
-Bischoff
-Bergen
-Albanese
-Wyckoff
-Vermillion
-Vansickle
-Thibault
-Tetreault
-Stickney
-Shoemake
-Ruggiero
-Rawson
-Racine
-Philpot
-Paschal
-Mcelhaney
-Mathison
-Legrand
-Lapierre
-Kwan
-Kremer
-Jiles
-Hilbert
-Geyer
-Faircloth
-Ehlers
-Egbert
-Desrosiers
-Dalrymple
-Cotten
-Cashman
-Cadena
-Breeding
-Boardman
-Alcaraz
-Ahn
-Wyrick
-Therrien
-Tankersley
-Strickler
-Puryear
-Plourde
-Pattison
-Pardue
-Milan
-Mcginty
-Mcevoy
-Landreth
-Kuhns
-Koon
-Hewett
-Giddens
-Everette
-Emerick
-Eades
-Deangelis
-Cosme
-Ceballos
-Birdsong
-Benham
-Bemis
-Armour
-Anguiano
-Angeles
-Welborn
-Tsosie
-Storms
-Shoup
-Sessoms
-Samaniego
-Rood
-Rojo
-Rhinehart
-Raby
-Northcutt
-Myer
-Munguia
-Morehouse
-More
-Mcdevitt
-Mateo
-Mallett
-Lozada
-Lemoine
-Kuehn
-Hallett
-Grim
-Gillard
-Gaylor
-Garman
-Gallaher
-Feaster
-Faris
-Darrow
-Dardar
-Coney
-Carreon
-Byron
-Braithwaite
-Boylan
-Boyett
-Born
-Bixler
-Bigham
-Benford
-Barragan
-Barnum
-Zuber
-Wyche
-Westcott
-Vining
-Stoltzfus
-Simonds
-Shupe
-Sabin
-Ruble
-Rittenhouse
-Richman
-Perrone
-Mulholland
-Millan
-Meister
-Mathew
-Lomeli
-Kite
-Jemison
-Hulett
-Holler
-Hickerson
-Herold
-Hazelwood
-Griffen
-Gause
-Forde
-Eisenberg
-Dilworth
-Charron
-Chaisson
-Brodie
-Bristow
-Breunig
-Brace
-Boutwell
-Bentz
-Belk
-Bayless
-Batchelder
-Baran
-Baeza
-Zimmermann
-Weathersby
-Volk
-Toole
-Theis
-Tedesco
-Shine
-Searle
-Schenck
-Satterwhite
-Sandy
-Ruelas
-Royce
-Rankins
-Partida
-Nesbit
-Morel
-Menchaca
-Levasseur
-Kaylor
-Johnstone
-Hulse
-Hollar
-Hersey
-Harrigan
-Harbison
-Guyer
-Gish
-Giese
-Gerlach
-Geller
-Geisler
-Falcone
-Ernest
-Elwell
-Doucet
-Deese
-Darr
-Corder
-Chafin
-Byler
-Bussell
-Burdett
-Brasher
-Bowe
-Bellinger
-Bastian
-Barner
-Alleyne
-Wilborn
-Weil
-Wegner
-Wales
-Tatro
-Spitzer
-Smithers
-Schoen
-Resendez
-Pete
-Parisi
-Overman
-Obrian
-Mudd
-Moy
-Mclaren
-Mahler
-Maggio
-Lindner
-Lalonde
-Lacasse
-Laboy
-Killion
-Kahl
-Jessen
-Jamerson
-Houk
-Henshaw
-Gustin
-Groom
-Graber
-Durst
-Duenas
-Davey
-Cundiff
-Conlon
-Colunga
-Coakley
-Chiles
-Capers
-Buell
-Bricker
-Bissonnette
-Birmingham
-Bartz
-Bagby
-Zayas
-Volpe
-Treece
-Toombs
-Thom
-Terrazas
-Swinney
-Skiles
-Silveira
-Shouse
-Senn
-Rambo
-Ramage
-Nez
-Moua
-Marlin
-Malik
-Langham
-Kyles
-Holston
-Hoagland
-Herd
-Hector
-Feller
-Emory
-Denison
-Corliss
-Carraway
-Burford
-Bickel
-Ambriz
-Abercrombie
-Yamada
-Winner
-Weidner
-Waddle
-Verduzco
-Thurmond
-Swindle
-Schrock
-Sanabria
-Rosenberger
-Probst
-Peabody
-Olinger
-Neighbors
-Nazario
-Mccafferty
-Mcbroom
-Mcabee
-Mazur
-Matherne
-Mapes
-Leverett
-Killingsworth
-Heisler
-Griego
-Grande
-Gosnell
-Frankel
-Franke
-Ferrante
-Fenn
-Elmer
-Ehrlich
-Christopherso
-Chick
-Chasse
-Chancellor
-Caton
-Brunelle
-Bly
-Bloomfield
-Babbitt
-Azevedo
-Abramson
-Ables
-Abeyta
-Youmans
-Wozniak
-Wainwright
-Summer
-Stowell
-Smitherman
-Sites
-Samuelson
-Runge
-Rule
-Rothman
-Rosenfeld
-Quan
-Peake
-Oxford
-Owings
-Olmos
-Munro
-Moreira
-Leatherwood
-Larkins
-Krantz
-Kovacs
-Kizer
-Kindred
-Karnes
-Jaffe
-Hubbell
-Hosey
-Hauck
-Harold
-Goodell
-Favors
-Erdman
-Dvorak
-Doane
-Cureton
-Cofer
-Buehler
-Bierman
-Berndt
-Banta
-Annis
-Abram
-Abdullah
-Warwick
-Waltz
-Turcotte
-Trinh
-Torrey
-Stith
-Seger
-Sachs
-Quesada
-Pinder
-Peppers
-Pascual
-Paschall
-Parkhurst
-Ozuna
-Oster
-Nicholls
-Mortimer
-Lheureux
-Lavalley
-Kimura
-Jablonski
-Haun
-Gourley
-Gilligan
-Fix
-Derby
-Croy
-Cotto
-Cargill
-Burwell
-Burgett
-Buckman
-Brett
-Booher
-Adorno
-Wrenn
-Whittemore
-Urias
-Szabo
-Sayles
-Saiz
-Rutland
-Rael
-Plant
-Pharr
-Penney
-Pelkey
-Ogrady
-Nickell
-Musick
-Moats
-Mather
-Massa
-Laurent
-Kirschner
-Kieffer
-Kellar
-Hendershot
-Gott
-Godoy
-Gadson
-Furtado
-Fiedler
-Erskine
-Edison
-Dutcher
-Dever
-Daggett
-Chevalier
-Chao
-Brake
-Ballesteros
-Amerson
-Alejandro
-Wingo
-Waldon
-Trott
-Spikes
-Silvey
-Showers
-Schlegel
-Rue
-Ritz
-Pepin
-Pelayo
-Parsley
-Palermo
-Moorehead
-Mchale
-Lett
-Kocher
-Kilburn
-Iglesias
-Humble
-Hulbert
-Huckaby
-Hix
-Haven
-Hartford
-Hardiman
-Gurney
-Grigg
-Grasso
-Goings
-Fillmore
-Farber
-Depew
-Dandrea
-Dame
-Cowen
-Covarrubias
-Cory
-Burrus
-Bracy
-Ardoin
-Thompkins
-Suzuki
-Standley
-Russel
-Radcliffe
-Pohl
-Persaud
-Percy
-Parenteau
-Pabon
-Newson
-Newhouse
-Napolitano
-Mulcahy
-Maya
-Malave
-Keim
-Hooten
-Hernandes
-Heffernan
-Hearne
-Greenleaf
-Glick
-Fuhrman
-Fetter
-Faria
-Dishman
-Dickenson
-Crites
-Criss
-Clapper
-Chenault
-Castor
-Casto
-Bugg
-Bove
-Bonney
-Blessing
-Ard
-Anderton
-Allgood
-Alderson
-Woodman
-Wisdom
-Warrick
-Toomey
-Tooley
-Tarrant
-Summerville
-Stebbins
-Sokol
-Sink
-Searles
-Schutz
-Schumann
-Scheer
-Remillard
-Raper
-Proulx
-Palmore
-Monroy
-Miguel
-Messier
-Melo
-Melanson
-Mashburn
-Manzano
-Lussier
-Lovely
-Lien
-Jenks
-Huneycutt
-Hartwig
-Grimsley
-Fulk
-Fielding
-Fidler
-Engstrom
-Eldred
-Dantzler
-Crandell
-Ching
-Calder
-Brumley
-Breton
-Brann
-Bramlett
-Boykins
-Bianco
-Bancroft
-Almaraz
-Alcantar
-Whitmer
-Whitener
-Welton
-Vineyard
-Su
-Rahn
-Paquin
-Mizell
-Mix
-Mcmillin
-Mckean
-Marston
-Maciel
-Lundquist
-Louie
-Liggins
-Lampkin
-Kranz
-Koski
-Kirkham
-Jiminez
-Hazzard
-Harrod
-Graziano
-Grammer
-Gendron
-Garrido
-Fordham
-Englert
-Elwood
-Dryden
-Demoss
-Deluna
-Crabb
-Comeau
-Claudio
-Brummett
-Blume
-Benally
-Wessel
-Vanbuskirk
-Thorson
-Stumpf
-Stockwell
-Rocco
-Reams
-Radtke
-Rackley
-Pelton
-Niemi
-Newland
-Nelsen
-Morrissette
-Miramontes
-Mcginley
-Mccluskey
-Marley
-Marchant
-Luevano
-Lampe
-Lail
-Jeffcoat
-Infante
-Hu
-Hinman
-Gaona
-Erb
-Eady
-Desmarais
-Decosta
-Dansby
-Cisco
-Choe
-Breckenridge
-Bostwick
-Borg
-Bianchi
-Beer
-Alberts
-Adrian
-Wilkie
-Whorton
-Vargo
-Tait
-Sylvia
-Soucy
-Schuman
-Ousley
-Mumford
-Lum
-Lippert
-Leath
-Lavergne
-Laliberte
-Kirksey
-Kenner
-Johnsen
-Izzo
-Hiles
-Gullett
-Greenwell
-Gaspar
-Galbreath
-Gaitan
-Ericson
-Duck
-Delapaz
-Croom
-Cottingham
-Clift
-Bushnell
-Boozer
-Bice
-Bernardo
-Beason
-Arrowood
-Waring
-Voorhees
-Truax
-Shreve
-Shockey
-Schatz
-Sandifer
-Rubino
-Rozier
-Roseberry
-Roll
-Player
-Pieper
-Peden
-Nester
-Nave
-Murphey
-Malinowski
-Macgregor
-Liang
-Lafrance
-Kunkle
-Kirkman
-Jorge
-Hipp
-Hasty
-Haddix
-Gervais
-Gerdes
-Garfield
-Gamache
-Fouts
-Fitzwater
-Dillingham
-Deming
-Deanda
-Cedeno
-Cannady
-Burson
-Bouldin
-Arceneaux
-Woodhouse
-Whitford
-Wescott
-Welty
-Weigel
-Torgerson
-Toms
-Surber
-Sunderland
-Sterner
-Setzer
-Salvatore
-Riojas
-Pumphrey
-Puga
-Pedro
-Patch
-Metts
-Mcgarry
-Mccandless
-Magill
-Lupo
-Loveland
-Llamas
-Leclerc
-Koons
-Kahler
-Huss
-Holbert
-Heintz
-Haupt
-Grimmett
-Gaskill
-Flower
-Ellingson
-Dorr
-Dingess
-Deweese
-Desilva
-Crossley
-Cordeiro
-Converse
-Conde
-Cheeks
-Caldera
-Cairns
-Burmeister
-Burkhalter
-Brawner
-Bott
-Youngs
-Vierra
-Valladares
-Tiffany
-Shrum
-Shropshire
-Sevilla
-Rusk
-Roof
-Rodarte
-Pedraza
-Nino
-Montana
-Merino
-Mcminn
-Markle
-Mapp
-Lucia
-Lajoie
-Koerner
-Kittrell
-Kato
-Hyder
-Hollifield
-Heiser
-Hazlett
-Greenwald
-Fant
-Eldredge
-Dreher
-Delafuente
-Cravens
-Claypool
-Beecher
-Aronson
-Alanis
-Worthen
-Wojcik
-Winger
-Whitacre
-Wellington
-Valverde
-Valdivia
-Troupe
-Thrower
-Swindell
-Suttles
-Suh
-Stroman
-Spires
-Slate
-Shealy
-Sarver
-Sartin
-Sadowski
-Rondeau
-Rolon
-Rick
-Rex
-Rascon
-Priddy
-Pine
-Paulino
-Nolte
-Munroe
-Molloy
-Mellon
-Mciver
-Lykins
-Loggins
-Lillie
-Lenoir
-Klotz
-Kempf
-Jone
-Hupp
-Hollowell
-Hollander
-Haynie
-Hassan
-Harkness
-Harker
-Gottlieb
-Frith
-Eddins
-Driskell
-Doggett
-Densmore
-Charette
-Cassady
-Carrol
-Byrum
-Burcham
-Buggs
-Benn
-Whitted
-Warrington
-Vandusen
-Vaillancourt
-Steger
-Spell
-Siebert
-Scofield
-Quirk
-Purser
-Plumb
-Orcutt
-Northern
-Nordstrom
-Mosely
-Michalski
-Mcphail
-Mcdavid
-Mccraw
-Martini
-Marchese
-Mannino
-Leo
-Lefevre
-Largent
-Lanza
-Kress
-Isham
-Hunsaker
-Hoch
-Hildebrandt
-Guarino
-Grijalva
-Graybill
-Fick
-Ewell
-Ewald
-Deangelo
-Cusick
-Crumley
-Coston
-Cathcart
-Carruthers
-Bullington
-Brian
-Bowes
-Blain
-Blackford
-Barboza
-Yingling
-Woodland
-Wert
-Weiland
-Varga
-Silverstein
-Sievers
-Shuster
-Shumway
-Scudder
-Runnels
-Rumsey
-Renfroe
-Provencher
-Polley
-Mohler
-Middlebrooks
-Kutz
-Koster
-Korn
-Grow
-Groth
-Glidden
-Fazio
-Deen
-Corn
-Copper
-Chipman
-Chenoweth
-Champlin
-Cedillo
-Carrero
-Carmody
-Buckles
-Brien
-Boutin
-Bosch
-Bill
-Berkowitz
-Altamirano
-Wilfong
-Wiegand
-Waites
-Truesdale
-Toussaint
-Tobey
-Tedder
-Steelman
-Sirois
-Schnell
-Robichaud
-Ridge
-Richburg
-Pray
-Plumley
-Pizarro
-Piercy
-Ortego
-Oberg
-Neace
-Music
-Mickey
-Mertz
-Mcnew
-Matta
-Lawyer
-Lapp
-Lair
-Kibler
-Jessie
-Howlett
-Hollister
-Hofer
-Hatten
-Hagler
-Germany
-Falgoust
-Engelhardt
-Eberle
-Eastwood
-Dombrowski
-Dinsmore
-Daye
-Cool
-Casares
-Capone
-Braud
-Balch
-Autrey
-Wendel
-Tyndall
-Toy
-Strobel
-Stoltz
-Spinelli
-Serrato
-Rochester
-Reber
-Real
-Rathbone
-Palomino
-Noah
-Nickels
-Mayle
-Mathers
-Mach
-Loeffler
-Littrell
-Levinson
-Leong
-Lemire
-Lejeune
-Lazo
-Lasley
-Koller
-Kennard
-Jester
-Hoelscher
-Hintz
-Hagerman
-Greaves
-Fore
-Eudy
-Engler
-Corrales
-Cordes
-Brunet
-Bidwell
-Bennet
-Bare
-Tyrrell
-Tharpe
-Swinton
-Stribling
-Steven
-Southworth
-Sisneros
-Shane
-Savoie
-Samons
-Ruvalcaba
-Roscoe
-Ries
-Ramer
-Omara
-Mosqueda
-Millar
-Mcpeak
-Macomber
-Luckey
-Litton
-Lehr
-Lavin
-Hubbs
-Hoard
-Hibbs
-Hagans
-Futrell
-Exum
-Evenson
-Dicks
-Culler
-Chou
-Carbaugh
-Callen
-Brashear
-Bloomer
-Blakeney
-Bigler
-Addington
-Woodford
-Witter
-Unruh
-Tolentino
-Sumrall
-Stgermain
-Smock
-Sherer
-Salem
-Rochelle
-Rayner
-Pooler
-Oquinn
-Nero
-Milano
-Mcglothlin
-Mars
-Linden
-Kowal
-Kerrigan
-Ibrahim
-Harvell
-Hanrahan
-Goodall
-Geist
-Fussell
-Fung
-Ferebee
-Federico
-Eley
-Eggert
-Dorsett
-Dingman
-Destefano
-Colucci
-Clemmer
-Caesar
-Burnell
-Brumbaugh
-Boddie
-Berryhill
-Avelar
-Alcantara
-Abbey
-Winder
-Winchell
-Vandenberg
-Trotman
-Thurber
-Thibeault
-Stlouis
-Stilwell
-Sperling
-Shattuck
-Sarmiento
-Ruppert
-Rumph
-Renaud
-Randazzo
-Rademacher
-Quiles
-Pearman
-Palomo
-Mercurio
-Lowrey
-Lindeman
-Lawlor
-Larosa
-Lander
-Labrecque
-Kimber
-Hovis
-Holifield
-Henninger
-Hawkes
-Hartfield
-Hann
-Hague
-Genovese
-Garrick
-Fudge
-Frink
-Eddings
-Dinh
-Dear
-Cutter
-Cribbs
-Constant
-Calvillo
-Bunton
-Brodeur
-Bolding
-Blanding
-Agosto
-Zahn
-Wiener
-Trussell
-Tew
-Tello
-Teixeira
-Stephan
-Speck
-Sharma
-Shanklin
-Sealy
-Scanlan
-Santamaria
-Roundy
-Robichaux
-Ringer
-Rigney
-Prevost
-Polson
-Philip
-Pass
-Nord
-Moxley
-Mohammed
-Medford
-Mccaslin
-Mcardle
-Macarthur
-Lewin
-Lasher
-Ketcham
-Keiser
-Heine
-Hackworth
-Grose
-Grizzle
-Grass
-Gillman
-Gartner
-Garth
-Frazee
-Fleury
-Fast
-Edson
-Edmonson
-Derry
-Deck
-Cronk
-Conant
-Burress
-Burgin
-Broom
-Brockington
-Bolick
-Boger
-Birchfield
-Billington
-Baily
-Bahena
-Armbruster
-Anson
-Yoho
-Wilcher
-Tinney
-Timberlake
-Thoma
-Thielen
-Sutphin
-Stultz
-Sikora
-Serra
-Schulman
-Scheffler
-Santillan
-Robin
-Rego
-Preciado
-Pinkham
-Monday
-Mickle
-Luu
-Lomas
-Lizotte
-Lent
-Lenard
-Kellerman
-Keil
-Juan
-Johanson
-Hernadez
-Hartsfield
-Hang
-Haber
-Gorski
-Farkas
-Eberhardt
-Duquette
-Delano
-Cropper
-Cozart
-Cockerham
-Chamblee
-Cartagena
-Cahoon
-Buzzell
-Brister
-Brewton
-Blackshear
-Benfield
-Aston
-Ashburn
-Arruda
-Wetmore
-Weise
-Vaccaro
-Tucci
-Sudduth
-Stromberg
-Stoops
-Showalter
-Shears
-Runion
-Rowden
-Rosenblum
-Riffle
-Renfrow
-Peres
-Obryant
-Nicolas
-Leftwich
-Lark
-Landeros
-Kistler
-Killough
-Kerley
-Kastner
-Hoggard
-Hartung
-Guertin
-Govan
-Gatling
-Gailey
-Fullmer
-Fulford
-Flatt
-Esquibel
-Endicott
-Edmiston
-Edelstein
-Dufresne
-Dressler
-Dickman
-Chee
-Busse
-Bonnett
-Bogart
-Berard
-Barrington
-Arena
-Anton
-Yoshida
-Velarde
-Veach
-Vanhouten
-Vachon
-Tolson
-Tolman
-Tennyson
-Stites
-Soler
-Shutt
-Ruggles
-Rhone
-Pegues
-Ong
-Neese
-Muro
-Moncrief
-Mefford
-Mcphee
-Mcmorris
-Mceachern
-Mcclurg
-Mansour
-Mai
-Mader
-Leija
-Lecompte
-Lafountain
-Labrie
-Jaquez
-Heald
-Hash
-Hartle
-Gainer
-Frisby
-Farina
-Eidson
-Edgerton
-Dyke
-Durrett
-Duhon
-Cuomo
-Cobos
-Cervantez
-Bybee
-Brockway
-Borowski
-Binion
-Beery
-Arguello
-Amaro
-Acton
-Yuen
-Winton
-Wigfall
-Weekley
-Vidrine
-Vannoy
-Tardiff
-Shoop
-Shilling
-Schick
-Sand
-Safford
-Prendergast
-Pilgrim
-Pellerin
-Osuna
-Nissen
-Nalley
-Moritz
-Moller
-Messner
-Messick
-Merry
-Merrifield
-Mcguinness
-Matherly
-Marcano
-Mahone
-Lemos
-Lebrun
-Jara
-Hoffer
-Hewlett
-Herren
-Hecker
-Haws
-Haug
-Hack
-Gwin
-Gober
-Gilliard
-Fredette
-Favela
-Echeverria
-Downer
-Donofrio
-Desrochers
-Dee
-Crozier
-Corson
-Clyde
-Bechtold
-Argueta
-Aparicio
-Zamudio
-Willette
-Westover
-Westerman
-Utter
-Troyer
-Thies
-Tapley
-Slavin
-Shirk
-Sandler
-Roop
-Rimmer
-Raymer
-Range
-Radcliff
-Otten
-Moorer
-Millet
-Mckibben
-Mccutchen
-Mcavoy
-Mcadoo
-Mayorga
-Mastin
-Martineau
-Marek
-Madore
-Leflore
-Kroeger
-Kennon
-Jimerson
-Javier
-Hostetter
-Hornback
-Hendley
-Hance
-Guardado
-Granado
-Gowen
-Goodale
-Flinn
-Fleetwood
-Fitz
-Durkee
-Duprey
-Dipietro
-Dilley
-Clyburn
-Brawley
-Beckley
-Arana
-Weatherby
-Vollmer
-Victoria
-Vestal
-Tunnell
-Trigg
-Tingle
-Takahashi
-Sweatt
-Storer
-Snapp
-Shiver
-Rooker
-Red
-Rathbun
-Poisson
-Perrine
-Perri
-Pastor
-Parmer
-Parke
-Pare
-Papa
-Palmieri
-Nottingham
-Midkiff
-Mecham
-Mccomas
-Mcalpine
-Lovelady
-Lillard
-Lally
-Knopp
-Kile
-Kiger
-Haile
-Gupta
-Goldsberry
-Gilreath
-Fulks
-Friesen
-Franzen
-Flack
-Findlay
-Ferland
-Dreyer
-Dore
-Dennard
-Deckard
-Debose
-Crim
-Coulombe
-Cork
-Chancey
-Cantor
-Branton
-Bissell
-Barns
-Woolard
-Witham
-Wasserman
-Waldo
-Spiegel
-Shoffner
-Scholz
-Ruch
-Rossman
-Ready
-Petry
-Palacio
-Paez
-Neary
-Mortenson
-Millsap
-Miele
-Mick
-Menke
-Mckim
-Mcanally
-Martines
-Manor
-Malcom
-Lemley
-Larochelle
-Klaus
-Klatt
-Kaufmann
-Kapp
-Helmer
-Hedge
-Halloran
-Glisson
-Frechette
-Fontana
-Enoch
-Eagan
-Drum
-Distefano
-Danley
-Creekmore
-Chartier
-Chaffee
-Carillo
-Burg
-Bolinger
-Berkley
-Benz
-Basso
-Bash
-Barrier
-Zelaya
-Woodring
-Witkowski
-Wilmot
-Wilkens
-Wieland
-Virgil
-Verdugo
-Urquhart
-Tsai
-Timms
-Swiger
-Swaim
-Sussman
-Scarlett
-Pires
-Molnar
-Mcatee
-Maurice
-Lowder
-Loos
-Linker
-Landes
-Kingery
-Keeley
-Hufford
-Higa
-Hendren
-Hammack
-Hamann
-Gillam
-Gerhardt
-Fell
-Eugene
-Edelman
-Eby
-Delk
-Deans
-Curl
-Constantine
-Cleaver
-Claar
-Casiano
-Carruth
-Carlyle
-Bump
-Brophy
-Bolanos
-Bibbs
-Bessette
-Beggs
-Baugher
-Bartel
-Averill
-Andresen
-Amin
-Alden
-Adames
-Wildman
-Via
-Valente
-Turnbow
-Tse
-Swink
-Sublett
-Stroh
-Stringfellow
-Ridgway
-Pugliese
-Poteat
-Pang
-Ohare
-Neubauer
-Murchison
-Mohamed
-Mingo
-Lucky
-Lemmons
-Kwon
-Kellam
-Kean
-Jarmon
-Hyden
-Hudak
-Hollinger
-Henkel
-Hemingway
-Hasson
-Hansel
-Halter
-Haire
-Goodnight
-Ginsberg
-Gillispie
-Fogel
-Flory
-Etter
-Elledge
-Eckman
-Deas
-Currin
-Crafton
-Coomer
-Colter
-Claxton
-Bulter
-Braddock
-Bowyer
-Blizzard
-Binns
-Bing
-Bellows
-Baskerville
-Barros
-Ansley
-Woolf
-Wight
-Waldman
-Wadley
-Tull
-Trull
-Tesch
-Struck
-Stouffer
-Stadler
-Slay
-Shubert
-Sedillo
-Santacruz
-Reinke
-Raleigh
-Poynter
-Neri
-Neale
-Natividad
-Mowry
-Moralez
-Monger
-Mitchum
-Merryman
-Manion
-Macdougall
-Lux
-Litchfield
-Ley
-Levitt
-Lepage
-Lasalle
-Laine
-Khoury
-Kavanagh
-Karns
-Ivie
-Huebner
-Hodgkins
-Halpin
-Garica
-Eversole
-Dutra
-Dunagan
-Duffey
-Dillman
-Dillion
-Deville
-Dearborn
-Damato
-Courson
-Coulson
-Burdine
-Bryce
-Bousquet
-Bonin
-Bish
-Atencio
-Westbrooks
-Wages
-Vaca
-Tye
-Toner
-Tomas
-Tillis
-Swett
-Surface
-Struble
-Stanfill
-Son
-Solorzano
-Slusher
-Sipple
-Sim
-Silvas
-Shults
-Schexnayder
-Saez
-Rodas
-Rager
-Pulver
-Plaza
-Penton
-Paniagua
-Meneses
-Mcfarlin
-Mcauley
-Matz
-Maloy
-Magruder
-Lohman
-Landa
-Lacombe
-Jaimes
-Hom
-Holzer
-Holst
-Heil
-Hackler
-Grundy
-Gregor
-Gilkey
-Farnham
-Durfee
-Dunton
-Dunston
-Duda
-Dews
-Dana
-Craver
-Corriveau
-Conwell
-Colella
-Chambless
-Bremer
-Boutte
-Bourassa
-Blaisdell
-Backman
-Babineaux
-Audette
-Alleman
-Towner
-Taveras
-Tarango
-Sullins
-Suiter
-Stallard
-Solberg
-Schlueter
-Poulos
-Pimental
-Owsley
-Olivier
-Okelley
-Nations
-Moffatt
-Metcalfe
-Meekins
-Medellin
-Mcglynn
-Mccowan
-Marriott
-Marable
-Lennox
-Lamoureux
-Koss
-Kerby
-Karp
-Jason
-Isenberg
-Howze
-Hockenberry
-Highsmith
-Harbour
-Hallmark
-Gusman
-Greeley
-Giddings
-Gaudet
-Gallup
-Fleenor
-Eicher
-Edington
-Dimaggio
-Dement
-Demello
-Decastro
-Cruise
-Bushman
-Brundage
-Brooker
-Brooke
-Bourg
-Board
-Blackstock
-Bergmann
-Beaton
-Banister
-Argo
-Appling
-Wortman
-Watterson
-Villalpando
-Tillotson
-Tighe
-Sundberg
-Sternberg
-Stamey
-Speaks
-Shipe
-Seeger
-Scarberry
-Sattler
-Sain
-Rothstein
-Poteet
-Plowman
-Pettiford
-Penland
-Peach
-Partain
-Pankey
-Oyler
-Ogletree
-Ogburn
-Moton
-Million
-Merkel
-Mask
-Markus
-Lucier
-Lazarus
-Lavelle
-Lakey
-Kratz
-Kinser
-Kershaw
-Josephson
-Jesse
-Imhoff
-Ibanez
-Hendry
-Hammon
-Frisbie
-Friedrich
-Frawley
-Fraga
-Forester
-Eskew
-Emmert
-Drennan
-Doyon
-Dominick
-Dandridge
-Cumming
-Cawley
-Carvajal
-Bracey
-Belisle
-Batey
-Ahner
-Wysocki
-Weiser
-Veliz
-Tincher
-Sherlock
-Santo
-Sansone
-Sankey
-Sandstrom
-Sale
-Rohrer
-Risner
-Pridemore
-Pfeffer
-Persinger
-Peery
-Oubre
-Orange
-Nowicki
-Musgrave
-Murdoch
-Mullinax
-Mccary
-Mathieu
-Livengood
-Leonardo
-Kyser
-Klink
-Kimes
-Kellner
-Kavanaugh
-Kasten
-Imes
-Hoey
-Hinshaw
-Halley
-Hake
-Gurule
-Grube
-Grillo
-Geter
-Gatto
-Garver
-Garretson
-Farwell
-Eiland
-Dunford
-Decarlo
-Corso
-Core
-Colman
-Collard
-Cleghorn
-Chasteen
-Cavender
-Carlile
-Calvo
-Byerly
-Brogdon
-Broadwater
-Breault
-Bono
-Bergin
-Behr
-Ballenger
-Amick
-Yan
-Vice
-Tamez
-Stiffler
-Steinke
-Simmon
-Shankle
-Schaller
-Salmons
-Sackett
-Saad
-Rideout
-Reader
-Ratcliffe
-Rao
-Ranson
-Randell
-Plascencia
-Petterson
-Olszewski
-Olney
-Olguin
-Nilsson
-Nevels
-Morelli
-Montiel
-Monge
-Michell
-Michaelson
-Mertens
-Mcchesney
-Mcalpin
-Mathewson
-Lower
-Loudermilk
-Lineberry
-Liggett
-Lamp
-Kinlaw
-Kight
-Just
-Jost
-Hereford
-Hardeman
-Halpern
-Halliday
-Hafer
-Gaul
-Friel
-Freitag
-Frances
-Forsberg
-Evangelista
-Doering
-Dicarlo
-Dendy
-Delp
-Deguzman
-Dameron
-Curtiss
-Cousin
-Cosper
-Charley
-Cauthen
-Cao
-Camper
-Bradberry
-Bouton
-Bonnell
-Bixby
-Bieber
-Beveridge
-Belle
-Bedwell
-Barhorst
-Bannon
-Baltazar
-Baier
-Ayotte
-Attaway
-Arenas
-Alex
-Abrego
-Watford
-Valley
-Turgeon
-Tunstall
-Thaxton
-Thai
-Tenorio
-Stotts
-Sthilaire
-Spiker
-Shedd
-Seng
-Seabolt
-Scalf
-Salyers
-Ruhl
-Rowlett
-Robinett
-Pfister
-Perlman
-Pepe
-Parkman
-Paradise
-Olin
-Nunnally
-Norvell
-Napper
-Modlin
-Mckellar
-Mcclean
-Mascarenas
-Manchester
-Leibowitz
-Ledezma
-Kuhlman
-Kobayashi
-Hunley
-Holmquist
-Hinkley
-Hazard
-Hartsell
-Gribble
-Gravely
-Fifield
-Eliason
-Doctor
-Doak
-Crossland
-Cover
-Clair
-Carleton
-Butters
-Bridgeman
-Bojorquez
-Boggess
-Banker
-Auten
-Woosley
-Wine
-Whiteley
-Wexler
-Twomey
-Tullis
-Townley
-To
-Standridge
-Stamp
-Springs
-Santoyo
-Rueda
-Riendeau
-Revell
-Pless
-Ottinger
-Nigro
-Nickles
-Mulvey
-Menefee
-Mcshane
-Mcloughlin
-Mckinzie
-Marrow
-Markey
-Mariano
-Lockridge
-Lipsey
-Knisley
-Knepper
-Kitts
-Kiel
-Jinks
-Hathcock
-Godin
-Gallego
-Fikes
-Fecteau
-Estabrook
-Ellinger
-Dustin
-Dunlop
-Dudek
-Diego
-Countryman
-Chauvin
-Chatham
-Bullins
-Brownfield
-Boughton
-Bloodworth
-Bibb
-Baucom
-Barbieri
-Aubin
-Armitage
-Alessi
-Absher
-Abbate
-Zito
-Woolery
-Wiggs
-Wacker
-Violette
-Tynes
-Tolle
-Telles
-Tarter
-Swarey
-Strode
-Stockdale
-Stella
-Stalnaker
-Spina
-Schiff
-Saari
-Risley
-Reading
-Rameriz
-Rakes
-Pettaway
-Penner
-Paulus
-Palladino
-Omeara
-Montelongo
-Melnick
-Mehta
-Mcgary
-Mccourt
-Mccollough
-Marchetti
-Manzanares
-Lowther
-Leiva
-Lauderdale
-Lafontaine
-Kowalczyk
-Knighton
-Joubert
-Jaworski
-Ide
-Huth
-Hurdle
-Hung
-Housley
-Hackman
-Gulick
-Gordy
-Gilstrap
-Gehrke
-Gebhart
-Gaudette
-Foxworth
-Finger
-Essex
-Endres
-Dunkle
-Clare
-Cimino
-Cardinal
-Caddell
-Brauer
-Braley
-Bodine
-Blackmore
-Belden
-Backer
-Ayer
-Andress
-Alva
-Wisner
-Walk
-Vuong
-Valliere
-Twigg
-Tso
-Tavarez
-Strahan
-Steib
-Staub
-Sowder
-Shoulders
-Seiber
-Schutt
-Scharf
-Schade
-Rodriques
-Risinger
-Renshaw
-Rath
-Rahman
-Presnell
-Pillow
-Piatt
-Pasquale
-Nieman
-Nicol
-Nevins
-Milford
-Mcilwain
-Mcgaha
-Mccully
-Mccomb
-Maye
-Massengale
-Macedo
-Lines
-Lesher
-Leland
-Kearse
-Jauregui
-Husted
-Hudnall
-Holmberg
-Hertel
-Hershey
-Hardie
-Glidewell
-Frausto
-Fassett
-Dash
-Dalessandro
-Dahlgren
-Corum
-Constantino
-Conlin
-Colquitt
-Colombo
-Claycomb
-Carley
-Cardin
-Cancel
-Buller
-Boring
-Boney
-Bocanegra
-Blazer
-Biggers
-Benedetto
-Araiza
-Andino
-Albin
-Zorn
-Werth
-Weisman
-Walley
-Vanegas
-Ulibarri
-Towers
-Towe
-Tedford
-Teasley
-Suttle
-Steffens
-Stcyr
-Squire
-Smythe
-Singley
-Sifuentes
-Shuck
-Session
-Schram
-Sass
-Rieger
-Ridenhour
-Rickert
-Richerson
-Rayborn
-Rabe
-Raab
-Pendley
-Pastore
-Ordway
-Moynihan
-Mellott
-Mckissick
-Mcgann
-Mccready
-Mauney
-Marrufo
-List
-Lenhart
-Lazar
-Lafave
-Keele
-Kautz
-Jardine
-Jahnke
-Jacobo
-Hord
-Hardcastle
-Hageman
-Griffey
-Giglio
-Gehring
-Fortson
-Duque
-Duplessis
-Donner
-Dicken
-Derosier
-Deitz
-Dalessio
-Cyrus
-Cram
-Chi
-Center
-Castleman
-Candelario
-Callison
-Caceres
-Bozarth
-Biles
-Bejarano
-Beech
-Bashaw
-Avina
-Armentrout
-Angus
-Alverez
-Acord
-Zack
-Waterhouse
-Vereen
-Vanlandingham
-Uhl
-Strawser
-Shotwell
-Severance
-Seltzer
-Schoonmaker
-Schock
-Schaub
-Schaffner
-Roeder
-Rodrigez
-Riffe
-Rhine
-Rasberry
-Rancourt
-Railey
-Quade
-Pursley
-Prouty
-Perdomo
-Oxley
-Osterman
-Nickens
-Murphree
-Mounts
-Monte
-Merida
-Maus
-Mattern
-Masse
-Martinelli
-Mangan
-Lutes
-Ludwick
-Loney
-Laureano
-Lasater
-Knighten
-Kissinger
-Kimsey
-Kessinger
-Honea
-Hollingshead
-Hockett
-Heyer
-Heron
-Gurrola
-Gove
-Glasscock
-Gillett
-Galan
-Featherstone
-Eckhardt
-Duron
-Dunson
-Dasher
-Culbreth
-Cowden
-Cowans
-Claypoole
-Churchwell
-Chabot
-Caviness
-Cater
-Caston
-Callan
-Byington
-Burkey
-Boden
-Beckford
-Atwater
-Arms
-Archambault
-Alvey
-Alsup
-Yon
-Whisenant
-Weese
-Voyles
-Verret
-Tsang
-Tessier
-Sweitzer
-Sherwin
-Shaughnessy
-Revis
-Remy
-Prine
-Philpott
-Peavy
-Paynter
-Parmenter
-Ovalle
-Offutt
-Nightingale
-Newlin
-Nakano
-Myatt
-Muth
-Mohan
-Mcmillon
-Mccarley
-Mccaleb
-Maxson
-Marinelli
-Maley
-Macy
-Liston
-Letendre
-Kain
-Huntsman
-Hirst
-Hagerty
-Gulledge
-Greenway
-Grajeda
-Gorton
-Goines
-Gittens
-Frederickson
-Fanelli
-Embree
-Eichelberger
-Dunkin
-Dull
-Dixson
-Dillow
-Defelice
-Chumley
-Burleigh
-Borkowski
-Binette
-Biggerstaff
-Berglund
-Beller
-Audet
-Arbuckle
-Allain
-Alfano
-Zander
-Youngman
-Wittman
-Weintraub
-Vanzant
-Vaden
-Twitty
-Trader
-Toon
-Till
-Stollings
-Standifer
-Spinner
-Sines
-Shope
-Scalise
-Saville
-Romans
-Posada
-Pisano
-Otte
-Nolasco
-Napoli
-Mier
-Merkle
-Mendiola
-Melcher
-Mejias
-Mcmurry
-Mccalla
-Markowitz
-Marine
-Manis
-Mallette
-Macfarlane
-Lough
-Looper
-Landin
-Kittle
-Kinsella
-Kinnard
-Hobart
-Herald
-Helman
-Hellman
-Hartsock
-Halford
-Hage
-Gordan
-Glasser
-Gayton
-Gattis
-Gastelum
-Gaspard
-Frisch
-Force
-Fitzhugh
-Eckstein
-Eberly
-Dowden
-Despain
-Crumpler
-Crotty
-Cornelison
-Collin
-Colin
-Chouinard
-Chamness
-Catlin
-Cann
-Bumgardner
-Budde
-Branum
-Bradfield
-Braddy
-Borst
-Birdwell
-Bent
-Bazan
-Bank
-Banas
-Bade
-Aubrey
-Arango
-Ahearn
-Addis
-Zumwalt
-Wurth
-Wilk
-Widener
-Wagstaff
-Vella
-Urrutia
-Terwilliger
-Tart
-Steinman
-Staats
-Sloat
-Rives
-Riggle
-Revels
-Reichard
-Prickett
-Poff
-Pitzer
-Petro
-Pell
-Northrup
-Nicks
-Moline
-Mielke
-Maynor
-Mallon
-Magness
-Lingle
-Lindell
-Lieb
-Lesko
-Lebeau
-Lammers
-Lafond
-Kiernan
-Ketron
-Jurado
-Holmgren
-Hilburn
-Hayashi
-Hashimoto
-Harbaugh
-Hans
-Guillot
-Gard
-Froehlich
-Felipe
-Feinberg
-Falco
-Dufour
-Drees
-Doney
-Diep
-Delao
-Daves
-Dail
-Cutting
-Crowson
-Coss
-Congdon
-Carner
-Camarena
-Butterworth
-Burlingame
-Bouffard
-Bloch
-Bilyeu
-Barta
-Bakke
-Baillargeon
-Avent
-Aquilar
-Ake
-Aho
-Zeringue
-Yeh
-Yarber
-Wolfson
-Wendell
-Vogler
-Voelker
-Truss
-Troxell
-Thrift
-Strouse
-Spielman
-Sistrunk
-Shows
-Sevigny
-Schuller
-Schaaf
-Ruffner
-Routh
-Roseman
-Ricciardi
-Peraza
-Pegram
-Overturf
-Olander
-Odaniel
-Neu
-Millner
-Melchor
-Maxie
-Marvel
-Maroney
-Machuca
-Macaluso
-Livesay
-Layfield
-Laskowski
-Kwiatkowski
-Ko
-Kiley
-Kilby
-Julien
-Hovey
-Heywood
-Hayman
-Havard
-Harville
-Haigh
-Hagood
-Grieco
-Glassman
-Gebhardt
-Garry
-Freeze
-Fleischer
-Fann
-Elson
-Eccles
-Cunha
-Crumb
-Crew
-Blakley
-Bardwell
-Abshire
-Woodham
-Wines
-Welter
-Wargo
-Varnado
-Tutt
-Traynor
-Swaney
-Svoboda
-Stricker
-Stoffel
-Stambaugh
-Sickler
-Shackleford
-Selman
-Seaver
-Sansom
-Sanmiguel
-Royston
-Rourke
-Rockett
-Rioux
-Puleo
-Pitchford
-Persons
-Normand
-Nardi
-Mulvaney
-Middaugh
-Manners
-Malek
-Lodge
-Leos
-Lathan
-Kujawa
-Kimbro
-Killebrew
-Joshua
-Houlihan
-Hobby
-Hinckley
-Herod
-Hepler
-Hamner
-Hammel
-Hallowell
-Gonsalez
-Gingerich
-Gambill
-Funkhouser
-Fricke
-Fewell
-Falkner
-Endsley
-Dulin
-Drennen
-Deaver
-Dambrosio
-Clover
-Chadwell
-Ceasar
-Castanon
-Canon
-Burkes
-Brune
-Brisco
-Brinker
-Bowker
-Boldt
-Berner
-Bee
-Beaumont
-Beaird
-Bazemore
-Barrick
-Arnette
-Albano
-Younts
-Wunderlich
-Weidman
-Vanness
-Tu
-Toland
-Theobald
-Stickler
-Steiger
-Stanger
-Spies
-Spector
-Sollars
-Smedley
-Seibel
-Scoville
-Saito
-Rye
-Rummel
-Rude
-Rowles
-Rouleau
-Roos
-Rogan
-Roemer
-Ream
-Raya
-Purkey
-Priester
-Perreira
-Penick
-Paulin
-Parkins
-Overcash
-Oleson
-Nicely
-Neves
-Muldrow
-Minard
-Midgett
-Michalak
-Melgar
-Mcentire
-Mcauliffe
-Marti
-Marte
-Lydon
-Lindholm
-Leyba
-Leader
-Langevin
-Lagasse
-Lafayette
-Kesler
-Kelton
-Kao
-Kaminsky
-Jump
-Jaggers
-Humbert
-Huck
-Howarth
-Hinrichs
-Higley
-Gupton
-Guimond
-Gravois
-Giguere
-Fretwell
-Fontes
-Feeley
-Faucher
-Fall
-Evan
-Eichhorn
-Ecker
-Earp
-Dole
-Dinger
-Derryberry
-Demars
-Deel
-Copenhaver
-Collinsworth
-Colangelo
-Cloyd
-Claiborne
-Caulfield
-Carlsen
-Calzada
-Caffey
-Broadus
-Brenneman
-Bouie
-Bodnar
-Blaney
-Blanc
-Blades
-Beltz
-Behling
-Begin
-Barahona
-Yun
-Yockey
-Winkle
-Windom
-Wimer
-Wilford
-Wash
-Villatoro
-Trexler
-Teran
-Taliaferro
-Sydnor
-Swinson
-Snelling
-Smtih
-Siu
-Simonton
-Simoneaux
-Simoneau
-Sherrer
-Seavey
-Scheel
-Rushton
-Rupe
-Ruano
-Rodney
-Rippy
-Reiner
-Reiff
-Rabinowitz
-Quach
-Penley
-Odle
-Nock
-Minnich
-Mckown
-Mccarver
-Mcandrew
-Longley
-Laux
-Lamothe
-Lafreniere
-Kropp
-Krick
-Kates
-Jepson
-Huie
-Howse
-Howie
-Henriques
-Haydon
-Haught
-Hatter
-Hartzog
-Harkey
-Grimaldo
-Goshorn
-Gormley
-Gluck
-Gilroy
-Gillenwater
-Giffin
-Folks
-Fluker
-Feder
-Eyre
-Eshelman
-Eakins
-Dryer
-Disney
-Detwiler
-Delrosario
-Davisson
-Celestine
-Catalan
-Canning
-Calton
-Buster
-Brammer
-Botelho
-Blakney
-Bartell
-Averett
-Askins
-Aker
-Zak
-Worcester
-Witmer
-Wiser
-Winkelman
-Widmer
-Whittier
-Western
-Weitzel
-Wardell
-Wagers
-Ullman
-Tupper
-Tingley
-Tilghman
-Talton
-Simard
-Seda
-Scheller
-Sala
-Rundell
-Rost
-Roa
-Ribeiro
-Rabideau
-Primm
-Porch
-Polite
-Pinon
-Peart
-Ostrom
-Ober
-Nystrom
-Nussbaum
-Nurse
-Naughton
-Murr
-Moorhead
-Monti
-Monteiro
-Melson
-Meissner
-Mclin
-Mcgruder
-Marotta
-Makowski
-Majewski
-Madewell
-Lunt
-Lukens
-Leininger
-Lebel
-Lakin
-Laguna
-Kepler
-Jaques
-Hunnicutt
-Hungerford
-Hoopes
-Hertz
-Heins
-Hammers
-Halliburton
-Grosso
-Gravitt
-Glasper
-Gideon
-Gallman
-Gallaway
-Funke
-Fulbright
-Falgout
-Eakin
-Dostie
-Dorado
-Dewberry
-Derose
-Cutshall
-Crampton
-Costanzo
-Colletti
-Cloninger
-Claytor
-Chiang
-Canterbury
-Campagna
-Burd
-Brokaw
-Broaddus
-Bretz
-Brainard
-Binford
-Bilbrey
-Alpert
-Aitken
-Ahlers
-Zajac
-Yale
-Woolfolk
-Witten
-Windle
-Wayland
-Tramel
-Tittle
-Talavera
-Suter
-Straley
-Stetson
-Specht
-Sommerville
-Soloman
-So
-Skeens
-Sigman
-Sibert
-Shavers
-Schuck
-Schmit
-Sartain
-Sabol
-Rosenblatt
-Rollo
-Rashid
-Rabb
-Province
-Polston
-Nyberg
-Northrop
-Navarra
-Muldoon
-Mulder
-Mikesell
-Mcdougald
-Mcburney
-Mauricio
-Mariscal
-Lui
-Lozier
-Lingerfelt
-Legere
-Latour
-Lagunas
-Lacour
-Kurth
-Ku
-Killen
-Kiely
-Kayser
-Kahle
-Julius
-Isley
-Huertas
-Hower
-Hinz
-Haugh
-Gumm
-Given
-Galicia
-Fortunato
-Flake
-Dunleavy
-Duggins
-Doby
-Digiovanni
-Devaney
-Deltoro
-Cribb
-Crank
-Corpuz
-Coronel
-Comfort
-Coen
-Charbonneau
-Caine
-Burchette
-Blakey
-Blakemore
-Bergquist
-Beene
-Beaudette
-Bayles
-Ballance
-Bakker
-Bailes
-Asberry
-Arwood
-Zucker
-Willman
-Whitesell
-Wald
-Walcott
-Vancleave
-Trump
-Trail
-Strasser
-Simas
-Shorts
-Shick
-Schleicher
-Schaal
-Saleh
-Rotz
-Resnick
-Raphael
-Rainer
-Partee
-Ollis
-Oller
-Oday
-Noles
-Munday
-Mountain
-Mong
-Millican
-Merwin
-Mazzola
-Mansell
-Magallanes
-Llanes
-Lewellen
-Lepore
-Kisner
-Keesee
-Jim
-Jeanlouis
-Ingham
-Hornbeck
-Hermes
-Hawn
-Hartz
-Harber
-Haffner
-Gutshall
-Guth
-Grays
-Grams
-Gowan
-Finlay
-Finkelstein
-Eyler
-Enloe
-Dungan
-Diez
-Dearman
-Dann
-Cull
-Crosson
-Creek
-Chronister
-Cassity
-Campion
-Callihan
-Butz
-Breazeale
-Blumenthal
-Billy
-Berkey
-Batty
-Batton
-Barge
-Arvizu
-Alexis
-Alderete
-Aldana
-Albaugh
-Abernethy
-Work
-Wolter
-Wille
-Tweed
-Tollefson
-Thomasson
-Teter
-Testerman
-Sproul
-Spates
-Southwick
-Soukup
-Skelly
-Senter
-Sealey
-Sawicki
-Sargeant
-Rossiter
-Rosemond
-Repp
-Pound
-Pink
-Pifer
-Ormsby
-Nickelson
-Naumann
-Morabito
-Monzon
-Millsaps
-Millen
-Mcelrath
-Marcoux
-Mantooth
-Madson
-Macneil
-Mackinnon
-Louque
-Leister
-Lampley
-Kushner
-Krouse
-Kirwan
-June
-Jessee
-Janson
-Jahn
-Jacquez
-Islas
-Hutt
-Holladay
-Hillyer
-Hepburn
-Hensel
-Harrold
-Guadalupe
-Gingrich
-Geis
-Gales
-Fults
-Finnell
-Ferri
-Featherston
-Epley
-Ebersole
-Eames
-Dunigan
-Drye
-Dismuke
-Devaughn
-Delorenzo
-Damiano
-Confer
-Collum
-Clower
-Clow
-Claussen
-Clack
-Caylor
-Cawthon
-Casias
-Carreno
-Carlo
-Bluhm
-Bingaman
-Bewley
-Belew
-Beckner
-Beamer
-Barefoot
-Auld
-Amey
-Wolfenbarger
-Wilkey
-Wicklund
-Waltman
-Villalba
-Valero
-Valdovinos
-Ung
-Ullrich
-Tyus
-Twyman
-Trost
-Tardif
-Tanguay
-Stripling
-Steinbach
-Shumpert
-Sasaki
-Sappington
-Sandusky
-Reinhold
-Reinert
-Quijano
-Pye
-Poor
-Placencia
-Pinkard
-Phinney
-Perrotta
-Pernell
-Parrett
-Oxendine
-Owensby
-Orman
-Nuno
-Mori
-Mcroberts
-Mcneese
-Mckamey
-Mccullum
-Markel
-Mardis
-Maines
-Lueck
-Lubin
-Lefler
-Leffler
-Lavery
-Larios
-Labarbera
-Kershner
-Josey
-Jeanbaptiste
-Izaguirre
-Hermosillo
-Haviland
-Hartshorn
-Hamlet
-Hafner
-Ginter
-Getty
-Franck
-Fiske
-Emmett
-Dufrene
-Doody
-Davie
-Dangerfield
-Dahlberg
-Cuthbertson
-Crone
-Coffelt
-Claus
-Chidester
-Chesson
-Cauley
-Caudell
-Cantara
-Campo
-Caines
-Bullis
-Bucci
-Brochu
-Bosco
-Bogard
-Bickerstaff
-Benning
-Arzola
-Antonelli
-Adkinson
-Zellers
-Wulf
-Worsley
-Woolridge
-Whitton
-Westerfield
-Walczak
-Vassar
-Truett
-Trueblood
-Trawick
-Townsley
-Topping
-Tobar
-Telford
-Sung
-Steverson
-Stagg
-Sitton
-Sill
-Sherrell
-Sergent
-Schoenfeld
-Sarabia
-Rutkowski
-Rubenstein
-Rigdon
-Prentiss
-Pomerleau
-Plumlee
-Phoenix
-Philbrick
-Peer
-Patty
-Patnode
-Oloughlin
-Obregon
-Nuss
-Napoleon
-Morell
-Moose
-Mikell
-Mele
-Mcinerney
-Mcguigan
-Mcbrayer
-Lore
-Lor
-Look
-Lollar
-Lakes
-Kuehl
-Kinzer
-Kamp
-Joplin
-Jacobi
-Howells
-Holstein
-Hedden
-Hassler
-Harty
-Halle
-Greig
-Granville
-Gouge
-Goodrum
-Gerhart
-Geier
-Geddes
-Gast
-Forehand
-Ferree
-Fendley
-Feltner
-Fang
-Esqueda
-Encarnacion
-Eichler
-Egger
-Edmundson
-Eatmon
-Dragon
-Doud
-Donohoe
-Donelson
-Dilorenzo
-Digiacomo
-Diggins
-Delozier
-Dejong
-Danford
-Crippen
-Coppage
-Cogswell
-Clardy
-Cioffi
-Cabe
-Brunette
-Bresnahan
-Bramble
-Blomquist
-Blackstone
-Biller
-Bevis
-Bevan
-Bethune
-Benbow
-Baty
-Basinger
-Balcom
-Andes
-Aman
-Aguero
-Adkisson
-Yandell
-Wilds
-Whisenhunt
-Weigand
-Weeden
-Voight
-Villar
-Trottier
-Tillett
-Suazo
-Setser
-Scurry
-Schuh
-Schreck
-Schauer
-Samora
-Roane
-Rinker
-Reimers
-Reason
-Ratchford
-Popovich
-Parkin
-Nichol
-Natal
-Melville
-Mcbryde
-Magdaleno
-Loehr
-Lockman
-Lingo
-Leduc
-Larocca
-Lao
-Lamere
-Laclair
-Krall
-Korte
-Koger
-Jumper
-Jalbert
-Hughs
-Higbee
-Henton
-Heaney
-Haith
-Gump
-Greeson
-Goodloe
-Gholston
-Gasper
-Gagliardi
-Fregoso
-Farthing
-Fabrizio
-Ensor
-Elswick
-Elgin
-Eklund
-Eaddy
-Drouin
-Dorton
-Dizon
-Derouen
-Delia
-Deherrera
-Davy
-Dark
-Dampier
-Cullum
-Culley
-Cowgill
-Cardoso
-Cardinale
-Brodsky
-Broadbent
-Brimmer
-Briceno
-Branscum
-Bolyard
-Boley
-Bennington
-Beadle
-Baur
-Ballentine
-Azure
-Aultman
-Augustus
-Asuncion
-Arciniega
-Aguila
-Aceves
-Yepez
-Yap
-Woodrum
-Wethington
-Weissman
-Veloz
-Trusty
-Troup
-Trammel
-Theodore
-Tarpley
-Stivers
-Steck
-Sprayberry
-Spraggins
-Spitler
-Spiers
-Sohn
-Seagraves
-Schiffman
-Rudnick
-Rizo
-Riccio
-Rennie
-Quinton
-Quackenbush
-Puma
-Plott
-Pearcy
-Parada
-Paiz
-Munford
-Moskowitz
-Mease
-Mcnary
-Mccusker
-Matt
-Lozoya
-Longmire
-Loesch
-Lasky
-Kuhlmann
-Krieg
-Koziol
-Kowalewski
-Konrad
-Kindle
-Jowers
-Jolin
-Jaco
-Hua
-Horgan
-Hine
-Hileman
-Hepner
-Heise
-Heady
-Hawkinson
-Hannigan
-Haberman
-Guilford
-Grimaldi
-Gilles
-Garton
-Gagliano
-Fruge
-Follett
-Fiscus
-Ferretti
-Ebner
-Easterday
-Eanes
-Dirks
-Dimarco
-Depalma
-Deforest
-Dance
-Cruce
-Craighead
-Christner
-Candler
-Cadwell
-Burchell
-Buettner
-Brinton
-Breed
-Brazier
-Brannen
-Brame
-Bova
-Bomar
-Blakeslee
-Belknap
-Bangs
-Balzer
-Athey
-Armes
-Alvis
-Alverson
-Alvardo
-Alter
-Zhao
-Yeung
-Yen
-Wheelock
-Westlund
-Wessels
-Volkman
-Threadgill
-Thelen
-Tandy
-Tague
-Ta
-Symons
-Swinford
-Sturtevant
-Straka
-Stier
-Stagner
-Segarra
-Seawright
-Sack
-Rutan
-Roux
-Ringler
-Riker
-Ramsdell
-Quattlebaum
-Purifoy
-Poulson
-Permenter
-Peloquin
-Pasley
-Pagel
-Osman
-Obannon
-Nygaard
-Nipper
-Newcomer
-Munos
-Motta
-Meadors
-Mcquiston
-Mcniel
-Mcmann
-Mccrae
-Mayne
-Matte
-Martine
-Lucy
-Legault
-Lechner
-Lack
-Kucera
-Krohn
-Kratzer
-Koopman
-Judson
-Jeske
-Horrocks
-Homes
-Hock
-Hibbler
-Hesson
-Hersh
-Harvin
-Halvorsen
-Griner
-Grindle
-Glen
-Gladstone
-Garofalo
-Frampton
-Forbis
-Fernando
-Eddington
-Diorio
-Dingus
-Dewar
-Desalvo
-Curcio
-Creasy
-Cortese
-Cordoba
-Connally
-Cluff
-Cascio
-Capuano
-Canaday
-Calabro
-Bussard
-Brayton
-Borja
-Bigley
-Arnone
-Arguelles
-Acuff
-Zamarripa
-Wooton
-Wolfgang
-Widner
-Wideman
-Threatt
-Thiele
-Templin
-Teeters
-Synder
-Swint
-Swick
-Sturges
-Stogner
-Stedman
-Spratt
-Six
-Siegfried
-Shetler
-Scull
-Savino
-Sather
-Rothwell
-Rook
-Rone
-Rolf
-Rhee
-Quevedo
-Privett
-Pouliot
-Poche
-Pickel
-Petrillo
-Pellegrini
-Peaslee
-Partlow
-Otey
-Nunnery
-Morelock
-Morello
-Meunier
-Messinger
-Mckie
-Mccubbin
-Mccarron
-Maria
-Lerch
-Lavine
-Laverty
-Lariviere
-Lamkin
-Kugler
-Krol
-Kissel
-Keeter
-Hummer
-Hubble
-Hickox
-Hetzel
-Hayner
-Hagy
-Hadlock
-Groh
-Gregorio
-Gottschalk
-Goodsell
-Gloria
-Gerry
-Gassaway
-Garrard
-Galligan
-Fye
-Firth
-Fenderson
-Feinstein
-Etienne
-Engleman
-Emrick
-Ellender
-Drews
-Doiron
-Degraw
-Deegan
-Dart
-Crissman
-Corr
-Cookson
-Coil
-Cleaves
-Charest
-Chapple
-Chaparro
-Castano
-Carpio
-Byer
-Bufford
-Bridgewater
-Bridgers
-Brandes
-Borrero
-Bonanno
-Aube
-Ancheta
-Abarca
-Abad
-Yung
-Yim
-Wooster
-Woodrow
-Wimbush
-Willhite
-Willams
-Wigley
-Weisberg
-Wardlaw
-Vigue
-Vanhook
-Unknow
-Torre
-Tasker
-Tarbox
-Strachan
-Standard
-Slover
-Shamblin
-Semple
-Schuyler
-Schrimsher
-Sayer
-Salzman
-Salomon
-Rubalcava
-Riles
-Rickey
-Reneau
-Reichel
-Rayfield
-Rabon
-Pyatt
-Prindle
-Poss
-Polito
-Plemmons
-Pesce
-Perrault
-Pereyra
-Ostrowski
-Nilsen
-Niemeyer
-Nick
-Munsey
-Mundell
-Moncada
-Miceli
-Meader
-Mcmasters
-Mckeehan
-Matsumoto
-Marron
-Marden
-Lizarraga
-Lingenfelter
-Lewallen
-Laurence
-Langan
-Lamanna
-Kovac
-Kinsler
-Kephart
-Keown
-Kass
-Kammerer
-Jeffreys
-Hysell
-Householder
-Hosmer
-Hardnett
-Hanner
-Guyette
-Greening
-Glazer
-Ginder
-Fromm
-Fortuna
-Fluellen
-Finkle
-Fey
-Fessler
-Essary
-Eisele
-Duren
-Dittmer
-Crochet
-Cosentino
-Cogan
-Coelho
-Cavin
-Carrizales
-Campuzano
-Brough
-Bow
-Bopp
-Bookman
-Bobb
-Blouin
-Beesley
-Battista
-Bascom
-Bakken
-Badgett
-Arneson
-Anselmo
-Albino
-Ahumada
-Agustin
-Woodyard
-Wolters
-Wireman
-Wilton
-Willison
-Warman
-Wan
-Waldrup
-Vowell
-Vantassel
-Vale
-Twombly
-Toomer
-Tennison
-Teets
-Tedeschi
-Swanner
-Swallow
-Stutz
-Stelly
-Sheehy
-Schermerhorn
-Scala
-Sandidge
-Salters
-Salo
-Saechao
-Roseboro
-Rolle
-Ressler
-Renz
-Renn
-Redford
-Raposa
-Rainbolt
-Pompey
-Pelfrey
-Orndorff
-Oney
-Nolin
-Nimmons
-Ney
-Nardone
-Myhre
-Morman
-Mines
-Menjivar
-Mcglone
-Mccammon
-Maxon
-Maris
-Marciano
-Manus
-Maiden
-Lowrance
-Lorenzen
-Lonergan
-Lollis
-Littles
-Lindahl
-Lansing
-Lamas
-Lach
-Kuster
-Krawczyk
-Knuth
-Knecht
-Kirkendall
-Keitt
-Keever
-Kantor
-Jarboe
-Hoye
-Houchens
-Holter
-Holsinger
-Hickok
-Herb
-Helwig
-Helgeson
-Heater
-Hassett
-Harner
-Hamman
-Hames
-Hadfield
-Goree
-Goldfarb
-Gaughan
-Gaudreau
-Gantz
-Gallion
-Frady
-Foti
-Flesher
-Ferrin
-Faught
-Engram
-Elbert
-Donegan
-Desouza
-Degroot
-Cutright
-Crowl
-Criner
-Coke
-Coan
-Clinkscales
-Chewning
-Chavira
-Catchings
-Carlock
-Bye
-Bulger
-Buenrostro
-Bramblett
-Brack
-Boulware
-Bordeaux
-Bookout
-Bitner
-Birt
-Baranowski
-Baisden
-Augustin
-Allmon
-Alberto
-Acklin
-Yoakum
-Wilbourn
-Whisler
-Weinberger
-Washer
-Vasques
-Vanzandt
-Vanatta
-Troxler
-Tomes
-Tindle
-Tims
-Throckmorton
-Thach
-Stpeter
-Stlaurent
-Stenson
-Spry
-Spitz
-Songer
-Snavely
-Sly
-Sleeper
-Shroyer
-Shortridge
-Shenk
-Sevier
-Seabrook
-Scrivner
-Saltzman
-Rosenberry
-Rockwood
-Robeson
-Roan
-Reiser
-Redwine
-Ramires
-Raber
-Profit
-Posner
-Popham
-Pipes
-Piotrowski
-Pinard
-Peterkin
-Pelham
-Peiffer
-Peay
-Peavey
-Nadler
-Musso
-Milo
-Millett
-Mestas
-Mcgowen
-Marques
-Marasco
-Manriquez
-Manos
-Mair
-Lipps
-Lesser
-Leiker
-Leeds
-Krumm
-Knorr
-Kinslow
-Kessel
-Kendricks
-Kelm
-Ito
-Irick
-Ickes
-Hurlburt
-Horta
-Hoekstra
-Heuer
-Helmuth
-Heatherly
-Hampson
-Hagar
-Haga
-Greenlaw
-Grau
-Godbey
-Gingras
-Gillies
-Gibb
-Gayden
-Gauvin
-Garrow
-Fontanez
-Florio
-Fleischman
-Finke
-Fasano
-Fan
-Faith
-Ezzell
-Ewers
-Eveland
-Eckenrode
-Duclos
-Drumm
-Dimmick
-Delancey
-Defazio
-Deacon
-Dashiell
-Damian
-Cusack
-Crowther
-Crigger
-Cray
-Coolidge
-Coldiron
-Cleland
-Chalfant
-Cassel
-Cape
-Camire
-Cabrales
-Broomfield
-Brittingham
-Brisson
-Brickey
-Braziel
-Brazell
-Bragdon
-Boulanger
-Bos
-Boman
-Bohannan
-Beem
-Barto
-Barre
-Barley
-Baptist
-Azar
-Ashbaugh
-Armistead
-Almazan
-Adamski
-Zendejas
-Winburn
-Willaims
-Wilhoit
-Westberry
-Wentzel
-Wendling
-Wager
-Visser
-Vanscoy
-Vankirk
-Vallee
-Tweedy
-Thornberry
-Sweeny
-Stalker
-Spradling
-Spano
-Smelser
-Shim
-Sechrist
-Schall
-Scaife
-Rugg
-Ruben
-Rothrock
-Roesler
-Riehl
-Ridings
-Render
-Ransdell
-Radke
-Pinero
-Petree
-Pendergast
-Peluso
-Pecoraro
-Pascoe
-Panek
-Oshiro
-Noon
-Navarrette
-Murguia
-Moores
-Moberg
-Mike
-Michaelis
-Mcwhirter
-Mcsweeney
-Mcquade
-Mccay
-Mauk
-Mariani
-Marceau
-Mandeville
-Maeda
-Lunde
-Ludlow
-Loeb
-Lindo
-Linderman
-Leveille
-Leith
-Larock
-Lambrecht
-Kulp
-Kinsley
-Kimberlin
-Kesterson
-Jacinto
-Ice
-Hui
-Hoyos
-Helfrich
-Hanke
-Hail
-Guillermo
-Grisby
-Goyette
-Gouveia
-Glazier
-Gile
-Gerena
-Gelinas
-Gasaway
-Garden
-Funches
-Fujimoto
-Flynt
-Fenske
-Fellers
-Fehr
-Eslinger
-Escalera
-Enciso
-Duley
-Dittman
-Dineen
-Diller
-Devault
-Dao
-Collings
-Clymer
-Clowers
-Chavers
-Charland
-Castorena
-Castello
-Camargo
-Bunce
-Bullen
-Boyes
-Borchers
-Borchardt
-Birnbaum
-Birdsall
-Billman
-Benites
-Bankhead
-Ange
-Ammerman
-Adkison
-Yuan
-Winegar
-Wickman
-Wear
-Warr
-Warnke
-Villeneuve
-Veasey
-Vassallo
-Vannatta
-Vadnais
-Twilley
-Truelove
-Towery
-Tomblin
-Tippett
-Theiss
-Talkington
-Talamantes
-Swart
-Swanger
-Streit
-Straw
-Stines
-Stabler
-Spurling
-Sobel
-Sine
-Simmers
-Shippy
-Shiflett
-Shearin
-Sauter
-Sanderlin
-Rusch
-Runkle
-Ruckman
-Rorie
-Roesch
-Roberto
-Richert
-Rehm
-Randel
-Ragin
-Quesenberry
-Puentes
-Plyler
-Plotkin
-Paugh
-Oshaughnessy
-Ohalloran
-Norsworthy
-Niemann
-Nader
-Moorefield
-Mooneyham
-Modica
-Miyamoto
-Mickel
-Mebane
-Mckinnie
-Mazurek
-Mancilla
-Lukas
-Lovins
-Loughlin
-Lotz
-Lindsley
-Liddle
-Levan
-Lederman
-Leclaire
-Lasseter
-Lapoint
-Lamoreaux
-Lafollette
-Kubiak
-Kirtley
-Keffer
-Kaczmarek
-Jennette
-Housman
-Honey
-Hiers
-Hibbert
-Herrod
-Hegarty
-Hathorn
-Harsh
-Greenhaw
-Grafton
-Govea
-Gardener
-Futch
-Furst
-Frisbee
-Fred
-Franko
-Forcier
-Foran
-Flickinger
-Fairfield
-Eure
-Emrich
-Embrey
-Edgington
-Ecklund
-Eckard
-Durante
-Deyo
-Delvecchio
-Deeds
-Dade
-Currey
-Cuff
-Creswell
-Cottrill
-Casavant
-Cartier
-Cargile
-Capel
-Cammack
-Calfee
-Buzzard
-Burse
-Burruss
-Brust
-Brousseau
-Bridwell
-Braaten
-Borkholder
-Bloomquist
-Bjork
-Bartelt
-Arp
-Amburgey
-Yeary
-Yao
-Whitefield
-Vinyard
-Vicente
-Vanvalkenburg
-Twitchell
-Timmins
-Tester
-Tapper
-Stringham
-Starcher
-Spotts
-Slaugh
-Simonsen
-Sheffer
-Sequeira
-Rosati
-Rode
-Rhymes
-Reza
-Record
-Quint
-Pollak
-Peirce
-Patillo
-Parkerson
-Paiva
-Nilson
-Nice
-Nevin
-Narcisse
-Nair
-Mitton
-Merriam
-Merced
-Meiners
-Mckain
-Mcelveen
-Mcbeth
-Marsden
-Marez
-Manke
-Mahurin
-Mabrey
-Luper
-Krull
-Kees
-Iles
-Hunsicker
-Hornbuckle
-Holtzclaw
-Hirt
-Hinnant
-Heston
-Hering
-Hemenway
-Hegwood
-Hearns
-Halterman
-Halls
-Guiterrez
-Grote
-Granillo
-Grainger
-Glasco
-Gilder
-Garren
-Garlock
-Garey
-Fu
-Fryar
-Fredricks
-Fraizer
-Foxx
-Foshee
-Ferrel
-Felty
-Feathers
-Everitt
-Evens
-Esser
-Elkin
-Eberhart
-Durso
-Duguay
-Driskill
-Doster
-Dewall
-Deveau
-Demps
-Demaio
-Delreal
-Deleo
-Delay
-Deem
-Darrah
-Cumberbatch
-Culberson
-Cranmer
-Cordle
-Colgan
-Chesley
-Cavallo
-Castellon
-Castelli
-Carreras
-Carnell
-Carmon
-Carmen
-Carlucci
-Bottom
-Bontrager
-Blumberg
-Blasingame
-Becton
-Ayon
-Artrip
-Arline
-Andujar
-Alkire
-Alder
-Agan
-Zukowski
-Zuckerman
-Zehr
-Wroblewski
-Wrigley
-Woodside
-Wigginton
-Westman
-Westgate
-Werts
-Washam
-Wardlow
-Walser
-Waiters
-Teller
-Tadlock
-Stuck
-Stringfield
-Stimpson
-Stickley
-Starbuck
-Standish
-Spurlin
-Spindler
-Speller
-Spaeth
-Sotomayor
-Sok
-Sluder
-Shryock
-Shepardson
-Shatley
-Scannell
-Santistevan
-Rosner
-Rolland
-Rhode
-Resto
-Reinhard
-Rathburn
-Prisco
-Poulsen
-Pinney
-Phares
-Pennock
-Pastrana
-Oviedo
-Ostler
-Noto
-Nauman
-Mulford
-Moise
-Moberly
-Mirabal
-Ming
-Metoyer
-Metheny
-Mentzer
-Meldrum
-Mcinturff
-Mcelyea
-Mcdougle
-Massaro
-Lumpkins
-Loveday
-Lofgren
-Loe
-Lirette
-Lesperance
-Lefkowitz
-Ledger
-Lauzon
-Lain
-Lachapelle
-Kurz
-Klassen
-Keough
-Kempton
-Kaelin
-Jeffords
-Im
-Huot
-Hsieh
-Hoyer
-Horwitz
-Hopp
-Hoeft
-Hennig
-Haskin
-Grill
-Gourdine
-Golightly
-Girouard
-Fulgham
-Fritsch
-Freer
-Frasher
-Foulk
-Firestone
-Fiorentino
-Fedor
-Feather
-Ensley
-Englehart
-Eells
-Ebel
-Dunphy
-Donahoe
-Dimas
-Dileo
-Dibenedetto
-Dabrowski
-Crick
-Coonrod
-Conder
-Coddington
-Chunn
-Choy
-Chaput
-Cerna
-Carreiro
-Calahan
-Braggs
-Bourdon
-Boner
-Bollman
-Bittle
-Ben
-Behm
-Bauder
-Batt
-Barreras
-Aubuchon
-Anzalone
-Adamo
-Zhou
-Zerbe
-Zachery
-Witty
-Wirt
-Willcox
-Westberg
-Weikel
-Waymire
-Vroman
-Vinci
-Vallejos
-Tutor
-Truesdell
-Troutt
-Trotta
-Tollison
-Toles
-Tichenor
-Tai
-Symonds
-Surles
-Sunday
-Strayer
-Stgeorge
-Sroka
-Sorrentino
-Solares
-Snelson
-Silvestri
-Sikorski
-Shawver
-Schumaker
-Schorr
-Schooley
-Scates
-Satterlee
-Satchell
-Sacks
-Rymer
-Roselli
-Robitaille
-Riegel
-Richer
-Regis
-Reames
-Provenzano
-Proper
-Priestley
-Plaisance
-Pettey
-Palomares
-Oman
-Nowakowski
-Nace
-Monette
-Minyard
-Mclamb
-Mchone
-Mccarroll
-Masson
-Marco
-Magoon
-Maddy
-Lundin
-Loza
-Licata
-Lesley
-Leonhardt
-Lema
-Landwehr
-Kircher
-Kinch
-Karpinski
-Johannsen
-Hussain
-Houghtaling
-Hoskinson
-Hollaway
-Holeman
-Hobgood
-Hilt
-Hiebert
-Gros
-Gram
-Goggin
-Gentle
-Geissler
-Gadbois
-Gabaldon
-Fleshman
-Flannigan
-Files
-Fairman
-Epp
-Eilers
-Dycus
-Dunmire
-Duffield
-Dowler
-Ditto
-Deloatch
-Dehaan
-Deemer
-Corner
-Clayborn
-Christofferso
-Chilson
-Chesney
-Chatfield
-Charlie
-Caster
-Carron
-Canale
-Camden
-Buff
-Brigman
-Branstetter
-Bosse
-Borton
-Bonar
-Blau
-Biron
-Beagle
-Barroso
-Arvin
-Arispe
-Zacharias
-Zabel
-Yaeger
-Works
-Woolford
-Whetzel
-Weakley
-Veatch
-Vandeusen
-Tufts
-Troxel
-Troche
-Traver
-Townsel
-Tosh
-Talarico
-Swilley
-Sterrett
-Stenger
-Springfield
-Speakman
-Sowards
-Sours
-Souders
-Souder
-Soles
-Sobers
-Snoddy
-Smither
-Sias
-Shute
-Shoaf
-Shahan
-Schuetz
-Scaggs
-Santini
-Rosson
-Rolen
-Robidoux
-Rentas
-Recio
-Pixley
-Pawlowski
-Pawlak
-Paull
-Pascal
-Overbey
-Orear
-Oliveri
-Oldenburg
-Nutting
-Naugle
-Mote
-Mossman
-Moor
-Misner
-Milazzo
-Michelson
-Mei
-Mcentee
-Mccullar
-Mccree
-Mcaleer
-Mazzone
-Maxim
-Marshal
-Mandell
-Manahan
-Malott
-Maisonet
-Mailloux
-Lumley
-Lowrie
-Louviere
-Lipinski
-Lindemann
-Leppert
-Leopold
-Leasure
-Leaf
-Labarge
-Kubik
-Knisely
-Knepp
-Kenworthy
-Kennelly
-Kelch
-Karg
-Kanter
-Ignacio
-Hyer
-Houchin
-Hosley
-Hosler
-Hollon
-Holleman
-Heitman
-Hebb
-Haggins
-Gwaltney
-Guin
-Greenman
-Goulding
-Gorden
-Goodyear
-Geraci
-Georges
-Gathers
-Frison
-Feagin
-Falconer
-Espada
-Erving
-Erikson
-Eisenhauer
-Eder
-Ebeling
-Durgin
-Drown
-Dowdle
-Dinwiddie
-Delcastillo
-Dedrick
-Crimmins
-Covell
-Cournoyer
-Coria
-Cohan
-Cataldo
-Carpentier
-Canas
-Campa
-Brode
-Brashears
-Blaser
-Bicknell
-Berk
-Bednar
-Barwick
-Ascencio
-Althoff
-Almodovar
-Alamo
-Zirkle
-Zabala
-Xu
-Wolverton
-Winebrenner
-Wetherell
-Westlake
-Wegener
-Weddington
-Vong
-Tuten
-Trosclair
-Trim
-Tressler
-Theroux
-Teske
-Sword
-Swinehart
-Swensen
-Sundquist
-Southall
-Socha
-Sizer
-Silverberg
-Shortt
-Shimizu
-Sherrard
-Shen
-Shaeffer
-Seth
-Scheid
-Scheetz
-Saravia
-Sanner
-Rubinstein
-Rozell
-Romer
-Ringo
-Rheaume
-Reisinger
-Raven
-Randles
-Pullum
-Petrella
-Payan
-Papp
-Pablo
-Nordin
-Norcross
-Nicoletti
-Nicholes
-Newbold
-Nakagawa
-Mraz
-Monteith
-Milstead
-Milliner
-Mellen
-Mccardle
-Matthias
-Marcy
-Luft
-Loo
-Locker
-Liptak
-Lipp
-Leitch
-Latimore
-Larrison
-Landau
-Laborde
-Koval
-Izquierdo
-Hymel
-Hoskin
-Holte
-Hoefer
-Hayworth
-Hausman
-Harrill
-Harrel
-Hardt
-Gully
-Groover
-Grinnell
-Greenspan
-Graver
-Grandberry
-Gorrell
-Goldenberg
-Goguen
-Gilleland
-Garr
-Fuson
-Foye
-Felt
-Feldmann
-Everly
-Dyess
-Dyal
-Dunnigan
-Downie
-Dolby
-Divine
-Deatherage
-Dates
-Danna
-Cosey
-Corrado
-Cheever
-Celaya
-Caver
-Cashion
-Caplinger
-Cansler
-Byrge
-Bruder
-Brew
-Breuer
-Breslin
-Brazelton
-Botkin
-Bonneau
-Bones
-Bondurant
-Bohanan
-Bogue
-Boes
-Bodner
-Boatner
-Blatt
-Bickley
-Belliveau
-Beiler
-Beier
-Beckstead
-Bart
-Bang
-Bachmann
-Atkin
-Aron
-Andreas
-Altizer
-Alloway
-Allaire
-Albro
-Abron
-Zellmer
-Yetter
-Yelverton
-Wiltshire
-Wiens
-Whidden
-Wait
-Viramontes
-Vanwormer
-Topper
-Tarantino
-Tanksley
-Sumlin
-Strauch
-Strang
-Stice
-Spahn
-Sosebee
-Sigala
-Shrout
-Seamon
-Schrum
-Schneck
-Schantz
-Said
-Ruddy
-Romig
-Roehl
-Renninger
-Reding
-Pyne
-Polak
-Pohlman
-Pasillas
-Oldfield
-Oldaker
-Ohanlon
-Ogilvie
-Norberg
-Nolette
-Nies
-Neufeld
-Nellis
-Mummert
-Mulvihill
-Mullaney
-Monteleone
-Mendonca
-Meisner
-Mcmullan
-Mccluney
-Mattis
-Massengill
-Manfredi
-Luedtke
-Lounsbury
-Lora
-Liberatore
-Leek
-Lease
-Lazaro
-Lamphere
-Laforge
-Kuo
-Koo
-Jourdan
-Ismail
-Iorio
-Iniguez
-Ikeda
-Hubler
-Hodgdon
-Hocking
-Heacock
-Haslam
-Haralson
-Hanshaw
-Hannum
-Hallam
-Haden
-Garnes
-Garces
-Gammage
-Gambino
-Finkel
-Faucett
-Fahy
-Esteban
-Ehrhardt
-Eggen
-Dusek
-Durrant
-Dubay
-Dones
-Dey
-Depasquale
-Delucia
-Degraff
-Deer
-Decamp
-Davalos
-Darwin
-Dan
-Cullins
-Conard
-Clouser
-Clontz
-Cifuentes
-Chico
-Chappel
-Chaffins
-Celis
-Carwile
-Byram
-Bruggeman
-Brick
-Bressler
-Brathwaite
-Brasfield
-Bradburn
-Boose
-Boon
-Bodie
-Blosser
-Blas
-Bise
-Bertsch
-Bernardi
-Bernabe
-Bengtson
-Barrette
-Astorga
-Armand
-Antone
-Alday
-Albee
-Abrahamson
-Yarnell
-Wiltse
-Wile
-Wiebe
-Waguespack
-Vasser
-Upham
-Tyre
-Turek
-Tune
-Traxler
-Torain
-Tomaszewski
-Tinnin
-Tiner
-Tindell
-Teed
-Styron
-Stahlman
-Staab
-Spoon
-Spells
-Skiba
-Shih
-Sheperd
-Seidl
-Secor
-Schutte
-Sanfilippo
-Ruder
-Rondon
-Reina
-Rearick
-Rank
-Procter
-Prochaska
-Pettengill
-Pauly
-Neilsen
-Nally
-Mutter
-Mullenax
-Morano
-Meads
-Mcnaughton
-Mcmurtry
-Mcmath
-Mckinsey
-Matthes
-Massenburg
-Marlar
-Margolis
-Marcos
-Malin
-Magallon
-Mackin
-Lovette
-Loughran
-Loring
-Longstreet
-Loiselle
-Lenihan
-Laub
-Kunze
-Kull
-Koepke
-Knights
-Kerwin
-Kalinowski
-Kagan
-Innis
-Innes
-Husband
-Holtzman
-Heinemann
-Harshman
-Haider
-Haack
-Guss
-Grondin
-Grissett
-Greenawalt
-Gravel
-Goudy
-Goodlett
-Goldston
-Gokey
-Goin
-Gardea
-Galaviz
-Gafford
-Gabrielson
-Furlow
-Fritch
-Fordyce
-Folger
-Elizalde
-Ehlert
-Eckhoff
-Eccleston
-Ealey
-Dubin
-Dolphin
-Dieter
-Diemer
-Deschamps
-Delapena
-Decicco
-Debolt
-Daum
-Cullinan
-Crittendon
-Crase
-Cossey
-Coppock
-Coots
-Colyer
-Columbus
-Cluck
-Chamberland
-Cane
-Burkhead
-Bumpus
-Buchan
-Borman
-Bork
-Boe
-Birkholz
-Berardi
-Benda
-Behnke
-Barter
-Auer
-Amezquita
-Wotring
-Wirtz
-Wingert
-Wiesner
-Whitesides
-Weyant
-Wainscott
-Vivian
-Venezia
-Varnell
-Tussey
-Trainer
-Toll
-Thurlow
-Tack
-Tabares
-Stiver
-Stell
-Starke
-Stanhope
-Stanek
-Sisler
-Sinnott
-Sidney
-Siciliano
-Shehan
-Selph
-Seager
-Scurlock
-Scranton
-Santucci
-Santangelo
-Saltsman
-Ruel
-Ropp
-Rolling
-Rogge
-Rettig
-Renwick
-Reidy
-Reider
-Redfield
-Quam
-Premo
-Port
-Pier
-Peet
-Parente
-Paolucci
-Pan
-Palmquist
-Orme
-Ohler
-Ogg
-Netherton
-Mutchler
-Morita
-Mistretta
-Minnis
-Middendorf
-Menzel
-Mendosa
-Mendelson
-Meaux
-Mcspadden
-Mcquaid
-Mcnatt
-Manigault
-Maney
-Mager
-Lung
-Lukes
-Lopresti
-Liriano
-Lipton
-Letson
-Lechuga
-Lazenby
-Lauria
-Larimore
-Kwok
-Kwak
-Krupp
-Krupa
-Krum
-Kopec
-Kinchen
-Kifer
-Kerney
-Kerner
-Kennison
-Kegley
-Kays
-Karcher
-Justis
-Johson
-Jellison
-Janke
-Isabell
-Huskins
-Holzman
-Hollie
-Hinojos
-Highland
-Hefley
-He
-Hatmaker
-Harte
-Halloway
-Hallenbeck
-Goodwyn
-Glaspie
-Gillian
-Geise
-Fullwood
-Fryman
-Frew
-Frakes
-Fraire
-Farrer
-Enlow
-Engen
-Ellzey
-Eckles
-Earles
-Ealy
-Dunkley
-Drinkard
-Dreiling
-Draeger
-Dinardo
-Dills
-Desroches
-Desantiago
-Current
-Curlee
-Crumbley
-Critchlow
-Coury
-Courtright
-Coffield
-Cleek
-Christen
-Charpentier
-Cardone
-Caples
-Cantin
-Buntin
-Bugbee
-Brinkerhoff
-Brackin
-Bourland
-Bohl
-Bogdan
-Blassingame
-Beacham
-Banning
-Auguste
-Andreasen
-Amann
-Almon
-Alejo
-Adelman
-Abston
-Zeno
-Yerger
-Wymer
-Woodberry
-Windley
-Whiteaker
-Westfield
-Weibel
-Wanner
-Waldrep
-Vital
-Villani
-Vanarsdale
-Utterback
-Updike
-Triggs
-Topete
-Tolar
-Tigner
-Thoms
-Tauber
-Tarvin
-Tally
-Swiney
-Sweatman
-Studebaker
-Streets
-Stennett
-States
-Starrett
-Stannard
-Stalvey
-Sonnenberg
-Smithey
-Sieber
-Sickles
-Shinault
-Segars
-Sanger
-Salmeron
-Rothe
-Rizzi
-Rine
-Ricard
-Restrepo
-Ralls
-Ragusa
-Quiroga
-Ping
-Phung
-Pero
-Pegg
-Pavlik
-Papenfuss
-Oropeza
-Omar
-Okane
-Neer
-Nee
-Nathaniel
-Mudge
-Mozingo
-Molinaro
-Mikel
-Mcvicker
-Mcgarvey
-Mcfalls
-Mccraney
-Matus
-Magers
-Llanos
-Livermore
-Liss
-Linehan
-Leto
-Leitner
-Laymon
-Lawing
-Lawerence
-Lacourse
-Kwong
-Kollar
-Kneeland
-Keo
-Kennett
-Kellett
-Kangas
-Janzen
-Hutter
-Huse
-Huling
-Hoss
-Hohn
-Hofmeister
-Hewes
-Hern
-Harjo
-Habib
-Gust
-Guice
-Grullon
-Greggs
-Grayer
-Granier
-Grable
-Gowdy
-Giannini
-Getchell
-Gartman
-Garnica
-Ganey
-Gallimore
-Fray
-Fetters
-Fergerson
-Farlow
-Fagundes
-Exley
-Esteves
-Enders
-Edenfield
-Easterwood
-Drakeford
-Dipasquale
-Desousa
-Deshields
-Deeter
-Dedmon
-Debord
-Daughtery
-Cutts
-Courtemanche
-Coursey
-Copple
-Coomes
-Collis
-Coll
-Cogburn
-Clopton
-Choquette
-Chaidez
-Castrejon
-Calhoon
-Burbach
-Bulloch
-Buchman
-Bruhn
-Bohon
-Blough
-Bien
-Belmont
-Baynes
-Barstow
-Zeman
-Zackery
-Yardley
-Yamashita
-Wulff
-Wilken
-Wiliams
-Wickersham
-Wible
-Whipkey
-Wedgeworth
-Walmsley
-Walkup
-Vreeland
-Verrill
-Valera
-Umana
-Traub
-Timothy
-Swingle
-Swing
-Summey
-Stroupe
-Stockstill
-Steffey
-Stefanski
-Statler
-Stapp
-Speights
-Sons
-Solari
-Soderberg
-Slick
-Shunk
-Shorey
-Shewmaker
-Sheilds
-Schiffer
-Schank
-Schaff
-Sagers
-Rodger
-Rochon
-Riser
-Rickett
-Reale
-Raglin
-Poon
-Polly
-Polen
-Plata
-Pitcock
-Percival
-Palen
-Pahl
-Orona
-Oberle
-Nocera
-Navas
-Nault
-Mullings
-Mouser
-Moos
-Montejano
-Monreal
-Minick
-Middlebrook
-Meece
-Mcmillion
-Mccullen
-Mauck
-Marshburn
-Maillet
-Mahaney
-Magner
-Maclin
-Lucey
-Litteral
-Lippincott
-Leite
-Leis
-Leaks
-Laurie
-Lamarre
-Kost
-Jurgens
-Jesus
-Jerkins
-Jager
-Hurwitz
-Hughley
-Hotaling
-Horstman
-Hohman
-Hocker
-Hively
-Hipps
-Hile
-Hessler
-Hermanson
-Hepworth
-Henn
-Helland
-Hedlund
-Harkless
-Haigler
-Gutierez
-Gum
-Grindstaff
-Glantz
-Giardina
-Gerken
-Gadsden
-Freda
-Finnerty
-Feld
-Farnum
-Encinas
-Elton
-Eager
-Drakes
-Dennie
-Cutlip
-Curtsinger
-Couto
-Cortinas
-Corby
-Choice
-Chiasson
-Carle
-Carballo
-Brindle
-Borum
-Bober
-Blagg
-Birk
-Berthiaume
-Beahm
-Batres
-Basnight
-Barbara
-Backes
-Axtell
-Aust
-Au
-Atterberry
-Alvares
-Alt
-Alegria
-Abe
-Yow
-Yip
-Woodell
-Wojciechowski
-Winfree
-Winbush
-Wiest
-Wesner
-Wax
-Wamsley
-Wakeman
-Verner
-Truex
-Trafton
-Toman
-Thorsen
-Thor
-Theus
-Tellier
-Tallant
-Szeto
-Strope
-Stills
-Stage
-Sorg
-Simkins
-Shuey
-Shaul
-Servin
-Serio
-Serafin
-Senior
-Sebring
-Salguero
-Saba
-Ryerson
-Rudder
-Ruark
-Rother
-Rohrbaugh
-Rohrbach
-Rohan
-Rogerson
-Risher
-Rigg
-Reeser
-Pryce
-Prokop
-Prins
-Priebe
-Prejean
-Pinheiro
-Petrone
-Petri
-Penson
-Pearlman
-Parikh
-Pal
-Pair
-Natoli
-Murakami
-Mullikin
-Mullane
-Motes
-Morningstar
-Monks
-Mcveigh
-Mcgrady
-Mcgaughey
-Mccurley
-Masi
-Marchan
-Manske
-Maine
-Maez
-Lusby
-Linde
-Lile
-Likens
-Licon
-Leroux
-Lemaire
-Legette
-Lax
-Laskey
-Laprade
-Laplant
-Lady
-Kolar
-Kittredge
-Kinley
-Kerber
-Kanagy
-Johannes
-Jetton
-Jayne
-January
-Janik
-Ippolito
-Inouye
-Hunsinger
-Howley
-Howery
-Horrell
-Hoosier
-Holthaus
-Hiner
-Hilson
-Hilderbrand
-Hasan
-Hartzler
-Harnish
-Harada
-Hansford
-Halligan
-Hagedorn
-Gwynn
-Gudino
-Greenstein
-Greear
-Gracey
-Goudeau
-Gose
-Goodner
-Ginsburg
-Gerth
-Gerner
-Fyfe
-Fujii
-Frier
-Frenette
-Folmar
-Fleisher
-Fleischmann
-Fetzer
-Fern
-Eisenman
-Earhart
-Dupuy
-Dunkelberger
-Drummer
-Drexler
-Dillinger
-Dilbeck
-Diana
-Dewald
-Demby
-Deford
-Daniell
-Dake
-Craine
-Como
-Clever
-Chesnut
-Casady
-Carstens
-Carrick
-Carino
-Carignan
-Canchola
-Cale
-Bushong
-Burman
-Buono
-Brownlow
-Broach
-Britten
-Brickhouse
-Boyden
-Boulton
-Borne
-Borland
-Bohrer
-Blubaugh
-Bever
-Berggren
-Benevides
-Arocho
-Arends
-Amezcua
-Almendarez
-Zalewski
-Witzel
-Winkfield
-Wilhoite
-Vara
-Vangundy
-Vanfleet
-Vanetten
-Vandergriff
-Urbanski
-Tyrell
-Troiano
-Tickle
-Thibodaux
-Straus
-Stoneking
-Stjean
-Stillings
-Stiff
-Stange
-Square
-Speicher
-Speegle
-Sowa
-Smeltzer
-Slawson
-Simmonds
-Shuttleworth
-Serpa
-Senger
-Seidman
-Schweiger
-Schloss
-Schimmel
-Schechter
-Sayler
-Sabb
-Sabatini
-Ronan
-Rodiguez
-Riggleman
-Richins
-Reep
-Reamer
-Prunty
-Porath
-Plunk
-Piland
-Philbrook
-Pettitt
-Perna
-Peralez
-Pascale
-Padula
-Oboyle
-Nivens
-Nickols
-Murph
-Mundt
-Munden
-Montijo
-Mcmanis
-Mcgrane
-Mccrimmon
-Manzi
-Mangold
-Malick
-Mahar
-Maddock
-Lust
-Losey
-Loop
-Litten
-Liner
-Leff
-Leedy
-Leavell
-Ladue
-Krahn
-Kluge
-Junker
-Iversen
-Imler
-Hurtt
-Huizar
-Hubbert
-Howington
-Hollomon
-Holdren
-Hoisington
-Hise
-Heiden
-Hauge
-Hartigan
-Gutirrez
-Griffie
-Greenhill
-Gratton
-Granata
-Gottfried
-Gertz
-Gautreaux
-Furry
-Furey
-Funderburg
-Flippen
-Fitzgibbon
-Fergus
-Felice
-Eye
-Dyar
-Drucker
-Donoghue
-Dildy
-Devers
-Detweiler
-Despres
-Denby
-Degeorge
-Cueto
-Cranston
-Courville
-Clukey
-Cirillo
-Chon
-Chivers
-Caudillo
-Catt
-Butera
-Bulluck
-Buckmaster
-Braunstein
-Bracamonte
-Bourdeau
-Border
-Bonnette
-Bobadilla
-Boaz
-Blackledge
-Beshears
-Bernhard
-Bergeson
-Baver
-Barthel
-Balsamo
-Bak
-Aziz
-Awad
-Authement
-Altom
-Altieri
-Abels
-Zigler
-Zhu
-Younker
-Yeomans
-Yearwood
-Wurster
-Winget
-Whitsett
-Wechsler
-Weatherwax
-Wathen
-Warriner
-Wanamaker
-Walraven
-Viens
-Vandemark
-Vancamp
-Uchida
-Triana
-Tinoco
-Terpstra
-Tellis
-Tarin
-Taranto
-Takacs
-Studdard
-Struthers
-Strout
-Stiller
-Spataro
-Soderquist
-Sliger
-Silberman
-Shurtleff
-Sheetz
-Schillinger
-Ritch
-Reif
-Raybon
-Ratzlaff
-Radley
-Putt
-Putney
-Prime
-Press
-Pinette
-Piner
-Petrin
-Parise
-Osbourne
-Nyman
-Northington
-Noblitt
-Nishimura
-Nell
-Neher
-Nalls
-Naccarato
-Mucha
-Mounce
-Miron
-Millis
-Meaney
-Mcnichols
-Mckinnis
-Mcjunkin
-Mcduffy
-Max
-Marcello
-Manrique
-Mannion
-Mangual
-Malveaux
-Mains
-Lumsden
-Lucien
-Lohmann
-Lipe
-Lightsey
-Lemasters
-Leist
-Laxton
-Laverriere
-Latorre
-Lamons
-Kral
-Kopf
-Knauer
-Kitt
-Kaul
-Karas
-Kamps
-Jusino
-Janis
-Islam
-Hullinger
-Huges
-Hornung
-Hiser
-Hempel
-Helsel
-Hassinger
-Hargraves
-Hammes
-Hallberg
-Gutman
-Gumbs
-Gruver
-Graddy
-Gonsales
-Goncalves
-Glennon
-Gilford
-Geno
-Freshour
-Flippo
-Fifer
-Few
-Fermin
-Fason
-Farrish
-Fallin
-Ewert
-Estepp
-Escudero
-Ensminger
-Emmanuel
-Emberton
-Elms
-Ellerbe
-Eide
-Dysart
-Dougan
-Dierking
-Dicus
-Detrick
-Deroche
-Depue
-Demartino
-Delosreyes
-Dalke
-Culbreath
-Crownover
-Crisler
-Crass
-Corsi
-Chagnon
-Centers
-Cavanagh
-Casson
-Carollo
-Cadwallader
-Burnley
-Burciaga
-Burchard
-Broadhead
-Boris
-Booze
-Bolte
-Body
-Berens
-Bellman
-Bellard
-Baril
-Arden
-Antonucci
-Amado
-Allie
-Wolfgram
-Winsor
-Wimbish
-Wilbert
-Wier
-Wallach
-Viveros
-Vento
-Varley
-Vanslyke
-Vangorder
-Touchstone
-Tomko
-Tiemann
-Throop
-Tamura
-Talmadge
-Swayze
-Sturdevant
-Strauser
-Stolz
-Stenberg
-Stayton
-Spohn
-Spillers
-Spillane
-Sluss
-Sloane
-Slavens
-Simonetti
-Shofner
-Shead
-Senecal
-Seales
-Schueler
-Schley
-Schacht
-Sauve
-Sarno
-Salsbury
-Rothschild
-Rosier
-Rines
-Reveles
-Rein
-Redus
-Redfern
-Reck
-Ranney
-Raggs
-Prout
-Prill
-Preble
-Prager
-Plemons
-Pippen
-Pilon
-Piccirillo
-Pewitt
-Pesina
-Pecora
-Otani
-Orsini
-Ollie
-Oestreich
-Odea
-Ocallaghan
-Northup
-Niehaus
-Newberg
-Nasser
-Narron
-Monarrez
-Mishler
-Mcsherry
-Mcelfresh
-Mayon
-Mauer
-Mattice
-Mash
-Marrone
-Marmolejo
-Marini
-Marie
-Mara
-Malm
-Machen
-Lunceford
-Loewen
-Liverman
-Litwin
-Linscott
-Levins
-Lenox
-Legaspi
-Leeman
-Leavy
-Lannon
-Lamson
-Lambdin
-Labarre
-Knouse
-Klemm
-Kleinschmidt
-Kirklin
-Keels
-Juliano
-Howser
-Hott
-Hosier
-Hosea
-Hopwood
-Holyfield
-Hodnett
-Hirsh
-Heimann
-Height
-Heckel
-Harger
-Hamil
-Hajek
-Gurganus
-Gunning
-Grange
-Gonzalas
-Goggins
-Gerow
-Gaydos
-Garduno
-Ganley
-Galey
-Farner
-Ester
-Engles
-Emond
-Emert
-Ellenburg
-Edick
-Duell
-Dublin
-Dorazio
-Dong
-Dimond
-Diederich
-Dewalt
-Depuy
-Dempster
-Demaria
-Dehoyos
-Dearth
-Dealba
-Dane
-Czech
-Crose
-Crespin
-Cogdill
-Clinard
-Cipriano
-Chretien
-Chalk
-Cerny
-Ceniceros
-Celestin
-Caple
-Cacho
-Burrill
-Buhr
-Buckland
-Branam
-Boysen
-Bovee
-Boos
-Boler
-Blom
-Blasko
-Beyers
-Belz
-Belmonte
-Bednarz
-Beckmann
-Beaudin
-Bazile
-Barbeau
-Balentine
-Abrahams
-Able
-Zielke
-Yunker
-Yeates
-Wrobel
-Wike
-Whisnant
-Wherry
-Wagnon
-Vogan
-Vansant
-Vannest
-Vallo
-Ullery
-Towles
-Towell
-Tiger
-Thill
-Taormina
-Tannehill
-Taing
-Storrs
-Stickles
-Stetler
-Sparling
-Solt
-Silcox
-Sheard
-Shadle
-Seman
-Selleck
-Schlemmer
-Scher
-Sapien
-Sainz
-Rumble
-Roye
-Rosamond
-Romain
-Rizzuto
-Resch
-Rentz
-Rather
-Rasch
-Ranieri
-Purtell
-Primmer
-Portwood
-Pontius
-Pons
-Pletcher
-Pledger
-Pirkle
-Pillsbury
-Pentecost
-Peng
-Paxson
-Ortez
-Organ
-Oles
-Newborn
-Mullett
-Muirhead
-Mouzon
-Mork
-Mollett
-Mohn
-Mitcham
-Melillo
-Mee
-Medders
-Mcmiller
-Mccleery
-Mccaughey
-Manders
-Mak
-Maciejewski
-Macaulay
-Lute
-Lipman
-Lewter
-Larocque
-Langton
-Kriner
-Knipp
-Killeen
-Karn
-Kalish
-Kaczor
-Jonson
-Jerez
-Jarrard
-Janda
-Hymes
-Hollman
-Hollandsworth
-Holl
-Hobdy
-Hitch
-Hennen
-Hemmer
-Hagins
-Haddox
-Guitierrez
-Guernsey
-Gorsuch
-Gholson
-Genova
-Gazaway
-Gauna
-Gammons
-Freels
-Fonville
-Fly
-Florian
-Fleet
-Fetterman
-Fava
-Farquhar
-Farish
-Fabela
-Escoto
-Eisen
-Dossett
-Dority
-Dorfman
-Demmer
-Dehn
-Dawley
-Darbonne
-Damore
-Damm
-Crosley
-Cron
-Crompton
-Crichton
-Cotner
-Cordon
-Conerly
-Colvard
-Clauson
-Chess
-Cheeseman
-Charity
-Cavallaro
-Castille
-Cabello
-Burgan
-Buffum
-Bruss
-Brassfield
-Bowerman
-Bothwell
-Borgen
-Bonaparte
-Bombard
-Boivin
-Boissonneault
-Bogner
-Bodden
-Boan
-Blanche
-Bittinger
-Bickham
-Bedolla
-Bale
-Bainbridge
-Aybar
-Avendano
-Ashlock
-Amidon
-Almanzar
-Akridge
-Ackermann
-Zager
-Yong
-Xavier
-Worrall
-Winans
-Wilsey
-Wightman
-Westrick
-Wenner
-Warne
-Warford
-Verville
-Utecht
-Upson
-Tuma
-Tseng
-Troncoso
-Trollinger
-Torbert
-Taulbee
-Sutterfield
-Stough
-Storch
-Stonebraker
-Stolle
-Stilson
-Stiefel
-Steptoe
-Stepney
-Stender
-Stemple
-Staggers
-Spurrier
-Spray
-Spinney
-Spengler
-Smartt
-Skoog
-Silvis
-Sieg
-Shuford
-Selfridge
-Seguin
-Sedgwick
-Sease
-Scotti
-Schroer
-Schlenker
-Schill
-Savarese
-Sapienza
-Sanson
-Sandefur
-Salamone
-Rusnak
-Rudisill
-Royalty
-Rothermel
-Roca
-Resendiz
-Reliford
-Rasco
-Raiford
-Quisenberry
-Quijada
-Pullins
-Puccio
-Postell
-Poppe
-Pinter
-Piche
-Petrucci
-Pellegrin
-Pelaez
-Patti
-Paton
-Pasco
-Parkes
-Paden
-Pabst
-Orchard
-Olmsted
-Newlon
-Mynatt
-Mustafa
-Mower
-Morrone
-Moree
-Moffat
-Mixson
-Minner
-Min
-Millette
-Mederos
-Mcgahan
-Mcconville
-Maughan
-Massingill
-Marano
-Macri
-Lovern
-Lichtenstein
-Leonetti
-Lehner
-Lawley
-Laramie
-Lappin
-Lahti
-Lago
-Lacayo
-Kuester
-Knee
-Kincade
-Junior
-Juhl
-Joslyn
-Jiron
-Jessop
-Jerry
-Jarosz
-Jain
-Hults
-Hoge
-Hodgins
-Hoban
-Hinkson
-Hillyard
-Herzig
-Hervey
-Henriksen
-Hawker
-Hause
-Hard
-Hankerson
-Gregson
-Golliday
-Gilcrease
-Gessner
-Gerace
-Garwood
-Garst
-Gaillard
-Flinchum
-Fishel
-Fishback
-Filkins
-Fentress
-Fabre
-Ethier
-Espana
-Eisner
-Ehrhart
-Efird
-Drennon
-Dominy
-Dominique
-Domingue
-Dipaolo
-Dinan
-Dimartino
-Deskins
-Dengler
-Defreitas
-Defranco
-Dancer
-Dahlin
-Cutshaw
-Cuthbert
-Croyle
-Crothers
-Critchfield
-Cowie
-Costner
-Coppedge
-Copes
-Ciccone
-Champ
-Cesar
-Caufield
-Capo
-Cambron
-Cambridge
-Buser
-Burnes
-Buhl
-Buendia
-Brindley
-Brecht
-Bourgoin
-Boomer
-Blackshire
-Birge
-Benninger
-Bembry
-Beil
-Begaye
-Barrentine
-Barks
-Banton
-Balmer
-Baity
-Auerbach
-Ambler
-Alexandre
-Ackerson
-Zurcher
-Zell
-Wynkoop
-Wallick
-Waid
-Vos
-Vizcaino
-Vester
-Veale
-Vandermark
-Vanderford
-Tuthill
-Trivette
-Thiessen
-Tewksbury
-Tao
-Tabron
-Swim
-Swasey
-Swanigan
-Stoughton
-Stoudt
-Stimson
-Stecker
-Stead
-Stall
-Spady
-Souther
-Smoak
-Sklar
-Simcox
-Sidwell
-Sharon
-Seybert
-Sesco
-Seeman
-Seaborn
-Schwenk
-Schmeling
-Rossignol
-Robillard
-Robicheaux
-Riveria
-Rippeon
-Ridgley
-Remaley
-Rehkop
-Reddish
-Reach
-Rauscher
-Rachel
-Quirion
-Pusey
-Pruden
-Pressler
-Potvin
-Pospisil
-Paradiso
-Pangburn
-Palmateer
-Ownby
-Otwell
-Osterberg
-Osmond
-Olsson
-Old
-Oberlander
-Nusbaum
-Novack
-Nokes
-Nicastro
-Nehls
-Nay
-Naber
-Mulhern
-Motter
-Moretz
-Milian
-Mercedes
-Mckeel
-Mcclay
-Mccart
-Matsuda
-Mary
-Martucci
-Marple
-Marko
-Marciniak
-Manes
-Mancia
-Maker
-Macrae
-Lybarger
-Lint
-Lineberger
-Levingston
-Lecroy
-Lattimer
-Laseter
-Kulick
-Krier
-Knutsen
-Klem
-Kinne
-Kinkade
-Ketterman
-Kerstetter
-Kersten
-Karam
-Jury
-Joshi
-Jin
-Jent
-Jefcoat
-Hillier
-Hillhouse
-Hettinger
-Henthorn
-Henline
-Helzer
-Heitzman
-Heineman
-Heenan
-Haughton
-Haris
-Harbert
-Haman
-Grinstead
-Gremillion
-Gorby
-Giraldo
-Gioia
-Gerardi
-Geraghty
-Gaunt
-Gatson
-Gardin
-Gans
-Gammill
-Games
-Gain
-Friedlander
-Frahm
-Fossett
-Fosdick
-Forth
-Forbush
-Fondren
-Fleckenstein
-Fitchett
-Filer
-Feliz
-Feist
-Ewart
-Evelyn
-Esters
-Elsner
-Edgin
-Eddie
-Easterly
-Dussault
-Durazo
-Don
-Devereaux
-Deshotel
-Deckert
-Dargan
-Dare
-Cornman
-Conkle
-Condit
-Commander
-Claunch
-Clabaugh
-Chute
-Cheesman
-Chea
-Charney
-Charleston
-Casella
-Carone
-Carbonell
-Canipe
-Campana
-Calles
-Cabezas
-Cabell
-Buttram
-Bustillos
-Buskirk
-Boyland
-Bourke
-Blakeley
-Big
-Berumen
-Berrier
-Bench
-Belli
-Behrendt
-Baumbach
-Bartsch
-Baney
-Arambula
-Alldredge
-Allbritton
-Ziemba
-Zanders
-Youngquist
-Yoshioka
-Yohe
-Wunder
-Woodfin
-Wojtowicz
-Winkel
-Wilmore
-Willbanks
-Wesolowski
-Wendland
-Walko
-Votaw
-Vanek
-Uriarte
-Urbano
-Turnipseed
-Triche
-Trautman
-Towler
-Tokarz
-Temples
-Tefft
-Teegarden
-Syed
-Swigart
-Stryker
-Stoller
-Stapler
-Stansfield
-Smit
-Smelley
-Sicard
-Shulman
-Shew
-Shear
-Sheahan
-Sharpton
-Selvidge
-Schlesinger
-Savell
-Sandford
-Sabatino
-Rosenbloom
-Roepke
-Rish
-Rhames
-Renken
-Reger
-Rappaport
-Quarterman
-Puig
-Prasad
-Poplar
-Pizano
-Pigott
-Pick
-Phair
-Petrick
-Patt
-Pascua
-Paramore
-Papineau
-Olivieri
-Ogren
-Norden
-Noga
-Nisbet
-Munk
-Munch
-Mui
-Morvant
-Moro
-Moloney
-Merz
-Meng
-Meltzer
-Mellinger
-Mehl
-Mcnealy
-Mckernan
-Mchaney
-Mccleskey
-Mcandrews
-Mayton
-Mayor
-Markert
-Maresca
-Marcellus
-Maner
-Mandujano
-Malpass
-Macintyre
-Lytton
-Lyall
-Lummus
-Longshore
-Longfellow
-Lokey
-Locher
-Leverette
-Lepe
-Lefever
-Leeson
-Lederer
-Lampert
-Lagrone
-La
-Kreider
-Korth
-Knopf
-Kleist
-Kiss
-Keltner
-Kelling
-Kaspar
-Kappler
-Justin
-Josephs
-Jiang
-Huckins
-Horace
-Holub
-Hofstetter
-Hoehn
-Higginson
-Hennings
-Heid
-Havel
-Hauer
-Harnden
-Hargreaves
-Hanger
-Guild
-Guidi
-Grate
-Grandy
-Grandstaff
-Goza
-Goodridge
-Goodfellow
-Goggans
-Godley
-Giusti
-Gilyard
-Geoghegan
-Galyon
-Gaeta
-Funes
-Font
-Flor
-Flanary
-Fales
-Erlandson
-Ellett
-Elia
-Edinger
-Dziedzic
-Duerr
-Draughn
-Donoho
-Dimatteo
-Devos
-Dematteo
-Degnan
-Darlington
-Danis
-Dam
-Dahlstrom
-Dahlke
-Czajkowski
-Cumbie
-Culbert
-Crosier
-Croley
-Corry
-Clinger
-Cheshire
-Chalker
-Cephas
-Caywood
-Cavalier
-Capehart
-Cales
-Cadiz
-Bussiere
-Burriss
-Burkart
-Brundidge
-Bronstein
-Breeze
-Bradt
-Boydston
-Bostrom
-Borel
-Bolles
-Blay
-Blackwelder
-Bissett
-Bevers
-Bester
-Bernardino
-Benefiel
-Belote
-Beedle
-Beckles
-Baysinger
-Bassler
-Bartee
-Barlett
-Bargas
-Barefield
-Baptista
-Arterburn
-Armas
-Apperson
-Amoroso
-Amedee
-Zullo
-Zellner
-Yelton
-Willems
-Wilkin
-Wiggin
-Widman
-Welk
-Weingarten
-Walla
-Viers
-Vess
-Verdi
-Veazey
-Vannote
-Tullos
-Trudell
-Trower
-Trosper
-Trimm
-Trew
-Tousignant
-Topp
-Tocco
-Thoreson
-Terhune
-Tatom
-Suniga
-Sumter
-Steeves
-Stansell
-Soltis
-Sloss
-Slaven
-Sing
-Shisler
-Sheriff
-Shanley
-Servantes
-Selders
-Segrest
-Seese
-Seeber
-Schaible
-Savala
-Sartor
-Rutt
-Rumbaugh
-Ruis
-Roten
-Roessler
-Ritenour
-Riney
-Restivo
-Rene
-Renard
-Rakestraw
-Rake
-Rachal
-Quiros
-Pullin
-Prudhomme
-Primeaux
-Prestridge
-Presswood
-Ponte
-Polzin
-Poarch
-Pittenger
-Piggott
-Pickell
-Phaneuf
-Parvin
-Parmley
-Palmeri
-Paisley
-Ozment
-Ormond
-Ordaz
-Ono
-Olea
-Obanion
-Oakman
-Novick
-Nicklas
-Nemec
-Nappi
-Mund
-Morfin
-Mera
-Melgoza
-Melby
-Mcgoldrick
-Mcelwain
-Mcchristian
-Mccaw
-Marquart
-Marlatt
-Markovich
-Mahr
-Lupton
-Lucus
-Lorusso
-Lerman
-Leddy
-Leaman
-Leachman
-Lavalle
-Laduke
-Kummer
-Koury
-Konopka
-Koh
-Koepp
-Kloss
-Klock
-Khalil
-Kernan
-Kappel
-Jakes
-Inoue
-Hutsell
-Howle
-Honore
-Hole
-Hockman
-Hockaday
-Hiltz
-Hetherington
-Hesser
-Hershman
-Heng
-Heffron
-Headen
-Haskett
-Hartline
-Harned
-Guillemette
-Guglielmo
-Guercio
-Greenbaum
-Goris
-Glines
-Gilmour
-Gardella
-Gadd
-Gabler
-Gabbert
-Fuselier
-Freudenburg
-Fragoso
-Follis
-Flemings
-Feltman
-Febus
-Farren
-Fallis
-Evert
-Ekstrom
-Eastridge
-Dyck
-Dufault
-Dubreuil
-Dresser
-Drapeau
-Domingues
-Dolezal
-Dinkel
-Didonato
-Devitt
-Devane
-Demott
-Daughtrey
-Daubert
-Das
-Darrell
-Creason
-Crary
-Costilla
-Chipps
-Cheatwood
-Carmean
-Canton
-Caffrey
-Burgher
-Buker
-Brunk
-Brodbeck
-Brantner
-Brandy
-Bolivar
-Boerner
-Bodkin
-Biel
-Betty
-Bencomo
-Bellino
-Beliveau
-Beauvais
-Beaupre
-Baylis
-Baskett
-Barcus
-Barbera
-Baltz
-Asay
-Arney
-Arcuri
-Ankney
-Agostini
-Addy
-Zwilling
-Zubia
-Zollinger
-Zeitz
-Yard
-Yanes
-Winship
-Winningham
-Wickline
-Webre
-Waddington
-Vosburgh
-Vessels
-Verrett
-Vedder
-Varnum
-Vandeventer
-Vacca
-Usry
-Towry
-Touchet
-Tookes
-Tonkin
-Timko
-Tibbitts
-Thedford
-Tarleton
-Talty
-Talamantez
-Tafolla
-Sugg
-Strecker
-Stirling
-Steffan
-Spiva
-Slape
-Siemens
-Shatzer
-Seyler
-Seamans
-Schmaltz
-Schipper
-Sasso
-Sailor
-Ruppe
-Runner
-Royals
-Roudebush
-Ripple
-Riemer
-Richarson
-Revilla
-Reichenbach
-Ratley
-Railsback
-Quayle
-Poplin
-Poorman
-Ponton
-Polo
-Pollitt
-Poitras
-Piscitelli
-Piedra
-Pickles
-Pew
-Perera
-People
-Penwell
-Pelt
-Pauline
-Parkhill
-Paladino
-Ore
-Oram
-Olmo
-Oliveras
-Olivarria
-Ogorman
-Near
-Naron
-Na
-Muncie
-Mowbray
-Morones
-Moretti
-Monn
-Mitts
-Minks
-Minarik
-Mimms
-Milliron
-Millington
-Millhouse
-Messersmith
-Mcnett
-Mckinstry
-Mcgeorge
-Mcdill
-Mcateer
-Mazzeo
-Matchett
-Mahood
-Mabery
-Lundell
-Louden
-Losoya
-Lisk
-Lezama
-Leib
-Lebo
-Lanoue
-Lanford
-Lafortune
-Kump
-Krone
-Kreps
-Kott
-Kopecky
-Kolodziej
-Knuckles
-Kinman
-Kimmons
-Kelty
-Kaster
-Karlson
-Kania
-Jules
-Joyal
-Job
-Jenner
-Jasinski
-Jandreau
-Isenhour
-Hunziker
-Huhn
-Houde
-Houchins
-Holtman
-Hodo
-Heyman
-Hentges
-Hedberg
-Hayne
-Haycraft
-Harshbarger
-Harshaw
-Harriss
-Haring
-Hansell
-Hanford
-Handler
-Hamburg
-Hamblen
-Gunnell
-Groat
-Gorecki
-Gochenour
-Gleeson
-Genest
-Geiser
-Gabriele
-Fulghum
-Friese
-Fridley
-Freeborn
-Frailey
-Flaugher
-Fiala
-Ettinger
-Etheredge
-Espitia
-Eriksen
-Engelbrecht
-Engebretson
-Elie
-Eickhoff
-Edney
-Edelen
-Eberhard
-Eastin
-Eakes
-Driggs
-Doner
-Donaghy
-Disalvo
-Deshong
-Dahms
-Dahlquist
-Curren
-Cripe
-Cree
-Creager
-Corle
-Conatser
-Commons
-Coggin
-Coder
-Coaxum
-Closson
-Clodfelter
-Classen
-Chittenden
-Castilleja
-Casale
-Cartee
-Carriere
-Canup
-Canizales
-Burgoon
-Bunger
-Bugarin
-Buchanon
-Bruning
-Bruck
-Brookes
-Broadwell
-Brier
-Brekke
-Breese
-Bracero
-Bowley
-Bowersox
-Bose
-Bogar
-Blossom
-Blauser
-Blacker
-Bjorklund
-Belair
-Baumer
-Basler
-Barb
-Baltimore
-Baize
-Baden
-Auman
-Amundsen
-Amore
-Alvarenga
-Adan
-Adamczyk
-Yerkes
-Yerby
-Yawn
-Yamaguchi
-Worthey
-Wolk
-Wixom
-Wiersma
-Wieczorek
-Whiddon
-Weyer
-Wetherington
-Wein
-Watchman
-Warf
-Wansley
-Vesely
-Velazco
-Vannorman
-Valasquez
-Utz
-Urso
-Turco
-Turbeville
-Trivett
-Torrance
-Toothaker
-Toohey
-Tondreau
-Thaler
-Sylvain
-Swindler
-Swigert
-Swider
-Stiner
-Stever
-Steffes
-Stampley
-Stair
-Smidt
-Skeete
-Silvestre
-Shy
-Shutts
-Shock
-Shealey
-Seigler
-Schweizer
-Schuldt
-Schlichting
-Scherr
-Saulsberry
-Saner
-Rosin
-Rosato
-Roling
-Rohn
-Rix
-Rister
-Remley
-Remick
-Recinos
-Ramm
-Raabe
-Pursell
-Poythress
-Poli
-Pokorny
-Plum
-Pettry
-Petrey
-Petitt
-Penman
-Payson
-Paquet
-Pappalardo
-Outland
-Oscar
-Orenstein
-Nuttall
-Nuckols
-Nott
-Nimmo
-Murtagh
-Mousseau
-Moulder
-Mooneyhan
-Moak
-Minch
-Miera
-Mercuri
-Meighan
-Mcnelly
-Mcguffin
-Mccreery
-Mcclaskey
-Man
-Mainor
-Luongo
-Lundstrom
-Loughman
-Loose
-Lobo
-Lobb
-Linhart
-Liberty
-Lever
-Leu
-Leiter
-Lehoux
-Lehn
-Lares
-Lapan
-Langhorne
-Lamon
-Ladwig
-Ladson
-Kuzma
-Kreitzer
-Knop
-Keech
-Kea
-Kadlec
-Jo
-Jhonson
-Jantz
-Inglis
-Husk
-Hulme
-Housel
-Hofman
-Hillery
-Heidenreich
-Heaps
-Haslett
-Harting
-Hartig
-Hamler
-Halton
-Hallum
-Gutierres
-Guida
-Guerrier
-Grossi
-Gress
-Greenhalgh
-Gravelle
-Gow
-Goslin
-Gonyea
-Gipe
-Gerstner
-Gasser
-Garceau
-Gannaway
-Gama
-Gallop
-Gaiser
-Fullilove
-Foutz
-Fossum
-Flannagan
-Farrior
-Faller
-Ericksen
-Entrekin
-Enochs
-Englund
-Ellenberger
-Eastland
-Earwood
-Dudash
-Du
-Drozd
-Desoto
-Delph
-Dekker
-Dejohn
-Degarmo
-Defeo
-Defalco
-Deblois
-Dacus
-Cudd
-Crossen
-Crooms
-Cronan
-Costin
-Costanza
-Cordray
-Comerford
-Collie
-Colegrove
-Coldwell
-Claassen
-Chartrand
-Castiglione
-Carte
-Cardella
-Carberry
-Capp
-Capobianco
-Cangelosi
-Buch
-Brunell
-Brucker
-Brockett
-Brizendine
-Brinegar
-Brimer
-Brase
-Bosque
-Bonk
-Bolger
-Bohanon
-Bohan
-Blazek
-Berning
-Bergan
-Bennette
-Beauchemin
-Battiste
-Barra
-Balogh
-Avis
-Avallone
-Aubry
-Ashcroft
-Asencio
-Arledge
-Anchondo
-Amy
-Alvord
-Acheson
-Zaleski
-Yonker
-Wyss
-Wycoff
-Woodburn
-Wininger
-Winders
-Willmon
-Wiechmann
-Westley
-Weatherholt
-Warnick
-Wardle
-Warburton
-Volkert
-Virgin
-Villanveva
-Veit
-Vass
-Vanallen
-Tung
-Toribio
-Toothman
-Tiggs
-Thornsberry
-Thome
-Tepper
-Teeple
-Tebo
-Tassone
-Tann
-Sultan
-Stucker
-Stotler
-Stoneman
-Stehle
-Stanback
-Stallcup
-Spurr
-Speers
-Spada
-Solum
-Smolen
-Sinn
-Silvernail
-Sholes
-Shives
-Shain
-Secrest
-Seagle
-Schuette
-Schoch
-Schnieders
-Schild
-Schiavone
-Schiavo
-Scharff
-Santee
-Sandell
-Salvo
-Rollings
-Rollin
-Rivenburg
-Ritzman
-Rist
-Rio
-Ricardo
-Reynosa
-Retana
-Reiber
-Regnier
-Rarick
-Ransome
-Rall
-Propes
-Prall
-Poyner
-Ponds
-Poitra
-Plaster
-Pippins
-Pinion
-Piccolo
-Phu
-Perillo
-Penrose
-Pendergraft
-Pelchat
-Peed
-Patenaude
-Palko
-Odoms
-Oddo
-Novoa
-Noone
-Newburn
-Negri
-Nantz
-Mosser
-Moshier
-Molter
-Molinari
-Moler
-Millman
-Meurer
-Mendel
-Mcray
-Mcnicholas
-Mcnerney
-Mckillip
-Mcilvain
-Mcadory
-Matter
-Master
-Marmol
-Marinez
-Manzer
-Mankin
-Makris
-Majeski
-Magnus
-Maffei
-Luoma
-Luman
-Luebke
-Luby
-Lomonaco
-Loar
-Litchford
-Lintz
-Licht
-Levenson
-Legge
-Laughter
-Lanigan
-Krom
-Kreger
-Koop
-Kober
-Klima
-Kitterman
-Kinkead
-Kimbell
-Kilian
-Kibbe
-Kendig
-Kemmer
-Kash
-Jenkin
-Inniss
-Hurlbut
-Hunsucker
-Hugo
-Huckabee
-Hoxie
-Hoglund
-Hockensmith
-Hoadley
-Hinkel
-Higuera
-Herrman
-Heiner
-Hausmann
-Haubrich
-Hassen
-Hanlin
-Hallinan
-Haglund
-Hagberg
-Gullo
-Gullion
-Groner
-Greenwalt
-Grand
-Goodwill
-Gong
-Gobert
-Glowacki
-Glessner
-Gines
-Gildersleeve
-Gildea
-Gerke
-Gerhard
-Gebhard
-Gatton
-Gately
-Galasso
-Fralick
-Fouse
-Fluharty
-Faucette
-Fairfax
-Evanoff
-Elser
-Ellard
-Egerton
-Edie
-Ector
-Ebling
-Dunkel
-Duhart
-Drysdale
-Dostal
-Dorey
-Dolph
-Doles
-Dismukes
-Digregorio
-Digby
-Dewees
-Deramus
-Denniston
-Dennett
-Deloney
-Delaughter
-Darcy
-Cuneo
-Cumberland
-Crotts
-Crosswhite
-Cremeans
-Creasey
-Cottman
-Cothern
-Costales
-Cosner
-Corpus
-Cora
-Constable
-Colligan
-Cobble
-Clutter
-Chupp
-Chevez
-Chatmon
-Chaires
-Caplan
-Caffee
-Cabana
-Burrough
-Burditt
-Buckler
-Brunswick
-Brouillard
-Broady
-Bowlby
-Bouley
-Borgman
-Boltz
-Boddy
-Blackston
-Birdsell
-Bedgood
-Bate
-Basil
-Bartos
-Barriga
-Barrie
-Barna
-Barcenas
-Banach
-Baccus
-Auclair
-Ashman
-Arter
-Arendt
-Ansell
-Allums
-Allsop
-Allender
-Alber
-Albarran
-Adelson
-Zoll
-Wysong
-Wimbley
-Wildes
-Whitis
-Whitehill
-Whicker
-Weymouth
-Well
-Weldy
-Wark
-Wareham
-Waddy
-Viveiros
-Vito
-Vides
-Vecchio
-Vath
-Vandoren
-Vanderhoof
-Unrein
-Uecker
-Tsan
-Trepanier
-Tregre
-Torkelson
-Ton
-Tobler
-Tineo
-Timmer
-Swopes
-Swofford
-Sweeten
-Swarts
-Summerfield
-Sumler
-Stucky
-Strozier
-Stigall
-Stickel
-Stennis
-Stelzer
-Steely
-Solar
-Slayden
-Skillern
-Shurtz
-Shelor
-Shellenbarger
-Shand
-Shabazz
-Seo
-Scroggs
-Schwandt
-Schrecengost
-Schoenrock
-Schirmer
-Sandridge
-Ruzicka
-Rozek
-Rowlands
-Roser
-Rosendahl
-Romanowski
-Romaine
-Rolston
-Rink
-Riggio
-Reichman
-Redondo
-Reay
-Rawlinson
-Raskin
-Raine
-Quandt
-Purpura
-Purdue
-Pruneda
-Prevatte
-Prettyman
-Pinedo
-Pierro
-Pidgeon
-Phillippi
-Pfeil
-Penix
-Peasley
-Paro
-Overall
-Ospina
-Ortegon
-Ogata
-Ogara
-Normandin
-Nordman
-Nims
-Nassar
-Motz
-Morlan
-Mooring
-Moles
-Moir
-Mizrahi
-Mire
-Minaya
-Millwood
-Mikula
-Messmer
-Meikle
-Mctaggart
-Mcgonagle
-Mcewan
-Mccasland
-Mccane
-Mccaffery
-Mcalexander
-Mattocks
-Mattie
-Matranga
-Martone
-Markland
-Maravilla
-Manno
-Manly
-Mancha
-Mallery
-Magno
-Lorentz
-Locklin
-Livingstone
-Lipford
-Lininger
-Line
-Liao
-Lepley
-Leming
-Lemelin
-Leadbetter
-Lawhon
-Lattin
-Langworthy
-Lampman
-Lambeth
-Lamarr
-Lahey
-Krajewski
-Klopp
-Kinnison
-Kestner
-Kerry
-Kennell
-Karim
-Jozwiak
-Jakubowski
-Jagger
-Ivery
-Ishmael
-Iliff
-Iddings
-Hudkins
-Houseman
-Holz
-Holderman
-Hoehne
-Highfill
-Hiett
-Heskett
-Heldt
-Hedman
-Hayslett
-Hatchell
-Hasse
-Hamon
-Hamada
-Hakala
-Haislip
-Haffey
-Hackbarth
-Guo
-Gullickson
-Guerrette
-Guan
-Greenblatt
-Goudreau
-Gongora
-Godbout
-Glaude
-Gills
-Gillison
-Gigliotti
-Gargano
-Gallucci
-Galli
-Galante
-Frasure
-Fodor
-Fizer
-Fishburn
-Finkbeiner
-Finck
-Fager
-Estey
-Espiritu
-Eppinger
-Epperly
-Emig
-Eckley
-Dray
-Dorsch
-Dille
-Devita
-Deslauriers
-Demery
-Delorme
-Delbosque
-Dauphin
-Dantonio
-Curd
-Crume
-Crown
-Cozad
-Cossette
-Comacho
-Climer
-Chadbourne
-Cespedes
-Cayton
-Castaldo
-Carpino
-Carls
-Capozzi
-Canela
-Cadet
-Buzard
-Busick
-Burlison
-Brinkmann
-Bridgeforth
-Bourbeau
-Bornstein
-Boots
-Bonfiglio
-Boice
-Boese
-Biondi
-Bilski
-Betton
-Berwick
-Berlanga
-Behan
-Becraft
-Barrientez
-Banh
-Balke
-Balderrama
-Bahe
-Bachand
-Atlas
-Armer
-Arceo
-Aliff
-Alatorre
-Zermeno
-Zane
-Younce
-You
-Yeoman
-Yamasaki
-Wroten
-Worm
-Woodby
-Winer
-Wilmer
-Willits
-Wilcoxon
-Wehmeyer
-Waterbury
-Wass
-Wann
-Wake
-Wachtel
-Vizcarra
-Vince
-Victory
-Veitch
-Vanderbilt
-Vallone
-Vallery
-Ureno
-Tyer
-Tipps
-Tiedeman
-Theberge
-Texeira
-Taub
-Tapscott
-Stutts
-Stults
-Stukes
-Staff
-Spink
-Sottile
-Smithwick
-Slane
-Simeone
-Silvester
-Siegrist
-Shiffer
-Sheedy
-Sheaffer
-Severin
-Sellman
-Scotto
-Schupp
-Schueller
-Schreier
-Schoolcraft
-Schoenberger
-Schnabel
-Sangster
-Samford
-Saliba
-Ryles
-Ryans
-Rossetti
-Rodriguz
-Risch
-Riel
-Rezendes
-Rester
-Rencher
-Recker
-Rathjen
-Profitt
-Poteete
-Polizzi
-Perrigo
-Patridge
-Osby
-Orvis
-Opperman
-Oppenheim
-Onorato
-Olaughlin
-Ohagan
-Ogles
-Oehler
-Obyrne
-Nuzzo
-Nickle
-Nease
-Neagle
-Navarette
-Nagata
-Musto
-Morning
-Morison
-Montz
-Mogensen
-Mizer
-Miraglia
-Mingus
-Migliore
-Merideth
-Menges
-Mellor
-Mcnear
-Mcnab
-Mcloud
-Mcelligott
-Mccollom
-Maynes
-Marquette
-Markowski
-Marcantonio
-Mar
-Maldanado
-Makin
-Macey
-Lundeen
-Lovin
-Longino
-Lisle
-Linthicum
-Limones
-Lesure
-Lesage
-Leisure
-Lauver
-Laubach
-Latshaw
-Lary
-Lapham
-Lacoste
-Lacher
-Kutcher
-Knickerbocker
-Klos
-Klingler
-Kleiman
-Kittleson
-Kimbrel
-Kimberly
-Kemmerer
-Kelson
-Keese
-Kam
-Kallas
-Jurgensen
-Junkins
-Juneau
-Juergens
-Jolliff
-Jelks
-Janicki
-Jang
-Innocent
-Ingles
-Inge
-Huguley
-Huggard
-Howton
-Hone
-Holford
-Holding
-Hogle
-Hipple
-Heimbach
-Heider
-Heidel
-Havener
-Hattaway
-Harrah
-Hanscom
-Hankinson
-Hamdan
-Gridley
-Goulette
-Goulart
-Goodspeed
-Goodrow
-Go
-Girardi
-Gent
-Gautreau
-Ganz
-Gandara
-Gamblin
-Galipeau
-Fyffe
-Furrow
-Fulp
-Fricks
-Frase
-Frandsen
-Fout
-Foulks
-Fouche
-Foskey
-Forgey
-Foor
-Fobbs
-Finklea
-Fincham
-Figueiredo
-Festa
-Ferrier
-Fellman
-Eslick
-Eilerman
-Eckart
-Eaglin
-Dunfee
-Dumond
-Drewry
-Douse
-Domino
-Dimick
-Diener
-Dickert
-Deines
-Degree
-Declue
-Daw
-Dattilo
-Danko
-Custodio
-Cuccia
-Crunk
-Crispin
-Corp
-Cornwall
-Corea
-Coppin
-Considine
-Coniglio
-Conboy
-Collar
-Cockrum
-Clute
-Clewis
-Claude
-Christiano
-Channell
-Channel
-Cerrato
-Cecere
-Catoe
-Castillon
-Castile
-Carstarphen
-Carmouche
-Caperton
-Buteau
-Bury
-Bumpers
-Brey
-Brenton
-Brazeal
-Brassard
-Brass
-Braga
-Bradham
-Bourget
-Borrelli
-Borba
-Boothby
-Bohr
-Bohm
-Boehme
-Bodin
-Bloss
-Blocher
-Bizzell
-Bieker
-Berthelot
-Bernardini
-Berends
-Benard
-Belser
-Baze
-Bartling
-Barrientes
-Barras
-Barcia
-Banfield
-Aurand
-Artman
-Arnott
-Arend
-Ardis
-Amon
-Almaguer
-Allee
-Albarado
-Alameda
-Abdo
-Zuehlke
-Zoeller
-Yokoyama
-Yocom
-Wyllie
-Woolum
-Wint
-Winland
-Wink
-Wilner
-Wilmes
-Whitlatch
-Westervelt
-Walthall
-Walkowiak
-Walburn
-Viviano
-Vanderhoff
-Valez
-Ugalde
-Trumbull
-Todaro
-Tilford
-Tidd
-Tibbits
-Terranova
-Templeman
-Tannenbaum
-Talmage
-Tabarez
-Swearengin
-Swartwood
-Svendsen
-Strum
-Strack
-Storie
-Stockard
-Steinbeck
-Starns
-Stanko
-Stankiewicz
-Stacks
-Stach
-Sproles
-Spenser
-Smotherman
-Slusser
-Sinha
-Silber
-Siefert
-Siddiqui
-Shuff
-Sherburne
-Seldon
-Seddon
-Schweigert
-Schroeter
-Schmucker
-Saffold
-Rutz
-Rundle
-Rosinski
-Rosenow
-Rogalski
-Ridout
-Rhymer
-Replogle
-Regina
-Reda
-Raygoza
-Ratner
-Rascoe
-Rahm
-Quincy
-Quast
-Pry
-Pressnell
-Predmore
-Pou
-Porto
-Pleasants
-Pigford
-Pavone
-Patnaude
-Parramore
-Papadopoulos
-Palmatier
-Ouzts
-Oshields
-Ortis
-Olmeda
-Olden
-Okamoto
-Norby
-Nitz
-Niebuhr
-Nevius
-Neiman
-Neidig
-Neece
-Murawski
-Mroz
-Moylan
-Moultry
-Mosteller
-Moring
-Morganti
-Mook
-Moffet
-Mettler
-Merlo
-Mengel
-Mendelsohn
-Meli
-Melchior
-Mcmeans
-Mcfaddin
-Mccullers
-Mccollister
-Mccloy
-Mcclaine
-Maury
-Maser
-Martelli
-Manthey
-Malkin
-Maio
-Magwood
-Maginnis
-Mabon
-Luton
-Lusher
-Lucht
-Lobato
-Levis
-Letellier
-Legendre
-Laurel
-Latson
-Larmon
-Largo
-Landreneau
-Landgraf
-Lamberson
-Kurland
-Kresge
-Korman
-Korando
-Klapper
-Kitson
-Kinyon
-Kincheloe
-Kawamoto
-Kawakami
-Jenney
-Jeanpierre
-Ivers
-Issa
-Ince
-Hugh
-Hug
-Honda
-Hollier
-Hollars
-Hoerner
-Hodgkinson
-Hiott
-Hibbitts
-Herlihy
-Henricks
-Heavner
-Hayhurst
-Harvill
-Harewood
-Hanselman
-Hanning
-Gwyn
-Gustavson
-Grounds
-Grizzard
-Grinder
-Graybeal
-Gravley
-Gorney
-Goll
-Goehring
-Godines
-Gobeil
-Glickman
-Giuliano
-Gimbel
-Gift
-Geib
-Gayhart
-Gatti
-Gains
-Gadberry
-Frei
-Fraise
-Fouch
-Forst
-Forsman
-Folden
-Fogleman
-Figaro
-Fetty
-Feely
-Fabry
-Eury
-Estill
-Epling
-Elamin
-Echavarria
-Dutil
-Duryea
-Dumais
-Drago
-Downard
-Douthit
-Doolin
-Dobos
-Dison
-Dinges
-Diebold
-Desilets
-Deshazo
-Depaz
-Degennaro
-Dall
-Cyphers
-Cryer
-Croce
-Crisman
-Credle
-Coriell
-Copp
-Coop
-Compos
-Colmenero
-Cogar
-Cliff
-Chapel
-Carnevale
-Campanella
-Caley
-Calderone
-Burtch
-Brouwer
-Brehmer
-Brassell
-Brafford
-Bourquin
-Bourn
-Bohnert
-Blewett
-Blass
-Blakes
-Bhakta
-Besser
-Berge
-Bellis
-Balfour
-Avera
-Austria
-Applin
-Ammon
-Alsop
-Aleshire
-Akbar
-Zoller
-Zapien
-Wymore
-Wyble
-Wolken
-Wix
-Wickstrom
-Whobrey
-Whigham
-Westerlund
-Welsch
-Weisser
-Weisner
-Weinstock
-Wehner
-Watlington
-Wakeland
-Wafer
-Virgen
-Victorino
-Veltri
-Veith
-Urich
-Uresti
-Umberger
-Twedt
-Tuohy
-Tschida
-Trumble
-Troia
-Tristan
-Trimmer
-Topps
-Tonn
-Tiernan
-Threet
-Thrall
-Thetford
-Teneyck
-Tartaglia
-Swords
-Strohl
-Streater
-Strausbaugh
-Stradley
-Stonecipher
-Steadham
-Stansel
-Stalcup
-Stabile
-Sprenger
-Spradley
-Speier
-Southwood
-Sorrels
-Slezak
-Skow
-Sirmans
-Simental
-Silk
-Sifford
-Sievert
-Shover
-Sheley
-Selzer
-Scriven
-Schwindt
-Schwan
-Schroth
-Saylors
-Saragosa
-Sant
-Salaam
-Saephan
-Routt
-Rousey
-Ros
-Rolfes
-Rieke
-Rieder
-Richeson
-Redinger
-Rasnick
-Rapoza
-Rambert
-Rafael
-Quist
-Pyron
-Punch
-Pullman
-Przybylski
-Pridmore
-Pooley
-Pines
-Perkinson
-Perine
-Perham
-Pecor
-Peavler
-Partington
-Panton
-Oliverio
-Olague
-Ohman
-Ohearn
-Noyola
-Nicolai
-Nebel
-Murtha
-Muff
-Mowrey
-Moroney
-Morgenstern
-Morant
-Monty
-Monsour
-Mohammad
-Moffit
-Mijares
-Meriwether
-Mendieta
-Melendrez
-Mejorado
-Mckittrick
-Mckey
-Mckenny
-Mckelvy
-Mckechnie
-Mcelvain
-Mccoin
-Mazzarella
-Mazon
-Maurin
-Matthies
-Maston
-Maske
-Marzano
-Marmon
-Marburger
-Mangus
-Mangino
-Mallet
-Luo
-Losada
-Londono
-Lobdell
-Lipson
-Lesniak
-Leighty
-Lei
-League
-Lavallie
-Lareau
-Laperle
-Lape
-Laforce
-Laffey
-Kuehner
-Kravitz
-Kowalsky
-Kohr
-Kinsman
-Keppler
-Kennemer
-Keiper
-Keely
-Kaler
-Jun
-Jelinek
-Jarnagin
-Issac
-Isakson
-Hypes
-Hutzler
-Huls
-Horak
-Hitz
-Hice
-Herrell
-Henslee
-Heitz
-Heiss
-Heiman
-Hasting
-Hartwick
-Harmer
-Harland
-Hammontree
-Haldeman
-Hakes
-Guse
-Guillotte
-Guard
-Groleau
-Greve
-Greenough
-Golub
-Golson
-Goldschmidt
-Golder
-Godbolt
-Gilmartin
-Gies
-Gibby
-Geren
-Genthner
-Gendreau
-Gemmill
-Gaymon
-Galyean
-Galeano
-Friar
-Folkerts
-Fleeman
-Fitzgibbons
-Ferranti
-Felan
-Farrand
-Eoff
-Enger
-Engels
-Ducksworth
-Duby
-Dry
-Drumheller
-Douthitt
-Doris
-Donis
-Dixion
-Dittrich
-Dials
-Dessert
-Descoteaux
-Depaul
-Denker
-Demuth
-Demelo
-Delacerda
-Deforge
-Danos
-Dalley
-Daigneault
-Cybulski
-Crystal
-Cristobal
-Cothren
-Corns
-Corkery
-Copas
-Coco
-Clubb
-Clore
-Chitty
-Chichester
-Chery
-Charon
-Chamber
-Chace
-Catanzaro
-Castonguay
-Cassella
-Caroll
-Carlberg
-Cammarata
-Calle
-Cajigas
-Byas
-Buzbee
-Busey
-Burling
-Bufkin
-Brzezinski
-Brun
-Brickner
-Brabham
-Boller
-Bodily
-Bockman
-Bleich
-Blakeman
-Bisbee
-Bier
-Bezanson
-Bevilacqua
-Besaw
-Berrian
-Berkeley
-Bequette
-Beauford
-Baumgarten
-Baudoin
-Batie
-Basaldua
-Bardin
-Bangert
-Banes
-Backlund
-Avitia
-Artz
-Archey
-Apel
-Amico
-Alam
-Aden
-Zebrowski
-Yokota
-Wormley
-Wootton
-Woodie
-Womac
-Wiltz
-Wigington
-Whitehorn
-Whisman
-Weisgerber
-Weigle
-Weedman
-Watkin
-Wasilewski
-Wadlington
-Wadkins
-Viverette
-Vidaurri
-Vidales
-Vezina
-Vanleer
-Vanhoy
-Vanguilder
-Vanbrunt
-Uy
-Updegraff
-Tylor
-Trinkle
-Touchette
-Tilson
-Tilman
-Tengan
-Tarkington
-Surrett
-Super
-Summy
-Streetman
-Straughter
-Steere
-Stalling
-Spruell
-Spadaro
-Solley
-Smathers
-Silvera
-Siems
-Shreffler
-Sholar
-Selden
-Schaper
-Samayoa
-Ruggeri
-Rowen
-Rosso
-Rosenbalm
-Roosevelt
-Roose
-Ronquillo
-Rogowski
-Rexford
-Repass
-Renzi
-Renick
-Renda
-Rehberg
-Reaper
-Ranck
-Raffa
-Rackers
-Raap
-Pugsley
-Puglisi
-Prinz
-Primus
-Pounders
-Pon
-Pompa
-Plasencia
-Pipkins
-Pillar
-Petrosky
-Pelley
-Pauls
-Pauli
-Parkison
-Parisien
-Pangle
-Pancoast
-Palazzolo
-Owenby
-Overbay
-Orris
-Orlowski
-Nipp
-Newbern
-Nedd
-Nealon
-Najar
-Mysliwiec
-Myron
-Myres
-Musson
-Murrieta
-Munsell
-Mumma
-Muldowney
-Moyle
-Mowen
-Mose
-Morejon
-Moodie
-Monier
-Mikkelsen
-Miers
-Metzinger
-Melin
-Mcquay
-Mcpeek
-Mcneeley
-Mcglothin
-Mcghie
-Mcdonell
-Mccumber
-Mccranie
-Mcbean
-Mayhugh
-Marts
-Marenco
-Manges
-Lynam
-Lupien
-Luff
-Luebbert
-Loh
-Loflin
-Lococo
-Loch
-Lis
-Linke
-Lightle
-Lewellyn
-Leishman
-Lebow
-Lebouef
-Leanos
-Lanz
-Landy
-Landaverde
-Lacefield
-Kyler
-Kuebler
-Kropf
-Kroeker
-Kluesner
-Klass
-Kimberling
-Kilkenny
-Kiker
-Ketter
-Kelemen
-Keasler
-Kawamura
-Karst
-Kardos
-Jeremiah
-Jared
-Igo
-Huseman
-Huseby
-Hurlbert
-Huard
-Hottinger
-Hornberger
-Hopps
-Holdsworth
-Hensen
-Heilig
-Heeter
-Harpole
-Haak
-Gutowski
-Gunnels
-Grimmer
-Grieve
-Gravatt
-Granderson
-Gotcher
-Gleaves
-Genao
-Garfinkel
-Frerichs
-Foushee
-Flanery
-Finnie
-Feldt
-Fagin
-Ewalt
-Ellefson
-Eiler
-Eckhart
-Eastep
-Dwight
-Digirolamo
-Didomenico
-Devera
-Delavega
-Defilippo
-Debusk
-Daub
-Damiani
-Cupples
-Cuddy
-Crofoot
-Courter
-Coto
-Costigan
-Corning
-Corman
-Corlett
-Cooperman
-Collison
-Coghlan
-Cobbins
-Coady
-Coachman
-Clothier
-Client
-Clear
-Cipolla
-Chmielewski
-Chiodo
-Chatterton
-Chappelle
-Chairez
-Ceron
-Casperson
-Casler
-Casados
-Carrow
-Carolina
-Carlino
-Carico
-Cardillo
-Caouette
-Canto
-Canavan
-Cambra
-Byard
-Buterbaugh
-Buse
-Bucy
-Buckwalter
-Bubb
-Bryd
-Brissette
-Brault
-Bradwell
-Boshears
-Borchert
-Blansett
-Blanch
-Blade
-Biondo
-Bilbo
-Biehl
-Bessey
-Berta
-Belles
-Bella
-Beeks
-Beekman
-Beaufort
-Bayliss
-Bardsley
-Avilla
-Astudillo
-Ardito
-Anwar
-Antunez
-Amen
-Aderholt
-Abate
-Yowell
-Yin
-Yearby
-Ye
-Wurst
-Woolverton
-Woolbright
-Wildermuth
-Whittenburg
-Whitely
-Wetter
-Wetherbee
-Wenz
-Welliver
-Welling
-Welcome
-Wason
-Warrior
-Warlick
-Voorhies
-Vivier
-Villines
-Vida
-Verde
-Veiga
-Varghese
-Vanwyk
-Vanwingerden
-Vanhorne
-Umstead
-Twiggs
-Tusing
-Trego
-Tompson
-Tinkle
-Thoman
-Thole
-Tatman
-Tartt
-Suda
-Studley
-Strock
-Strawbridge
-Stokely
-Stec
-Stang
-Stalter
-Speidel
-Spafford
-Spade
-Sontag
-Sokolowski
-Skillman
-Skelley
-Skalski
-Sison
-Sippel
-Sinquefield
-Sin
-Siegle
-Sher
-Sharrow
-Setliff
-Sera
-Sellner
-Selig
-Seibold
-Seery
-Scriber
-Schull
-Schrupp
-Schippers
-Say
-Saulsbury
-Sao
-Santillo
-Sanor
-Sancho
-Rufus
-Rubalcaba
-Roosa
-Ronk
-Robbs
-Roache
-River
-Riebe
-Reinoso
-Quin
-Prude
-Preuss
-Pottorff
-Pontiff
-Plouffe
-Picou
-Picklesimer
-Pettyjohn
-Petti
-Penaloza
-Parmelee
-Pardee
-Palazzo
-Overholt
-Ogawa
-Ofarrell
-Nova
-Nolting
-Noda
-Nicola
-Nickson
-Nevitt
-Neveu
-Navarre
-Nam
-Murrow
-Munz
-Mulloy
-Monzo
-Milliman
-Metivier
-Merlino
-Mcpeters
-Mckissack
-Mckeen
-Mcgurk
-Mcfee
-Mcfarren
-Mcelwee
-Mceachin
-Mcdonagh
-Mccarville
-Mayhall
-Mattoon
-Martello
-Marconi
-Marbury
-Mao
-Manzella
-Maly
-Malec
-Maitland
-Maheu
-Maclennan
-Lyke
-Luera
-Loyola
-Lowenstein
-Losh
-Lopiccolo
-Longacre
-Loman
-Loden
-Loaiza
-Lieber
-Libbey
-Lenhardt
-Lefebre
-Lauterbach
-Lauritsen
-Lass
-Larocco
-Larimer
-Lansford
-Lanclos
-Lamay
-Lal
-Kulikowski
-Kriebel
-Kosinski
-Kleinman
-Kleiner
-Kleckner
-Kistner
-Kissner
-Kissell
-Kilroy
-Kenna
-Keisler
-Keeble
-Keaney
-Kale
-Joly
-Jimison
-Jeans
-Ikner
-Hursey
-Hruska
-Hove
-Hou
-Host
-Hosking
-Hoose
-Holle
-Hoeppner
-Hittle
-Hitchens
-Hirth
-Hinerman
-Hilario
-Higby
-Hertzog
-Hentz
-Hensler
-Heist
-Heier
-Hegg
-Hassel
-Harpe
-Hara
-Hank
-Hain
-Hagopian
-Grimshaw
-Grado
-Gowin
-Gowans
-Googe
-Goodlow
-Goering
-Gleaton
-Gidley
-Giannone
-Gascon
-Garneau
-Gambrel
-Galaz
-Fuentez
-Frisina
-Fresquez
-Fraher
-Fitting
-Feuerstein
-Felten
-Everman
-Estell
-Ertel
-Erazo
-Ensign
-Endo
-Ellerman
-Eichorn
-Edgell
-Ebron
-Eaker
-Dundas
-Duncanson
-Duchene
-Ducan
-Dombroski
-Doman
-Dock
-Dickison
-Dewoody
-Deloera
-Delahoussaye
-Dejean
-Degroat
-Decaro
-Dearmond
-Dashner
-Dales
-Crossett
-Cressey
-Cowger
-Courts
-Court
-Cornette
-Corbo
-Coplin
-Coover
-Condie
-Cokley
-Cicero
-Ceaser
-Cannaday
-Callanan
-Cadle
-Buscher
-Bullion
-Bucklin
-Bruening
-Bruckner
-Brose
-Branan
-Bradway
-Botsford
-Bortz
-Borelli
-Bonetti
-Bolan
-Boerger
-Bloomberg
-Bingman
-Bilger
-Berns
-Beringer
-Beres
-Beets
-Beede
-Beaudet
-Beachum
-Baughn
-Bator
-Bastien
-Basquez
-Barreiro
-Barga
-Baratta
-Balser
-Baillie
-Axford
-Attebery
-Arakaki
-Annunziata
-Andrzejewski
-Ament
-Amendola
-Adcox
-Abril
-Zenon
-Zeitler
-Zang
-Zambrana
-Ybanez
-Yagi
-Wolak
-Wilcoxson
-Whitesel
-Whitehair
-Weyand
-Westendorf
-Welke
-Weinmann
-Wei
-Weesner
-Weekes
-Wedel
-Wedding
-Weatherall
-Warthen
-Vose
-Villalta
-Vila
-Viator
-Vaz
-Valtierra
-Urbanek
-Tulley
-Trojanowski
-Trapani
-Toups
-Torpey
-Tomita
-Tindal
-Tieman
-Tevis
-Tedrow
-Taul
-Tash
-Tammaro
-Sylva
-Swiderski
-Sweeting
-Sund
-Stutler
-Stocking
-Stich
-Sterns
-Stegner
-Stalder
-Splawn
-Speirs
-Southwell
-Soltys
-Smead
-Slye
-Skipworth
-Sipos
-Simmerman
-Sigmund
-Sidhu
-Shuffler
-Shingleton
-Shadwick
-Sermons
-Seefeldt
-Scipio
-Schwanke
-Schreffler
-Schiro
-Scheiber
-Sandoz
-Samsel
-Ruddell
-Royse
-Rouillard
-Rotella
-Rosalez
-Romriell
-Rommel
-Rizer
-Riner
-Rickards
-Rhoton
-Rhem
-Reppert
-Rayl
-Raulston
-Raposo
-Rapier
-Rainville
-Radel
-Quinney
-Purdie
-Puffer
-Pizzo
-Pincus
-Petrus
-Pendelton
-Pendarvis
-Peltz
-Peguero
-Peete
-Patricio
-Patchett
-Parrino
-Papke
-Pam
-Palafox
-Ottley
-Ostby
-Oritz
-Oren
-Ogan
-Odegaard
-Oatman
-Noell
-Nida
-Nicoll
-Newhall
-Newbill
-Netzer
-Nettleton
-Neblett
-Murley
-Mungo
-Mulhall
-Mosca
-Morissette
-Morford
-Montag
-Monsen
-Mitzel
-Miskell
-Minder
-Mehaffey
-Mcquillen
-Mclennan
-Mcgrail
-Mccreight
-Mayville
-Maysonet
-Maust
-Mathieson
-Mastrangelo
-Maskell
-Martina
-Manz
-Malmberg
-Makela
-Madruga
-Luz
-Lotts
-Longnecker
-Logston
-Littell
-Liska
-Lindauer
-Lillibridge
-Levron
-Letchworth
-Lesh
-Leffel
-Leday
-Leamon
-Laura
-Kulas
-Kula
-Kucharski
-Kromer
-Kraatz
-Konieczny
-Konen
-Komar
-Kivett
-Kirts
-Kinnear
-Kersh
-Keithley
-Keifer
-Judah
-Jimenes
-Jeppesen
-Jasmin
-Jansson
-Huntsberry
-Hund
-Huitt
-Huffine
-Hosford
-Hopes
-Holmstrom
-Hollen
-Hodgin
-Hirschman
-Hiltner
-Hilliker
-Hibner
-Hennis
-Helt
-Heidelberg
-Heger
-Heer
-Hartness
-Hardrick
-Halladay
-Gula
-Guillaume
-Guerriero
-Grunewald
-Grosse
-Griffeth
-Grenz
-Grassi
-Grandison
-Ginther
-Gimenez
-Gillingham
-Gillham
-Gess
-Gelman
-Gearheart
-Gaskell
-Gariepy
-Gamino
-Gallien
-Galentine
-Fuquay
-Froman
-Froelich
-Friedel
-Foos
-Fomby
-Focht
-Flythe
-Fiqueroa
-Filson
-Filip
-Fierros
-Fett
-Fedele
-Fasching
-Farney
-Fargo
-Everts
-Even
-Etzel
-Elzey
-Eichner
-Eger
-Eatman
-Ducker
-Duchesne
-Donati
-Domenech
-Dollard
-Dodrill
-Dinapoli
-Denn
-Delfino
-Delcid
-Delaune
-Delatte
-Deems
-Daluz
-Cusson
-Cullison
-Cue
-Cuadrado
-Crumrine
-Cruickshank
-Crosland
-Croll
-Criddle
-Crepeau
-Coutu
-Couey
-Cort
-Coppinger
-Collman
-Cockburn
-Coca
-Clayborne
-Claflin
-Cissell
-Chowdhury
-Chicoine
-Chenier
-Causby
-Caulder
-Cassano
-Casner
-Cardiel
-Burner
-Brunton
-Bruch
-Broxton
-Brosius
-Brooking
-Branco
-Bracco
-Bourgault
-Bosserman
-Books
-Bonet
-Bolds
-Bolander
-Bohman
-Boelter
-Blohm
-Blea
-Blaise
-Bischof
-Billie
-Beus
-Bellew
-Bastarache
-Bast
-Bartolome
-Bark
-Barcomb
-Barco
-Balls
-Balk
-Balas
-Bakos
-Avey
-Atnip
-Ashbrook
-Arno
-Arbour
-Aquirre
-Appell
-Aldaco
-Alcazar
-Alban
-Ahlstrom
-Abadie
-Zylstra
-Zick
-Zheng
-Yother
-Wyse
-Wunsch
-Whitty
-Weist
-Vrooman
-Vine
-Villalon
-Vidrio
-Vavra
-Vasbinder
-Vanmatre
-Vandorn
-Ugarte
-Turberville
-Tuel
-Trogdon
-Town
-Toupin
-Toone
-Tolleson
-Tinkham
-Tinch
-Tiano
-Teston
-Teer
-Tea
-Tawney
-Taplin
-Tant
-Tansey
-Swayne
-Sutcliffe
-Sunderman
-Suits
-Strothers
-Stromain
-Stork
-Stoneburner
-Stolte
-Stolp
-Stoehr
-Stingley
-Stegman
-Stangl
-Spinella
-Spier
-Soules
-Sommerfield
-Sipp
-Simek
-Siders
-Shufelt
-Shue
-Shor
-Shires
-Shellenberger
-Sheely
-Service
-Sepe
-Seaberg
-Schwing
-Scherrer
-Scalzo
-Saver
-Sasse
-Sarvis
-Santora
-Sansbury
-Salls
-Saleem
-Ryland
-Rybicki
-Ruggieri
-Rothenberg
-Rosenstein
-Roquemore
-Rollison
-Rodden
-Rivet
-Rita
-Ridlon
-Riche
-Riccardi
-Reiley
-Regner
-Rech
-Rayo
-Rawley
-Ranger
-Raff
-Radabaugh
-Quon
-Quill
-Privette
-Prange
-Pickrell
-Perino
-Penning
-Pankratz
-Orlandi
-Nyquist
-Norrell
-Noren
-Naples
-Nale
-Nakashima
-Musselwhite
-Murrin
-Murch
-Mullinix
-Mullican
-Mullan
-Morneau
-Mondor
-Molinar
-Mo
-Minjares
-Minix
-Mingle
-Minchew
-Mill
-Milewski
-Mikkelson
-Mifflin
-Messing
-Merkley
-Meis
-Meas
-Mcroy
-Mcphearson
-Mcneel
-Mcmunn
-Mcmorrow
-Mcdorman
-Mccroskey
-Mccoll
-Mcclusky
-Mcclaran
-Mccampbell
-Mazzariello
-Mauzy
-Mauch
-Mastro
-Martinek
-Marsala
-Marcantel
-Mahle
-Lyda
-Lucius
-Luciani
-Lubbers
-Louder
-Lobel
-Linsey
-Linch
-Liller
-Legros
-Layden
-Lapine
-Lansberry
-Lage
-Laforest
-Labriola
-Koga
-Knupp
-Klimek
-Kittinger
-Kirchoff
-Kinzel
-Killinger
-Kilbourne
-Ketner
-Kepley
-Kemble
-Kells
-Kear
-Kaya
-Karsten
-Kaneshiro
-Kamm
-Joines
-Joachim
-Janelle
-Jacobus
-Iler
-Holgate
-Hoar
-Hisey
-Hird
-Hilyard
-Heslin
-Herzberg
-Hennigan
-Hegland
-Hartl
-Haner
-Handel
-Gualtieri
-Greenly
-Grasser
-Gran
-Goetsch
-Godbold
-Gilland
-Gidney
-Gibney
-Giancola
-Gettinger
-Garzon
-Garret
-Galle
-Galgano
-Gaier
-Gaertner
-Fuston
-Freel
-Fortes
-Flock
-Fiorillo
-Figgs
-Fenstermacher
-Fedler
-Facer
-Fabiano
-Evins
-Eusebio
-Euler
-Esquer
-Enyeart
-Elem
-Eisenhower
-Eich
-Edgerly
-Durocher
-Durgan
-Duffin
-Drolet
-Drewes
-Dotts
-Dossantos
-Dolly
-Dockins
-Dirksen
-Difiore
-Dierks
-Dickerman
-Dice
-Dery
-Denault
-Demaree
-Delmonte
-Delcambre
-Days
-Daulton
-Darst
-Dahle
-Curnutt
-Cully
-Culligan
-Cueva
-Crosslin
-Croskey
-Cromartie
-Crofts
-Covin
-Coutee
-Countess
-Cost
-Coppa
-Coogan
-Condrey
-Concannon
-Coger
-Cloer
-Clatterbuck
-Cieslak
-Chumbley
-Choudhury
-Chiaramonte
-Charboneau
-Chai
-Carneal
-Cappello
-Campisi
-Callicoat
-Burgoyne
-Bucholz
-Brumback
-Brosnan
-Brogden
-Broder
-Brendle
-Breece
-Bown
-Bou
-Boser
-Bondy
-Bolster
-Boll
-Bluford
-Blandon
-Biscoe
-Bevill
-Bence
-Battin
-Basel
-Bartram
-Barnaby
-Barmore
-Balbuena
-Badgley
-Backstrom
-Auyeung
-Ater
-Arrellano
-Arant
-Ansari
-Alling
-Alejandre
-Alcock
-Alaimo
-Aguinaldo
-Aarons
-Zurita
-Zeiger
-Zawacki
-Yutzy
-Yarger
-Wygant
-Wurm
-Wuest
-Wolfram
-Witherell
-Wisneski
-Whitby
-Whelchel
-Weisz
-Weisinger
-Weishaar
-Wehr
-Wedge
-Waxman
-Waldschmidt
-Walck
-Waggener
-Vosburg
-Vita
-Villela
-Vercher
-Venters
-Vanscyoc
-Vandyne
-Valenza
-Utt
-Urick
-Ungar
-Ulm
-Tumlin
-Tsao
-Tryon
-Trudel
-Treiber
-Tow
-Tober
-Tipler
-Tillson
-Tiedemann
-Thornley
-Tetrault
-Temme
-Tarrance
-Tackitt
-Sykora
-Sweetman
-Swatzell
-Sutliff
-Suhr
-Sturtz
-Strub
-Strayhorn
-Stormer
-Steveson
-Stengel
-Steinfeldt
-Spiro
-Spieker
-Speth
-Spero
-Soza
-Souliere
-Soucie
-Snedeker
-Slifer
-Skillings
-Situ
-Siniard
-Simeon
-Signorelli
-Siggers
-Shultis
-Shrewsbury
-Shippee
-Shimp
-Sherron
-Shepler
-Sharpless
-Shadrick
-Severt
-Severs
-Semon
-Semmes
-Seiter
-Segers
-Sclafani
-Sciortino
-Schroyer
-Schrack
-Schoenberg
-Schober
-Scheidt
-Scheele
-Satter
-Sartori
-Sarris
-Sarratt
-Salvaggio
-Saladino
-Sakamoto
-Saine
-Ryman
-Rumley
-Ruggerio
-Rucks
-Roughton
-Room
-Robards
-Ricca
-Rexroad
-Resler
-Reny
-Rentschler
-Redrick
-Redick
-Reagle
-Raymo
-Rape
-Raker
-Racette
-Pyburn
-Pritt
-Presson
-Pressman
-Pough
-Plain
-Pisani
-Perz
-Perras
-Pelzer
-Pedrosa
-Palos
-Palmisano
-Paille
-Orem
-Orbison
-Oliveros
-Nourse
-Nordquist
-Newbury
-Nelligan
-Nawrocki
-Myler
-Mumaw
-Morphis
-Moldenhauer
-Miyashiro
-Mignone
-Mickelsen
-Michalec
-Mesta
-Mcree
-Mcqueary
-Mcninch
-Mcneilly
-Mclelland
-Mclawhorn
-Mcgreevy
-Mcconkey
-Mattes
-Maselli
-Marten
-Mart
-Marcucci
-Manseau
-Manjarrez
-Malbrough
-Machin
-Mabie
-Lynde
-Lykes
-Lueras
-Lokken
-Loken
-Linzy
-Lillis
-Lilienthal
-Levey
-Legler
-Leedom
-Lebowitz
-Lazzaro
-Larabee
-Lapinski
-Langner
-Langenfeld
-Lampkins
-Lamotte
-Lambright
-Lagarde
-Ladouceur
-Labrador
-Labounty
-Lablanc
-Laberge
-Kyte
-Kroon
-Kron
-Kraker
-Kouba
-Kirwin
-Kincer
-Kimbler
-Kegler
-Keach
-Katzman
-Katzer
-Kalman
-Journey
-Jimmerson
-Jenning
-Janus
-Iacovelli
-Hust
-Huson
-Husby
-Humphery
-Hufnagel
-Honig
-Holsey
-Holoman
-Hohl
-Hogge
-Hinderliter
-Hildebrant
-Hick
-Hey
-Hemby
-Helle
-Heintzelman
-Heidrick
-Hearon
-Heap
-Hazelip
-Hauk
-Hasbrouck
-Harton
-Hartin
-Harpster
-Hansley
-Hanchett
-Haar
-Guthridge
-Gulbranson
-Guill
-Guerrera
-Grund
-Grosvenor
-Grist
-Grell
-Grear
-Granberry
-Gonser
-Giunta
-Giuliani
-Gillon
-Gillmore
-Gillan
-Gibbon
-Gettys
-Gelb
-Gano
-Galliher
-Fullen
-Frese
-Frates
-Foxwell
-Fleishman
-Fleener
-Fielden
-Ferrera
-Feng
-Fells
-Feemster
-Fauntleroy
-Fails
-Evatt
-Espy
-Eno
-Emmerich
-Edwin
-Edler
-Eastham
-Dunavant
-Duca
-Drinnon
-Dowe
-Dorgan
-Dollinger
-Divers
-Dipalma
-Difranco
-Dietrick
-Denzer
-Demarest
-Delee
-Delariva
-Delany
-Decesare
-Debellis
-Deavers
-Deardorff
-Dawe
-Darosa
-Darley
-Dalzell
-Dahlen
-Curto
-Cupps
-Cunniff
-Cude
-Crivello
-Cripps
-Cresswell
-Cousar
-Cotta
-Compo
-Colorado
-Clyne
-Clayson
-Cearley
-Catania
-Carini
-Cargo
-Cantero
-Cali
-Buttrey
-Buttler
-Burpee
-Bulkley
-Buitron
-Buda
-Bublitz
-Bryer
-Bryden
-Brouillette
-Brott
-Brookman
-Bronk
-Breshears
-Brennen
-Brannum
-Brandl
-Braman
-Bracewell
-Boyter
-Bomberger
-Bold
-Bogen
-Boeding
-Bob
-Blauvelt
-Blandford
-Bigger
-Biermann
-Bielecki
-Bibby
-Berthold
-Berkman
-Belvin
-Bellomy
-Beland
-Behne
-Beecham
-Becher
-Beams
-Bax
-Bassham
-Barret
-Baley
-Bacchus
-Auxier
-Atkison
-Ary
-Arocha
-Arechiga
-Anspach
-An
-Algarin
-Alcott
-Alberty
-Ager
-Adolph
-Ackman
-Abdul
-Abdallah
-Zwick
-Ziemer
-Zastrow
-Zajicek
-Yokum
-Yokley
-Wittrock
-Winebarger
-Wilker
-Wilham
-Whitham
-Wetzler
-Westling
-Westbury
-Wendler
-Wellborn
-Weitzman
-Weitz
-Weight
-Wallner
-Waldroup
-Vrabel
-Vowels
-Volker
-Vitiello
-Visconti
-Villicana
-Vibbert
-Vesey
-Vannatter
-Vangilder
-Vandervort
-Vandegrift
-Vanalstyne
-Vallecillo
-Usrey
-Tynan
-Turpen
-Tuller
-Trisler
-Townson
-Tillmon
-Threlkeld
-Thornell
-Terrio
-Taunton
-Tarry
-Tardy
-Swoboda
-Swihart
-Sustaita
-Suitt
-Stuber
-Strine
-Stookey
-Stmartin
-Stiger
-Stainbrook
-Solem
-Smail
-Sligh
-Siple
-Sieben
-Shumake
-Shriner
-Showman
-Shiner
-Sheen
-Sheckler
-Seim
-Secrist
-Scoggin
-Schultheis
-Schmalz
-Schendel
-Schacher
-Savard
-Saulter
-Santillanes
-Sandiford
-Sande
-Salzer
-Salvato
-Saltz
-Sakai
-Ryckman
-Ryant
-Ruck
-Ronald
-Rocker
-Rittenberry
-Ristau
-Risk
-Richart
-Rhynes
-Reyer
-Reulet
-Reser
-Redington
-Reddington
-Rebello
-Reasor
-Raftery
-Rabago
-Raasch
-Quintanar
-Pylant
-Purington
-Provencal
-Prom
-Prioleau
-Prestwood
-Pothier
-Popa
-Polster
-Politte
-Poffenberger
-Pinner
-Pietrzak
-Pettie
-Penaflor
-Pellot
-Pellham
-Paylor
-Payeur
-Papas
-Paik
-Oyola
-Osbourn
-Orzechowski
-Oppenheimer
-Olesen
-Oja
-Ohl
-Nuckolls
-Nordberg
-Noonkester
-Nold
-Nitta
-Niblett
-Neuhaus
-Nesler
-Ned
-Nanney
-Myrie
-Mutch
-Motto
-Mosquera
-Morena
-Montalto
-Montagna
-Mizelle
-Mincy
-Millikan
-Millay
-Miler
-Milbourn
-Mikels
-Migues
-Miesner
-Mershon
-Merrow
-Merlin
-Melia
-Meigs
-Mealey
-Mcraney
-Mcmartin
-Mclachlan
-Mcgeehan
-Mcferren
-Mcdole
-Mccaulley
-Mcanulty
-Maziarz
-Maul
-Mateer
-Martinsen
-Marson
-Mariotti
-Manna
-Mang
-Mance
-Malbon
-Mah
-Magnusson
-Maclachlan
-Macek
-Lurie
-Luc
-Lown
-Loranger
-Lonon
-Lisenby
-Linsley
-Linger
-Lenk
-Leavens
-Learned
-Lauritzen
-Lathem
-Lashbrook
-Landman
-Lamarche
-Lamantia
-Laguerre
-Lagrange
-Kogan
-Klingbeil
-Kist
-Kimpel
-Kime
-Kier
-Kerfoot
-Kennamer
-Kellems
-Kammer
-Kamen
-Jess
-Jepsen
-Jarnigan
-Isler
-Ishee
-Isabel
-Hux
-Hungate
-Hummell
-Hultgren
-Huffaker
-Hruby
-Hover
-Hornick
-Hooser
-Hooley
-Hoggan
-Hirano
-Hilley
-Higham
-Heuser
-Henrickson
-Henegar
-Hellwig
-Heide
-Hedley
-Hasegawa
-Hartt
-Hambright
-Halfacre
-Hafley
-Guion
-Guinan
-Grunwald
-Grothe
-Gries
-Greaney
-Granda
-Grabill
-Gothard
-Gossman
-Gosser
-Gossard
-Gosha
-Goldner
-Gobin
-Gloss
-Ginyard
-Gilkes
-Gilden
-Gerson
-Gephart
-Gengler
-Gautier
-Gassett
-Garon
-Gandhi
-Galusha
-Gallager
-Galdamez
-Fulmore
-Fritsche
-Fowles
-Foutch
-Forward
-Footman
-Fludd
-Flakes
-Ferriera
-Ferrero
-Ferreri
-Fenimore
-Fegley
-Fegan
-Fearn
-Farrier
-Fansler
-Fane
-Falzone
-Fairweather
-Etherton
-Elsberry
-Dykema
-Duppstadt
-Dunnam
-Dunklin
-Duet
-Due
-Dudgeon
-Dubuc
-Doxey
-Dory
-Donmoyer
-Dodgen
-Disanto
-Dingler
-Dimattia
-Dilday
-Digennaro
-Diedrich
-Derossett
-Deputy
-Depp
-Demasi
-Degraffenreid
-Deakins
-Deady
-Davin
-Daigre
-Daddario
-Czerwinski
-Cullens
-Cubbage
-Cracraft
-Constance
-Comes
-Combest
-Coletti
-Coghill
-Clerk
-Claybrooks
-Class
-Christofferse
-Chiesa
-Chason
-Chamorro
-Cessna
-Celentano
-Cayer
-Carolan
-Carnegie
-Capetillo
-Callier
-Cadogan
-Caba
-Byrom
-Byrns
-Burrowes
-Burket
-Burdge
-Burbage
-Bukowski
-Buchholtz
-Brunt
-Brungardt
-Brunetti
-Brumbelow
-Brugger
-Broadhurst
-Brigance
-Brandow
-Bouknight
-Bottorff
-Bottomley
-Bosarge
-Borger
-Bona
-Bombardier
-Bologna
-Boggan
-Blumer
-Blecha
-Birney
-Birkland
-Betances
-Beran
-Benny
-Benes
-Belin
-Belgrave
-Bealer
-Bauch
-Bath
-Bashir
-Bartow
-Baro
-Barnhouse
-Barile
-Ballweg
-Baisley
-Bains
-Baehr
-Badilla
-Bachus
-Bacher
-Bachelder
-Auzenne
-Aten
-Astle
-Allis
-Agarwal
-Adger
-Adamek
-Ziolkowski
-Zinke
-Zazueta
-Zamorano
-Younkin
-Won
-Wittig
-Witman
-Winsett
-Winkles
-Wiedman
-Whitner
-Whitcher
-Wetherby
-Westra
-Westhoff
-Wehrle
-Wee
-Wagaman
-Voris
-Vicknair
-Vegas
-Veasley
-Vaugh
-Vanish
-Vanderburg
-Valletta
-Tunney
-Trumbo
-Truluck
-Trueman
-Truby
-Trombly
-Trojan
-Tourville
-Tostado
-Tone
-Titcomb
-Timpson
-Tignor
-Thrush
-Thresher
-Thiede
-Tews
-Tamplin
-Taff
-Tacker
-Syverson
-Sylvestre
-Summerall
-Stumbaugh
-Strouth
-Straker
-Stradford
-Stoney
-Stokley
-Steinhoff
-Steinberger
-Stairs
-Spigner
-Soltero
-Snively
-Sletten
-Sinkler
-Sinegal
-Simoes
-Siller
-Sigel
-Shoe
-Shire
-Shinkle
-Shellman
-Sheller
-Sheats
-Sharer
-Selvage
-Sedlak
-Sea
-Schriver
-Schimke
-Scheuerman
-Schanz
-Savory
-Saulters
-Sauers
-Sais
-Rusin
-Rumfelt
-Ruhland
-Rozar
-Rosborough
-Ronning
-Rolph
-Roloff
-Rogue
-Robie
-Riviera
-Rimer
-Riehle
-Ricco
-Rhein
-Retzlaff
-Reisman
-Reimann
-Re
-Rayes
-Raub
-Raminez
-Quesinberry
-Pua
-Procopio
-Priolo
-Printz
-Prewett
-Preas
-Prahl
-Portugal
-Poovey
-Ploof
-Platz
-Plaisted
-Pinzon
-Pineiro
-Pickney
-Petrovich
-Perl
-Pehrson
-Peets
-Pavon
-Pautz
-Pascarella
-Paras
-Paolini
-Pals
-Pafford
-Oyer
-Ovellette
-Outten
-Outen
-Ours
-Orduna
-Odriscoll
-Oberlin
-Nosal
-Niven
-Nisbett
-Nevers
-Nathanson
-Mule
-Mukai
-Mozee
-Mowers
-Motyka
-Morency
-Montford
-Mollica
-Molden
-Mitten
-Miser
-Mina
-Millender
-Midgette
-Messerly
-Melendy
-Meisel
-Meidinger
-Meany
-Mcnitt
-Mcnemar
-Mcmakin
-Mcgaugh
-Mccaa
-Mauriello
-Maudlin
-Matzke
-Mattia
-Matteo
-Matsumura
-Masuda
-Mangels
-Maloof
-Malizia
-Mahmoud
-Maglione
-Maddix
-Lucchesi
-Lochner
-Linquist
-Lino
-Lietz
-Leventhal
-Leopard
-Lemanski
-Leiser
-Laury
-Lauber
-Lamberth
-Kuss
-Kung
-Kulik
-Kuiper
-Krout
-Kotter
-Kort
-Kohlmeier
-Koffler
-Koeller
-Knipe
-Knauss
-Kleiber
-Kissee
-Kirst
-Kirch
-Kilgo
-Kerlin
-Kellison
-Kehl
-Kalb
-Jorden
-Jantzen
-Jamar
-Inabinet
-Ikard
-Husman
-Hunsberger
-Hundt
-Hucks
-Houtz
-Houseknecht
-Hoots
-Hogsett
-Hogans
-Hintze
-Hession
-Henault
-Hemming
-Helsley
-Heinen
-Heffington
-Heberling
-Heasley
-Heal
-Hazley
-Hazeltine
-Hayton
-Hayse
-Hawke
-Haston
-Harward
-Harvard
-Harrow
-Hanneman
-Hafford
-Hadnot
-Guerro
-Graig
-Grahm
-Gowins
-Gordillo
-Goosby
-Glatt
-Gibbens
-Ghent
-Gerrard
-Germann
-Geil
-Gebo
-Gean
-Garling
-Gardenhire
-Garbutt
-Gagner
-Furguson
-Funchess
-Fujiwara
-Fujita
-Friley
-Frigo
-Forshee
-Folkes
-Filler
-Fernald
-Ferber
-Feingold
-Favorite
-Faul
-Farrelly
-Fairbank
-Failla
-Estelle
-Espey
-Eshleman
-Ertl
-Erhart
-Erhardt
-Erbe
-Elsea
-Ells
-Ellman
-Eisenhart
-Ehmann
-Earnhardt
-Duplantis
-Dulac
-Ducote
-Draves
-Dosch
-Dolce
-Divito
-Ditch
-Dimauro
-Derringer
-Demeo
-Demartini
-Delima
-Dehner
-Degen
-Defrancisco
-Defoor
-Dedeaux
-Debnam
-Cypert
-Cutrer
-Cusumano
-Custis
-Croker
-Courtois
-Costantino
-Cormack
-Corbeil
-Copher
-Conlan
-Conkling
-Cogdell
-Cilley
-Chapdelaine
-Cendejas
-Castiglia
-Cassette
-Cashin
-Carstensen
-Carol
-Caprio
-Calcote
-Calaway
-Byfield
-Butner
-Bushway
-Burritt
-Browner
-Brobst
-Briner
-Brighton
-Bridger
-Brickley
-Brendel
-Bratten
-Bratt
-Brainerd
-Brackman
-Bowne
-Bouck
-Borunda
-Bordner
-Bonenfant
-Boer
-Boehmer
-Bodiford
-Bleau
-Blankinship
-Blane
-Blaha
-Bitting
-Bissonette
-Bigby
-Bibeau
-Beverage
-Bermudes
-Berke
-Bergevin
-Bergerson
-Bendel
-Belville
-Bechard
-Bearce
-Beadles
-Batz
-Bartlow
-Barren
-Ayoub
-Avans
-Aumiller
-Arviso
-Arpin
-Arnwine
-Armwood
-Arent
-Arehart
-Arcand
-Antle
-Ambrosino
-Alongi
-Alm
-Allshouse
-Ahart
-Aguon
-Ziebarth
-Zeledon
-Zakrzewski
-Yuhas
-Yingst
-Yedinak
-Wommack
-Winnett
-Wingler
-Wilcoxen
-Whitmarsh
-Whistler
-Wayt
-Watley
-Wasser
-Warkentin
-Voll
-Vogelsang
-Voegele
-Vivanco
-Vinton
-Villafane
-Viles
-Versace
-Ver
-Venne
-Vanwagoner
-Vanwagenen
-Vanleuven
-Vanauken
-Uselton
-Uren
-Trumbauer
-Tritt
-Treadaway
-Tozier
-Tope
-Tomczak
-Tomberlin
-Tomasini
-Tollett
-Toller
-Titsworth
-Tirrell
-Tilly
-Tavera
-Tarnowski
-Tanouye
-Tall
-Swarthout
-Sutera
-Surette
-Styers
-Styer
-Stipe
-Stickland
-Steve
-Stembridge
-Stearn
-Starkes
-Stanberry
-Stahr
-Spino
-Spicher
-Sperber
-Speece
-Soo
-Sonntag
-Sneller
-Smalling
-Slowik
-Slocumb
-Sliva
-Slemp
-Slama
-Sitz
-Sisto
-Sisemore
-Sindelar
-Shipton
-Shillings
-Sheeley
-Sharber
-Shaddix
-Severns
-Severino
-Sever
-Sensabaugh
-Seder
-Seawell
-Seamons
-Schrantz
-Schooler
-Scheffer
-Scheerer
-Scalia
-Saum
-Santibanez
-Sano
-Sanjuan
-Sampley
-Sailer
-Sabella
-Sabbagh
-Royall
-Rottman
-Rivenbark
-Rikard
-Ricketson
-Rickel
-Rethman
-Reily
-Reddin
-Reasoner
-Reade
-Rast
-Ranallo
-Rana
-Quintal
-Pung
-Pucci
-Proto
-Prosperie
-Prim
-Preusser
-Preslar
-Powley
-Postma
-Pinnix
-Pilla
-Pietsch
-Pickerel
-Pica
-Pharris
-Petway
-Petillo
-Perin
-Pereda
-Pennypacker
-Pennebaker
-Pedrick
-Patin
-Patchell
-Parodi
-Parman
-Pantano
-Padua
-Padro
-Osterhout
-Orner
-Opp
-Olivar
-Ohlson
-Odonoghue
-Oceguera
-Oberry
-Novello
-Noguera
-Newquist
-Newcombe
-Neihoff
-Nehring
-Nees
-Nebeker
-Nau
-Mundo
-Mullenix
-Morrisey
-Moronta
-Morillo
-Morefield
-Mongillo
-Molino
-Minto
-Midgley
-Michie
-Menzies
-Medved
-Mechling
-Mealy
-Mcshan
-Mcquaig
-Mcnees
-Mcglade
-Mcgarity
-Mcgahey
-Mcduff
-Mayweather
-Mastropietro
-Masten
-Maranto
-Maniscalco
-Maize
-Mahmood
-Maddocks
-Maday
-Macha
-Maag
-Luken
-Lopp
-Lolley
-Llanas
-Litz
-Litherland
-Lindenberg
-Lieu
-Letcher
-Lentini
-Lemelle
-Leet
-Lecuyer
-Leber
-Laursen
-Latch
-Larrick
-Lantigua
-Langlinais
-Lalli
-Lafever
-Labat
-Labadie
-Kurt
-Krogman
-Kohut
-Knarr
-Klimas
-Klar
-Kittelson
-Kirschbaum
-Kintzel
-Kincannon
-Kimmell
-Killgore
-Kettner
-Kelsch
-Karle
-Kapoor
-Johansson
-Jock
-Jenkinson
-Janney
-Isabelle
-Iraheta
-Insley
-Hyslop
-Hy
-Human
-Huckstep
-Holleran
-Hoerr
-Hinze
-Hinnenkamp
-Hilger
-Higgin
-Hicklin
-Heroux
-Henkle
-Helfer
-Heikkinen
-Heckstall
-Heckler
-Heavener
-Haydel
-Haveman
-Haubert
-Harrop
-Harnois
-Hansard
-Hanover
-Hammitt
-Haliburton
-Haefner
-Hadsell
-Haakenson
-Guynn
-Guizar
-Grout
-Grosz
-Goo
-Gomer
-Golla
-Godby
-Glanz
-Glancy
-Givan
-Giesen
-Gerst
-Gayman
-Garraway
-Gabor
-Furness
-Frisk
-Fremont
-Frary
-Forand
-Fessenden
-Ferrigno
-Fearon
-Favreau
-Faulks
-Falbo
-Ewen
-Everton
-Eurich
-Etchison
-Esterly
-Entwistle
-Ellingsworth
-Elders
-Ek
-Eisenbarth
-Edelson
-Eckel
-Earnshaw
-Dunneback
-Doyal
-Donnellan
-Dolin
-Dibiase
-Deschenes
-Dermody
-Denmark
-Degregorio
-Darnall
-Dant
-Dansereau
-Danaher
-Dammann
-Dames
-Czarnecki
-Cuyler
-Custard
-Cummingham
-Cuffie
-Cuffee
-Cudney
-Cuadra
-Crigler
-Creger
-Coughlan
-Corvin
-Cortright
-Corchado
-Connery
-Conforti
-Condron
-Colosimo
-Colclough
-Cola
-Cohee
-Claire
-Ciotti
-Chill
-Chien
-Check
-Chacko
-Cevallos
-Cavitt
-Cavins
-Castagna
-Cashwell
-Carrozza
-Carrara
-Capra
-Campas
-Callas
-Caison
-Cai
-Caggiano
-Cabot
-Bynoe
-Buswell
-Burpo
-Burnam
-Burges
-Buerger
-Buelow
-Bueche
-Buckle
-Bruni
-Brummitt
-Brodersen
-Briese
-Breit
-Brakebill
-Braatz
-Boyers
-Boughner
-Borror
-Borquez
-Bonelli
-Bohner
-Blaze
-Blaker
-Blackmer
-Bissette
-Bibbins
-Bhatt
-Bhatia
-Bessler
-Bergh
-Beresford
-Bensen
-Benningfield
-Benito
-Bellantoni
-Behler
-Beehler
-Beazley
-Beauchesne
-Bargo
-Bannerman
-Baltes
-Balog
-Ballantyne
-Bad
-Axelson
-Apgar
-Aoki
-Anstett
-Alejos
-Alcocer
-Albury
-Aichele
-Ahl
-Ackles
-Zerangue
-Zehner
-Zank
-Zacarias
-Youngberg
-Yorke
-Yarbro
-Xie
-Wydra
-Worthley
-Wolbert
-Wittmer
-Witherington
-Wishart
-Wire
-Winnie
-Winkleman
-Willilams
-Willer
-Wiedeman
-Whittingham
-Whitbeck
-Whetsel
-Wheless
-Westerberg
-Welcher
-Wegman
-Waterfield
-Wasinger
-Warfel
-Wannamaker
-Walborn
-Wada
-Vogl
-Vizcarrondo
-Vitela
-Villeda
-Veras
-Venuti
-Veney
-Ulrey
-Uhlig
-Turcios
-Tremper
-Torian
-Torbett
-Thrailkill
-Terrones
-Teitelbaum
-Teems
-Tay
-Swoope
-Sunseri
-Stutes
-Stthomas
-Strohm
-Stroble
-Striegel
-Streicher
-Stodola
-Stinchcomb
-Steves
-Steppe
-Stem
-Steller
-Staudt
-Starner
-Stamant
-Stam
-Stackpole
-Sprankle
-Speciale
-Spahr
-Sowders
-Sova
-Soluri
-Soderlund
-Slinkard
-Skates
-Sjogren
-Sirianni
-Siewert
-Sickels
-Sica
-Shugart
-Shoults
-Shive
-Shimer
-Shier
-Shield
-Shepley
-Sheeran
-Sharper
-Sevin
-Severe
-Seto
-Segundo
-Sedlacek
-Scuderi
-Schurman
-Schuelke
-Scholten
-Schlater
-Schisler
-Schiefelbein
-Schalk
-Sanon
-Sae
-Sabala
-Ruyle
-Ruybal
-Ruf
-Rueb
-Rowsey
-Rosol
-Rocheleau
-Rishel
-Rippey
-Ringgold
-Rieves
-Ridinger
-Rew
-Retherford
-Rempe
-Reith
-Rafter
-Raffaele
-Quinto
-Putz
-Purdom
-Puls
-Pulaski
-Propp
-Principato
-Preiss
-Prada
-Polansky
-Poch
-Plath
-Pittard
-Pinnock
-Pfarr
-Pfannenstiel
-Penniman
-Pauling
-Patchen
-Paschke
-Parkey
-Pando
-Overly
-Ouimet
-Ottman
-Otter
-Ostlund
-Ormiston
-Occhipinti
-Nowacki
-Norred
-Noack
-Nishida
-Nilles
-Nicodemus
-Neth
-Nealey
-Myricks
-Murff
-Mungia
-Mullet
-Motsinger
-Moscato
-Mort
-Morado
-Moors
-Monnier
-Molyneux
-Modzelewski
-Miura
-Minich
-Militello
-Milbrandt
-Michalik
-Meserve
-Merle
-Mendivil
-Melara
-Meadow
-Mcnish
-Mcelhannon
-Mccroy
-Mccrady
-Mazzella
-Maule
-Mattera
-Mathena
-Matas
-Mass
-Mascorro
-Marone
-Marinello
-Marguez
-Marcell
-Manwaring
-Manhart
-Mangano
-Maggi
-Lymon
-Luter
-Luse
-Lukasik
-Luiz
-Ludlum
-Luczak
-Lowenthal
-Lossett
-Lorentzen
-Loredo
-Longworth
-Lomanto
-Lisi
-Lish
-Lipsky
-Linck
-Liedtke
-Levering
-Lessman
-Lemond
-Lembo
-Ledonne
-Leatham
-Laufer
-Lanphear
-Langlais
-Lando
-Lamphear
-Lamberton
-Lafon
-Lade
-Lacross
-Kyzer
-Krok
-Kring
-Krell
-Krehbiel
-Kratochvil
-Krach
-Kovar
-Kostka
-Knudtson
-Knaack
-Kliebert
-Klahn
-Kirkley
-Kimzey
-Kettle
-Kerrick
-Kennerson
-Keesler
-Karlin
-Kan
-Jenny
-Janousek
-Jan
-Imel
-Icenhour
-Hyler
-Hunger
-Hudock
-Houpt
-Hopping
-Hoops
-Holquin
-Holiman
-Holahan
-Hodapp
-Hires
-Hillen
-Hickmon
-Hersom
-Henrich
-Helvey
-Heidt
-Heideman
-Hedstrom
-Hedin
-Hebron
-Hayter
-Harn
-Hardage
-Harbor
-Halsted
-Hahne
-Hagemann
-Guzik
-Guel
-Groesbeck
-Gritton
-Grego
-Graziani
-Grasty
-Graney
-Gouin
-Gossage
-Golston
-Goheen
-Godina
-Glade
-Giorgi
-Giambrone
-Gerrity
-Gerrish
-Gero
-Gerling
-Gaulke
-Garlick
-Galiano
-Gaiter
-Gahagan
-Gagnier
-Friddle
-Fredericksen
-Franqui
-Follansbee
-Foerster
-Flury
-Fitzmaurice
-Fiorini
-Finlayson
-Fiecke
-Fickes
-Fichter
-Ferron
-Ferdinand
-Farrel
-Fackler
-Eyman
-Escarcega
-Errico
-Erler
-Erby
-Engman
-Engelmann
-Elsass
-Elliston
-Eddleman
-Eadie
-Dummer
-Drost
-Dorrough
-Dorrance
-Doolan
-Donalson
-Domenico
-Ditullio
-Dittmar
-Dishon
-Dionisio
-Dike
-Devinney
-Desir
-Deschamp
-Derrickson
-Delamora
-Deitch
-Dechant
-Dave
-Danek
-Dahmen
-Curci
-Cudjoe
-Crumble
-Croxton
-Creasman
-Craney
-Crader
-Cowling
-Coulston
-Cortina
-Corlew
-Corl
-Copland
-Convery
-Cohrs
-Clune
-Clausing
-Cipriani
-Cinnamon
-Cianciolo
-Chubb
-Chittum
-Chenard
-Charlesworth
-Charlebois
-Champine
-Chamlee
-Chagoya
-Casselman
-Cardello
-Capasso
-Cannella
-Calderwood
-Byford
-Buttars
-Bushee
-Burrage
-Buentello
-Brzozowski
-Bryner
-Brumit
-Brookover
-Bronner
-Bromberg
-Brixey
-Brinn
-Briganti
-Bremner
-Brawn
-Branscome
-Brannigan
-Bradsher
-Bozek
-Boulay
-Bormann
-Bongiorno
-Bollin
-Bohler
-Bogert
-Bodenhamer
-Blose
-Blind
-Bivona
-Bitter
-Billips
-Bibler
-Benfer
-Benedetti
-Belue
-Bellanger
-Belford
-Behn
-Beerman
-Barnhardt
-Baltzell
-Balling
-Balducci
-Bainter
-Babineau
-Babich
-Baade
-Attwood
-Asmus
-Asaro
-Artiaga
-April
-Applebaum
-Ang
-Anding
-Amar
-Amaker
-Allsup
-Alligood
-Alers
-Agin
-Agar
-Achenbach
-Abramowitz
-Abbas
-Aasen
-Zehnder
-Yopp
-Yelle
-Yeldell
-Wynter
-Woodmansee
-Wooding
-Woll
-Winborne
-Willsey
-Willeford
-Widger
-Whiten
-Whitchurch
-Whang
-Wen
-Weissinger
-Weinman
-Weingartner
-Weidler
-Waltrip
-Walt
-Wagar
-Wafford
-Vitagliano
-Villalvazo
-Villacorta
-Vigna
-Vickrey
-Vicini
-Ventimiglia
-Vandenbosch
-Valvo
-Valazquez
-Utsey
-Urbaniak
-Unzueta
-Trombetta
-Trevizo
-Trembley
-Tremaine
-Traverso
-Tores
-Tolan
-Tillison
-Tietjen
-Tee
-Teachout
-Taube
-Tatham
-Tarwater
-Tarbell
-Sydow
-Sy
-Swims
-Swader
-Striplin
-Stops
-Stoltenberg
-Steinhauer
-Steil
-Steigerwald
-Starkweather
-Stallman
-Squier
-Sparacino
-Span
-Spadafora
-Shiflet
-Shibata
-Shevlin
-Sherrick
-Shake
-Sessums
-Servais
-Senters
-Seevers
-Seelye
-Searfoss
-Seabrooks
-Scoles
-Schwager
-Schrom
-Schmeltzer
-Scheffel
-Sax
-Sawin
-Saterfiel
-Sardina
-Sanroman
-Sane
-Sandin
-Salamanca
-Saladin
-Sak
-Sabia
-Rustin
-Rushin
-Ruley
-Rueter
-Row
-Rotter
-Rosenzweig
-Roles
-Rohe
-Roder
-Rockey
-Ro
-Riter
-Rieth
-Ried
-Riding
-Riddles
-Ridder
-Rennick
-Remmers
-Remer
-Relyea
-Reilley
-Reder
-Rasheed
-Rakowski
-Rabin
-Queener
-Pursel
-Prue
-Prowell
-Pritts
-Primo
-Presler
-Pouncy
-Porche
-Porcaro
-Pollman
-Pleas
-Planas
-Pinkley
-Pinegar
-Pilger
-Philson
-Petties
-Perrodin
-Pendergrast
-Patao
-Pasternak
-Passarelli
-Pasko
-Parshall
-Panos
-Panella
-Palombo
-Padillo
-Oyama
-Overlock
-Overbeck
-Otterson
-Orrell
-Ornellas
-Opitz
-Okelly
-Officer
-Obando
-Noggle
-Nicosia
-Netto
-Negrin
-Natali
-Nakayama
-Nagao
-Nadel
-Musial
-Murrill
-Murrah
-Munsch
-Mucci
-Mrozek
-Moyes
-Mowrer
-Moris
-Morais
-Moorhouse
-Monico
-Mone
-Mondy
-Moncayo
-Mole
-Miltenberger
-Milsap
-Milone
-Millikin
-Milardo
-Mika
-Micheals
-Micco
-Meyerson
-Mericle
-Mendell
-Meinhardt
-Meachum
-Mcleroy
-Mcgray
-Mcgonigal
-Maultsby
-Matis
-Matheney
-Matamoros
-Marro
-Marcil
-Marcial
-Mantz
-Mannings
-Maltby
-Malchow
-Maiorano
-Mahn
-Mahlum
-Maglio
-Mae
-Maberry
-Lustig
-Luellen
-Longwell
-Longenecker
-Lofland
-Locascio
-Linney
-Linneman
-Lighty
-Levell
-Levay
-Lenahan
-Lemen
-Lehto
-Lebaron
-Lanctot
-Lamy
-Lainez
-Laffoon
-Labombard
-Kujawski
-Kroger
-Kreutzer
-Korhonen
-Kondo
-Kollman
-Kohan
-Kogut
-Knaus
-Kivi
-Kittel
-Kinner
-Kindig
-Kindel
-Kiesel
-Kidney
-Kibby
-Khang
-Kettler
-Ketterer
-Kepner
-Kelliher
-Keenum
-Kanode
-Kail
-July
-Juhasz
-Jowett
-Jolicoeur
-Jeon
-Iser
-Ingrassia
-Imai
-Hutchcraft
-Humiston
-Hulings
-Hukill
-Huizenga
-Hugley
-Huddle
-Hose
-Hornyak
-Hodder
-Hisle
-Hillenbrand
-Hille
-Higuchi
-Hertzler
-Herdon
-Heppner
-Hepp
-Heitmann
-Heckart
-Hazlewood
-Hayles
-Hayek
-Hawthorn
-Hawkin
-Haugland
-Hasler
-Harbuck
-Happel
-Hambly
-Hambleton
-Hagaman
-Guzzi
-Gullette
-Guinyard
-Grogg
-Grise
-Griffing
-Goto
-Gosney
-Goods
-Goley
-Goldblatt
-Gledhill
-Girton
-Giltner
-Gillock
-Gilham
-Gilfillan
-Giblin
-Gentner
-Gehlert
-Gehl
-Garten
-Garney
-Garlow
-Garett
-Galles
-Galeana
-Futral
-Fuhr
-Friedland
-Franson
-Fransen
-Foulds
-Follmer
-Foland
-Flax
-Flavin
-Firkins
-Fillion
-Figueredo
-Ferrill
-Fenster
-Fenley
-Fauver
-Farfan
-Factor
-Eustice
-Eppler
-Engelman
-Engelke
-Emmer
-Elzy
-Ellwood
-Ellerbee
-Elks
-Ehret
-Ebbert
-Durrah
-Dupras
-Dubuque
-Dragoo
-Donlon
-Dolloff
-Doi
-Dibella
-Derrico
-Demko
-Demar
-Darrington
-Czapla
-Crooker
-Creagh
-Cranor
-Craner
-Crafts
-Crabill
-Coyer
-Cowman
-Cowherd
-Cottone
-Costillo
-Coster
-Costas
-Cosenza
-Corker
-Collinson
-Coello
-Clingman
-Clingerman
-Claborn
-Citizen
-Chmura
-Chausse
-Chaudhry
-Chapell
-Chancy
-Cerrone
-Caves
-Caverly
-Caulkins
-Carn
-Campfield
-Campanelli
-Callaham
-Cadorette
-Butkovich
-Buske
-Burrier
-Burkley
-Bunyard
-Budge
-Buckelew
-Buchheit
-Broman
-Brescia
-Brasel
-Brain
-Boyster
-Booe
-Bonomo
-Bonnet
-Bondi
-Bohnsack
-Bobby
-Blomberg
-Blanford
-Bilderback
-Biggins
-Bently
-Behrends
-Beegle
-Bedoya
-Bechtol
-Beaubien
-Bayerl
-Baumgart
-Baumeister
-Barratt
-Barlowe
-Barkman
-Barbagallo
-Baldree
-Baine
-Bail
-Baggs
-Bacote
-Aylward
-Ashurst
-Arvidson
-Arthurs
-Arrieta
-Arrey
-Arreguin
-Arrant
-Arner
-Armor
-Arizmendi
-Anker
-Amis
-Amend
-Alphin
-Allbright
-Aikin
-Acres
-Zupan
-Zuchowski
-Zeolla
-Zanchez
-Zahradnik
-Zahler
-Younan
-Yeater
-Yearta
-Yarrington
-Yantis
-Woomer
-Wollard
-Wolfinger
-Woerner
-Witek
-Wishon
-Wisener
-Wingerter
-Willet
-Wilding
-Wiedemann
-Weisel
-Wedeking
-Weary
-Waybright
-Wardwell
-Walkins
-Waldorf
-Voth
-Voit
-Virden
-Viloria
-Villagran
-Vasta
-Vashon
-Vaquera
-Vantassell
-Vanderlinden
-Vandergrift
-Vancuren
-Valenta
-Underdahl
-Tyra
-Tygart
-Twining
-Twiford
-Turlington
-Tullius
-Tubman
-Trowell
-Trieu
-Transue
-Tousant
-Torgersen
-Tooker
-Tony
-Tome
-Toma
-Tocci
-Tippins
-Tinner
-Timlin
-Tillinghast
-Tidmore
-Teti
-Tedrick
-Tacey
-Swanberg
-Sunde
-Summitt
-Summerford
-Summa
-Sue
-Stratman
-Strandberg
-Storck
-Stober
-Steitz
-Stayer
-Stauber
-Staiger
-Sponaugle
-Spofford
-Sparano
-Spagnola
-Sokoloski
-Snay
-Slough
-Skowronski
-Sieck
-Shimkus
-Sheth
-Sherk
-Shankles
-Shakespeare
-Shahid
-Sevy
-Sergeant
-Senegal
-Seiden
-Seidell
-Searls
-Searight
-Schwalm
-Schug
-Schilke
-Schier
-Scheck
-Sawtelle
-Santore
-Santa
-Sanks
-Sandquist
-Sanden
-Saling
-Sabine
-Saathoff
-Ryberg
-Rustad
-Ruffing
-Rudnicki
-Ruane
-Rozzi
-Rowse
-Rosenau
-Rodes
-Risser
-Riggin
-Riess
-Riese
-Rhoten
-Reinecke
-Reigle
-Reichling
-Redner
-Rebelo
-Raynes
-Raimondi
-Rahe
-Rada
-Querry
-Quellette
-Pulsifer
-Prochnow
-Pretty
-Prato
-Poulton
-Poudrier
-Poll
-Policastro
-Polhemus
-Polasek
-Poissant
-Pohlmann
-Plotner
-Pitkin
-Pita
-Pio
-Pinkett
-Pilot
-Piekarski
-Pichon
-Philippe
-Pfau
-Petroff
-Petermann
-Peplinski
-Peller
-Pecinovsky
-Pearse
-Pattillo
-Patague
-Parlier
-Parenti
-Parchman
-Pane
-Paff
-Ota
-Ortner
-Oros
-Nolley
-Noakes
-Nigh
-Nicolosi
-Nicolay
-Newnam
-Netter
-Nass
-Napoles
-Nakata
-Nakamoto
-Muriel
-Muck
-Morlock
-Moraga
-Montilla
-Mongeau
-Molitor
-Mohney
-Mitchener
-Meyerhoff
-Medel
-Mcniff
-Mcmonagle
-Mcglown
-Mcglinchey
-Mcgarrity
-Mccright
-Mccorvey
-Mcconnel
-Mccargo
-Mazzei
-Matula
-Mastroianni
-Massingale
-Maring
-Maricle
-Marc
-Mans
-Mannon
-Mannix
-Manney
-Manger
-Manalo
-Malo
-Malan
-Mahony
-Madril
-Mackowiak
-Macko
-Macintosh
-Lurry
-Luczynski
-Lucke
-Lucarelli
-Luca
-Loud
-Lou
-Losee
-Lorence
-Loiacono
-Lohse
-Loder
-Lipari
-Linebarger
-Lindamood
-Limbaugh
-Letts
-Leleux
-Leep
-Leeder
-Leard
-Laxson
-Lawry
-Laverdiere
-Laughton
-Lastra
-Kurek
-Kriss
-Krishnan
-Kretschmer
-Krebsbach
-Kontos
-Knobel
-Knauf
-Klick
-Kleven
-Klawitter
-Kitchin
-Kirkendoll
-Kinkel
-Kingrey
-Kilbourn
-Kensinger
-Kennerly
-Kamin
-Justiniano
-Jurek
-Junkin
-Julia
-Judon
-Jordahl
-Jeanes
-Jarrells
-Jamal
-Iwamoto
-Isreal
-Ishida
-Ines
-Immel
-Iman
-Ihle
-Hyre
-Hurn
-Hunn
-Hultman
-Huffstetler
-Huffer
-Hubner
-Howey
-Horney
-Hooton
-Holts
-Holscher
-Holen
-Hoggatt
-Hilaire
-Herz
-Henne
-Helstrom
-Hellickson
-Heinlein
-Heckathorn
-Heckard
-Heather
-Heart
-Headlee
-Hauptman
-Haughey
-Hatt
-Harring
-Harford
-Hammill
-Hamed
-Halperin
-Haig
-Hagwood
-Hagstrom
-Gunnells
-Gundlach
-Guardiola
-Greeno
-Greenland
-Gonce
-Goldsby
-Gobel
-Gisi
-Gillins
-Gillie
-Germano
-Geibel
-Gauger
-Garriott
-Garbarino
-Gander
-Gajewski
-Funari
-Fullbright
-Fuell
-Fritzler
-Freshwater
-Freas
-Fortino
-Forbus
-Fonda
-Flohr
-Flemister
-Fisch
-Finks
-Fenstermaker
-Feldstein
-Faw
-Farhat
-Farah
-Fankhauser
-Fagg
-Fader
-Exline
-Emigh
-Eguia
-Edman
-Eckler
-Eastburn
-Dy
-Dunmore
-Dubuisson
-Dubinsky
-Drayer
-Doverspike
-Doubleday
-Doten
-Dorner
-Dolson
-Dohrmann
-Disla
-Direnzo
-Dipaola
-Dines
-Dickie
-Diblasi
-Dewolf
-Desanti
-Dennehy
-Demming
-Delker
-Decola
-Davilla
-Davids
-Daughtridge
-Darville
-Darland
-Danzy
-Dandy
-Dagenais
-Culotta
-Cruzado
-Crudup
-Croswell
-Coverdale
-Covelli
-Couts
-Corbell
-Coplan
-Coolbaugh
-Conyer
-Conlee
-Conigliaro
-Comiskey
-Coberly
-Clendening
-Clairmont
-Cienfuegos
-Chojnacki
-Chilcote
-Champney
-Cassara
-Casazza
-Casado
-Carew
-Carbin
-Carabajal
-Calcagni
-Cail
-Caddy
-Busbee
-Burts
-Burbridge
-Bunge
-Bundick
-Buhler
-Bucker
-Bucholtz
-Bruen
-Broce
-Brite
-Brignac
-Brierly
-Bridgman
-Braham
-Bradish
-Boyington
-Borjas
-Bonnie
-Bonn
-Bonhomme
-Bohlen
-Bogardus
-Bockelman
-Blick
-Blackerby
-Bizier
-Biro
-Binney
-Bertolini
-Bertin
-Berti
-Bert
-Bento
-Beno
-Belgarde
-Belding
-Beckel
-Becerril
-Bazaldua
-Bayes
-Bayard
-Barrus
-Barris
-Baros
-Bara
-Ballow
-Balboa
-Bakewell
-Baginski
-Badalamenti
-Backhaus
-Avilez
-Auvil
-Atteberry
-Ardon
-Anzaldua
-Anello
-Amsler
-Amo
-Ambrosio
-Althouse
-Alles
-Alix
-Alberti
-Alberson
-Aitchison
-Aguinaga
-Ziemann
-Zickefoose
-Zerr
-Zeh
-Zeck
-Zartman
-Zahm
-Zabriskie
-Yohn
-Yellowhair
-Yeaton
-Yarnall
-Yaple
-Wolski
-Wixon
-Winford
-Willner
-Willms
-Whitsitt
-Wheelwright
-Weyandt
-Wess
-Wengerd
-Weatherholtz
-Wattenbarger
-Walrath
-Walpole
-Waldrip
-Voges
-Violet
-Vinzant
-Viars
-Veres
-Veneziano
-Veillon
-Vawter
-Vaughns
-Vanwart
-Vanostrand
-Valiente
-Valderas
-Uhrig
-Tunison
-Tulloch
-Trostle
-Treaster
-Traywick
-Toye
-Tomson
-Tomasello
-Tomasek
-Tippit
-Tinajero
-Tift
-Tienda
-Thorington
-Thierry
-Thieme
-Thibeau
-Thakkar
-Tewell
-Test
-Telfer
-Sweetser
-Sum
-Stratford
-Stracener
-Stoke
-Stiverson
-Stelling
-Stefan
-Stavros
-Speaker
-Spatz
-Spagnoli
-Sorge
-Sober
-Slevin
-Slabaugh
-Simson
-Shupp
-Shoultz
-Shotts
-Shiroma
-Shetley
-Sherrow
-Sheffey
-Shawgo
-Shamburger
-Sester
-Segraves
-Seelig
-Seats
-Scioneaux
-Schwartzkopf
-Schwabe
-Scholes
-Schmuck
-Schluter
-Schlecht
-Schillaci
-Schildgen
-Schieber
-Schewe
-Schecter
-Scarpelli
-Scaglione
-Sautter
-Santelli
-Sandman
-Salmi
-Sabado
-Ryer
-Rydberg
-Ryba
-Rushford
-Running
-Runk
-Ruddick
-Rotondo
-Rote
-Rosenfield
-Roesner
-Rocchio
-Ritzer
-Rippel
-Rimes
-Riffel
-Richison
-Ribble
-Reynold
-Resh
-Rehn
-Ratti
-Rasor
-Rasnake
-Rappold
-Rando
-Radosevich
-Pulice
-Puff
-Prichett
-Pribble
-Poynor
-Plowden
-Pitzen
-Pittsley
-Pitter
-Pigeon
-Philyaw
-Philipps
-Petite
-Pestana
-Perro
-Perone
-Pera
-Peil
-Pedone
-Pawlowicz
-Pattee
-Parten
-Parlin
-Pariseau
-Paredez
-Pardon
-Panther
-Paek
-Pacifico
-Otts
-Ostrow
-Osornio
-Oslund
-Orso
-Ooten
-Onken
-Oniel
-Onan
-Ollison
-Ohlsen
-Ohlinger
-Odowd
-Niemiec
-Neubert
-Nembhard
-Neaves
-Neathery
-Nakasone
-Myerson
-Muto
-Muntz
-Munez
-Mumme
-Mumm
-Mujica
-Muise
-Muench
-Morriss
-Molock
-Mishoe
-Minier
-Metzgar
-Mero
-Meiser
-Meese
-Meals
-Mcsween
-Mcquire
-Mcquinn
-Mcpheeters
-Mckeller
-Mcilrath
-Mcgown
-Mcdavis
-Mccuen
-Mcclenton
-Maxham
-Matsui
-Marriner
-Marlette
-Mantle
-Mansur
-Mancino
-Maland
-Majka
-Maisch
-Maheux
-Madry
-Madriz
-Mackley
-Macke
-Lydick
-Lutterman
-Luppino
-Lundahl
-Lovingood
-Loudon
-Longmore
-Lippman
-Liefer
-Leveque
-Lescarbeau
-Lemmer
-Ledgerwood
-Lawver
-Lawrie
-Lattea
-Lasko
-Lahman
-Kulpa
-Kukowski
-Kukla
-Kubota
-Kubala
-Krizan
-Kriz
-Krikorian
-Kravetz
-Kramp
-Kowaleski
-Knobloch
-Klosterman
-Kloster
-Klepper
-Kirven
-Kinnaman
-Kinnaird
-Killam
-Kiesling
-Kesner
-Keebler
-Keagle
-Karls
-Kapinos
-Kantner
-Kaba
-Junious
-Jefferys
-Jacquet
-Izzi
-Ishii
-Irion
-Ifill
-Hyun
-Hotard
-Horman
-Hoppes
-Hopkin
-Hokanson
-Hoda
-Hocutt
-Hoaglin
-Hites
-Hirai
-Hindle
-Hinch
-Hilty
-Hild
-Hier
-Hickle
-Hibler
-Henrichs
-Hempstead
-Helmers
-Hellard
-Heims
-Heidler
-Hearst
-Hawbaker
-Hau
-Harkleroad
-Harari
-Hanney
-Hannaford
-Hamid
-Hamburger
-Haltom
-Hallford
-Guilliams
-Guerette
-Gryder
-Groseclose
-Groen
-Grimley
-Greenidge
-Greek
-Graffam
-Goucher
-Goodenough
-Goldsborough
-Goldie
-Gloster
-Glanton
-Gladson
-Gladding
-Ghee
-Gethers
-Gerstein
-Geesey
-Geddie
-Gayer
-Gaw
-Gaver
-Gauntt
-Gartland
-Garriga
-Garoutte
-Gao
-Gan
-Fronk
-Fritze
-Frenzel
-Forgione
-Fluitt
-Flinchbaugh
-Flach
-Fiorito
-Finan
-Finamore
-Fimbres
-Fillman
-File
-Figeroa
-Ficklin
-Feher
-Feddersen
-Fambro
-Fairbairn
-Eves
-Esperanza
-Escalona
-Elsey
-Eisenstein
-Ehrenberg
-Eargle
-Dress
-Drane
-Dorothy
-Doria
-Dogan
-Dively
-Dewolfe
-Dettman
-Desiderio
-Desch
-Dennen
-Denk
-Demaris
-Delsignore
-Dejarnette
-Deere
-Dedman
-Daws
-Dawn
-Dauphinais
-Danz
-Dantin
-Dannenberg
-Dalby
-Currence
-Culwell
-Cuesta
-Croston
-Crossno
-Cromley
-Crisci
-Craw
-Coryell
-Cooter
-Condra
-Columbia
-Colpitts
-Colas
-Coach
-Clink
-Clevinger
-Clermont
-Cistrunk
-Cirilo
-Chirico
-Chiarello
-Cephus
-Cecena
-Cavaliere
-Caughey
-Casimir
-Carwell
-Carlon
-Carbonaro
-Caraveo
-Cantley
-Callejas
-Cagney
-Cadieux
-Cabaniss
-Bushard
-Burlew
-Buras
-Budzinski
-Bucklew
-Bruneau
-Brummer
-Brueggemann
-Brotzman
-Bross
-Broad
-Brittian
-Brimage
-Briles
-Brickman
-Breneman
-Breitenstein
-Brandel
-Brackins
-Boydstun
-Botta
-Bosket
-Boros
-Borgmann
-Bordeau
-Bonifacio
-Bolten
-Boehman
-Blundell
-Bloodsaw
-Bjerke
-Biffle
-Bickett
-Bickers
-Beville
-Bergren
-Bergey
-Benzing
-Belfiore
-Beirne
-Beckert
-Bebout
-Baumert
-Battey
-Bartman
-Barrs
-Barriere
-Barcelo
-Barbe
-Balliet
-Baham
-Babst
-Auton
-Asper
-Asbell
-Arzate
-Argento
-Arel
-Araki
-Arai
-Apo
-Antley
-Amodeo
-Ammann
-Allyn
-Allensworth
-Aldape
-Akey
-Abeita
-Zweifel
-Zeng
-Zeiler
-Zamor
-Zalenski
-Yzaguirre
-Yousef
-Yetman
-Yau
-Wyer
-Woolwine
-Wohlgemuth
-Wohlers
-Wittenberg
-Wingrove
-Wind
-Wimsatt
-Willimas
-Wilkenson
-Wildey
-Wilderman
-Wilczynski
-Wigton
-Whorley
-Wellons
-Welles
-Welle
-Weirich
-Weideman
-Weide
-Weekly
-Weast
-Wasmund
-Warshaw
-Walson
-Waldner
-Walch
-Walberg
-Wagener
-Wageman
-Vrieze
-Vossen
-Vorce
-Voorhis
-Vonderheide
-Viruet
-Vicari
-Verne
-Velasques
-Vautour
-Vartanian
-Varona
-Vankeuren
-Vandine
-Vandermeer
-Ursery
-Underdown
-Uhrich
-Uhlman
-Tworek
-Twine
-Twellman
-Tweedie
-Tutino
-Turmelle
-Tubb
-Troop
-Trivedi
-Triano
-Trevathan
-Treese
-Treanor
-Treacy
-Traina
-Topham
-Toenjes
-Tippetts
-Tieu
-Thomure
-Thatch
-Than
-Tetzlaff
-Tetterton
-Tena
-Tell
-Teamer
-Tappan
-Tank
-Talcott
-Tagg
-Szczepanski
-Syring
-Surace
-Sulzer
-Sugrue
-Sugarman
-Suess
-Styons
-Stwart
-Stupka
-Strey
-Straube
-Strate
-Stoddart
-Stockbridge
-Stjames
-Stinger
-Steimle
-Steenberg
-Start
-Stamand
-Staller
-Stahly
-Stager
-Spurgin
-Sprow
-Sponsler
-Speas
-Spainhour
-Sones
-Smits
-Smelcer
-Slovak
-Slaten
-Singleterry
-Simien
-Sidebottom
-Sibrian
-Shellhammer
-Shelburne
-Shambo
-Sepeda
-Seigel
-Scogin
-Scianna
-Schmoll
-Schmelzer
-Scheu
-Schachter
-Savant
-Sauseda
-Satcher
-Sandor
-Sampsell
-Rugh
-Rufener
-Rudolf
-Rotenberry
-Rossow
-Rossbach
-Roots
-Rollman
-Rodrique
-Rodreguez
-Rodkey
-Roda
-Rising
-Rini
-Riggan
-Rients
-Riedl
-Rhines
-Ress
-Reinbold
-Raschke
-Rardin
-Rain
-Racicot
-Quillin
-Pushard
-Primrose
-Pries
-Pressey
-Precourt
-Pratts
-Postel
-Poppell
-Plumer
-Pingree
-Pieroni
-Pflug
-Petre
-Petrarca
-Peterka
-Peru
-Perkin
-Pergande
-Peranio
-Penna
-Pekar
-Pea
-Paulhus
-Pasquariello
-Parras
-Parmentier
-Para
-Panzer
-Pamplin
-Oviatt
-Osterhoudt
-Ostendorf
-Osmun
-Ortman
-Orloff
-Orban
-Onofrio
-Olveda
-Oltman
-Okeeffe
-Ocana
-Nunemaker
-Novy
-Noffsinger
-Nish
-Niday
-Nethery
-Nestle
-Nemitz
-Neidert
-Nadal
-Nack
-Muszynski
-Munsterman
-Mulherin
-Mortimore
-Morter
-Montesino
-Montalvan
-Montalbano
-Momon
-Moman
-Mom
-Mogan
-Minns
-Millward
-Milling
-Michelsen
-Micheal
-Mewborn
-Metro
-Metayer
-Mensch
-Meloy
-Meggs
-Meaders
-Mcsorley
-Mcmenamin
-Mclead
-Mclauchlin
-Mcguffey
-Mcguckin
-Mcglaughlin
-Mcferron
-Mcentyre
-Mccrum
-Mccawley
-Mcbain
-Mayhue
-Mau
-Matzen
-Matton
-Marsee
-Marrin
-Marland
-Markum
-Mantilla
-Manfre
-Malta
-Makuch
-Madlock
-Maclaren
-Macauley
-Luzier
-Luthy
-Lufkin
-Lucena
-Loudin
-Lothrop
-Lorch
-Lona
-Loll
-Loadholt
-Lisa
-Lippold
-Likes
-Lichtman
-Liberto
-Liakos
-Lewicki
-Levett
-Level
-Lentine
-Leja
-Legree
-Lawhead
-Lauro
-Lauder
-Lard
-Lanman
-Lank
-Laning
-Lama
-Lalor
-Krob
-Kriger
-Kriegel
-Krejci
-Kreisel
-Kozel
-Kos
-Konkel
-Kolstad
-Koenen
-Kocsis
-Knoblock
-Knebel
-Klopfer
-Klee
-Kilday
-Kesten
-Kerbs
-Kempker
-Keathley
-Kazee
-Kawasaki
-Kaur
-Kamer
-Kamaka
-Kallenbach
-Kafka
-Jerrell
-Jehle
-Jaycox
-Jardin
-Jahns
-Ivester
-Hyppolite
-Hyche
-Husbands
-Hur
-Huppert
-Hulin
-Hubley
-Horsey
-Hornak
-Holzwarth
-Holmon
-Hollabaugh
-Holaway
-Hodes
-Hoak
-Hinesley
-Hillwig
-Hillebrand
-Highfield
-Heslop
-Herrada
-Hendryx
-Hellums
-Heit
-Heishman
-Heindel
-Hayslip
-Hayford
-Hastie
-Hartgrove
-Hanus
-Hakim
-Hains
-Hadnott
-Gundersen
-Gulino
-Guidroz
-Guebert
-Gressett
-Greenhouse
-Graydon
-Gramling
-Grahn
-Goupil
-Gory
-Gorelick
-Goodreau
-Goodnough
-Golay
-Going
-Goers
-Glatz
-Gillikin
-Gieseke
-Giammarino
-Getman
-Geronimo
-Gerardo
-Gensler
-Gazda
-Garibaldi
-Gahan
-Fury
-Funderburke
-Fukuda
-Fugitt
-Fuerst
-Fortman
-Forsgren
-Formica
-Fluke
-Flink
-Fitton
-Feltz
-Fekete
-Feit
-Fehrenbach
-Farone
-Farinas
-Faries
-Fagen
-Ewin
-Esquilin
-Esch
-Enderle
-Ellery
-Ellers
-Ekberg
-Egli
-Effinger
-Dymond
-Dulle
-Dula
-Duhe
-Dudney
-Duane
-Dowless
-Dower
-Dorminey
-Dopp
-Dooling
-Domer
-Disher
-Dillenbeck
-Difilippo
-Dibernardo
-Deyoe
-Devillier
-Denley
-Deland
-Defibaugh
-Deeb
-Debow
-Dauer
-Datta
-Darcangelo
-Daoust
-Damelio
-Dahm
-Dahlman
-Cypher
-Curling
-Curlin
-Cupit
-Culton
-Cuenca
-Cropp
-Croke
-Cremer
-Crace
-Cosio
-Corzine
-Coombe
-Coman
-Colone
-Coloma
-Collingwood
-Coletta
-Coderre
-Cocke
-Cobler
-Claybrook
-Circle
-Cincotta
-Cimmino
-Christoff
-Christina
-Chisum
-Chillemi
-Chevere
-Chae
-Chachere
-Cervone
-Cermak
-Cefalu
-Cauble
-Cather
-Caso
-Carns
-Carcamo
-Carbo
-Capoccia
-Capello
-Capell
-Canino
-Cambareri
-Calvi
-Cabiness
-Bushell
-Burtt
-Burstein
-Burkle
-Bunner
-Bundren
-Buechler
-Bryand
-Bruso
-Brownstein
-Brow
-Brouse
-Brodt
-Broaden
-Brisbin
-Brightman
-Bridgett
-Brenes
-Breitenbach
-Brazzell
-Brazee
-Bramwell
-Bramhall
-Bradstreet
-Boyton
-Bowland
-Boulter
-Bossert
-Bonura
-Bonebrake
-Bonacci
-Boeck
-Blystone
-Birchard
-Bilal
-Biddy
-Bibee
-Bevans
-Bethke
-Bertelsen
-Berney
-Bergfeld
-Benware
-Bellon
-Bellah
-Been
-Batterton
-Barberio
-Bamber
-Bagdon
-Badeaux
-Averitt
-Augsburger
-Ates
-Arvie
-Aronowitz
-Arens
-Arch
-Araya
-Angelos
-Andrada
-Amell
-Amante
-Alvin
-Almy
-Almquist
-Alls
-Aispuro
-Aguillon
-Agudelo
-Admire
-Acy
-Aceto
-Abbot
-Abalos
-Zdenek
-Zaremba
-Zaccaria
-Youssef
-Wrona
-Wrinkle
-Wrede
-Wotton
-Woolston
-Wolpert
-Wollman
-Wince
-Wimberley
-Willmore
-Willetts
-Wikoff
-Wieder
-Wickert
-Whitenack
-Wernick
-Welte
-Welden
-Weiskopf
-Weisenberger
-Weich
-Wallington
-Walder
-Vossler
-Vore
-Vigo
-Vierling
-Victorine
-Verdun
-Vencill
-Vena
-Vazguez
-Vassel
-Vanzile
-Vanvliet
-Vantrease
-Vannostrand
-Vanderveer
-Vanderveen
-Vancil
-Uyeda
-Umphrey
-Uhler
-Uber
-Tutson
-Turrentine
-Tullier
-Tugwell
-Trundy
-Tripodi
-Tomer
-Tomei
-Tomasi
-Tomaselli
-Tokarski
-Tisher
-Tibbets
-Thweatt
-Thistle
-Tharrington
-Tesar
-Telesco
-Teasdale
-Tatem
-Taniguchi
-Suriel
-Sudler
-Stutsman
-Sturman
-Strite
-Strelow
-Streight
-Strawder
-Stransky
-Strahl
-Stours
-Stong
-Stinebaugh
-Stilts
-Stillson
-Steyer
-Stelle
-Steffy
-Steffensmeier
-Statham
-Squillante
-Spiess
-Spargo
-Southward
-Soller
-Soden
-Snuggs
-Snellgrove
-Smyers
-Smiddy
-Slonaker
-Skyles
-Skowron
-Sivils
-Siqueiros
-Siers
-Siddall
-Shorty
-Shontz
-Shingler
-Shiley
-Shibley
-Sherard
-Shelnutt
-Shedrick
-Shasteen
-Sereno
-Selke
-Scovil
-Scola
-Schuett
-Schuessler
-Schreckengost
-Schranz
-Schoepp
-Schneiderman
-Schlanger
-Schiele
-Scheuermann
-Schertz
-Scheidler
-Scheff
-Schaner
-Schamber
-Scardina
-Savedra
-Saulnier
-Sater
-Sarro
-Sambrano
-Salomone
-Sabourin
-Ruud
-Rutten
-Ruffino
-Ruddock
-Rowser
-Roussell
-Rosengarten
-Rominger
-Rollinson
-Rohman
-Roeser
-Rodenberg
-Roberds
-Ridgell
-Rhodus
-Reynaga
-Rexrode
-Revelle
-Rempel
-Remigio
-Reising
-Reiling
-Reetz
-Rayos
-Ravenscroft
-Ravenell
-Raulerson
-Rasmusson
-Rask
-Rase
-Ragon
-Quesnel
-Quashie
-Puzo
-Puterbaugh
-Ptak
-Prost
-Prisbrey
-Principe
-Pricer
-Pratte
-Pouncey
-Portman
-Pontious
-Pomerantz
-Platter
-Planck
-Pilkenton
-Pilarski
-Piano
-Phegley
-Pertuit
-Perla
-Penta
-Pelc
-Peffer
-Pech
-Peagler
-Pavelka
-Pavao
-Patman
-Paskett
-Parrilla
-Pardini
-Papazian
-Panter
-Palin
-Paley
-Pai
-Pages
-Paetzold
-Packett
-Pacheo
-Ostrem
-Orsborn
-Olmedo
-Okamura
-Oiler
-Ohm
-Oglesbee
-Oatis
-Oakland
-Nuckles
-Notter
-Nordyke
-Nogueira
-Niswander
-Nibert
-Nesby
-Neloms
-Nading
-Naab
-Munns
-Mullarkey
-Moudy
-Moret
-Monnin
-Molder
-Modisette
-Moczygemba
-Moctezuma
-Mischke
-Miro
-Mings
-Milot
-Milledge
-Milhorn
-Milera
-Mieles
-Mickley
-Michelle
-Micek
-Metellus
-Mersch
-Merola
-Mercure
-Mencer
-Mellin
-Mell
-Meinke
-Mcquillan
-Mcmurtrie
-Mckillop
-Mckiernan
-Mckendrick
-Mckamie
-Mcilvaine
-Mcguffie
-Mcgonigle
-Mcgarrah
-Mcfetridge
-Mcenaney
-Mcdow
-Mccutchan
-Mccallie
-Mcadam
-Maycock
-Maybee
-Mattei
-Massi
-Masser
-Masiello
-Marth
-Marshell
-Marmo
-Marksberry
-Markell
-Marchal
-Manross
-Manganaro
-Mally
-Mallow
-Mailhot
-Magyar
-Madonna
-Madero
-Madding
-Maddalena
-Macfarland
-Lynes
-Lush
-Lugar
-Luckie
-Lucca
-Lovitt
-Loveridge
-Loux
-Loth
-Loso
-Lorenzana
-Lorance
-Lockley
-Lockamy
-Littler
-Litman
-Litke
-Liebel
-Lichtenberger
-Licea
-Leverich
-Letarte
-Lesesne
-Leno
-Legleiter
-Leffew
-Laurin
-Launius
-Laswell
-Lassen
-Lasala
-Laraway
-Laramore
-Landrith
-Lancon
-Lanahan
-Laiche
-Laford
-Lachermeier
-Kunst
-Kugel
-Kuck
-Kuchta
-Kube
-Korus
-Koppes
-Kolbe
-Koerber
-Kochan
-Knittel
-Kluck
-Kleve
-Kleine
-Kitch
-Kirton
-Kirker
-Kintz
-Kinghorn
-Kindell
-Kimrey
-Kilduff
-Kilcrease
-Kicklighter
-Kibble
-Kervin
-Keplinger
-Keogh
-Kellog
-Keeth
-Kealey
-Kazmierczak
-Karner
-Kamel
-Kalina
-Kaczynski
-Juel
-Joye
-Jerman
-Jeppson
-Jawad
-Jasik
-Jaqua
-Janusz
-Janco
-Island
-Inskeep
-Inks
-Ingold
-Ing
-Hyndman
-Hymer
-Hunte
-Hunkins
-Humber
-Huffstutler
-Huffines
-Hudon
-Hudec
-Hovland
-Houze
-Hout
-Hougland
-Hopf
-Hon
-Holsapple
-Holness
-Hollenbach
-Hoffmeister
-Hitchings
-Hirata
-Hieber
-Hickel
-Hewey
-Herriman
-Hermansen
-Herandez
-Henze
-Heffelfinger
-Hedgecock
-Hazlitt
-Hazelrigg
-Haycock
-Harren
-Harnage
-Harling
-Harcrow
-Hannold
-Hanline
-Hanel
-Hanberry
-Hammersley
-Hamernik
-Halliwell
-Hajduk
-Haithcock
-Haff
-Hadaway
-Haan
-Gullatt
-Guilbault
-Guidotti
-Gruner
-Grisson
-Grieves
-Granato
-Gracie
-Grabert
-Gover
-Gorka
-Glueck
-Girardin
-Giorgio
-Giesler
-Gersten
-Gering
-Geers
-Gaut
-Gaulin
-Gaskamp
-Garbett
-Gallivan
-Galland
-Gaeth
-Fullenkamp
-Fullam
-Friedrichs
-Freire
-Freeney
-Fredenburg
-Frappier
-Fowkes
-Foree
-Fleurant
-Fleig
-Fleagle
-Fitzsimons
-Fischetti
-Fiorenza
-Finneran
-Filippi
-Figueras
-Fesler
-Fertig
-Fennel
-Feltmann
-Felps
-Felmlee
-Faye
-Fannon
-Familia
-Fairall
-Fail
-Fadden
-Esslinger
-Enfinger
-Elsasser
-Elmendorf
-Ellisor
-Einhorn
-Ehrman
-Egner
-Edmisten
-Edlund
-Ebinger
-Dyment
-Dykeman
-Durling
-Dunstan
-Dunsmore
-Dugal
-Duer
-Drescher
-Doyel
-Down
-Dossey
-Donelan
-Dockstader
-Dobyns
-Divis
-Dilks
-Didier
-Desrosier
-Desanto
-Deppe
-Deng
-Delosh
-Delange
-Defrank
-Debo
-Dauber
-Dartez
-Daquila
-Dankert
-Dahn
-Cygan
-Cusic
-Curfman
-Croghan
-Croff
-Criger
-Creviston
-Crays
-Cravey
-Crandle
-Crail
-Crago
-Craghead
-Cousineau
-Couchman
-Cothron
-Corella
-Conine
-Coller
-Colberg
-Cogley
-Coatney
-Coale
-Clendenin
-Claywell
-Clagon
-Cifaldi
-Choiniere
-Chickering
-Chica
-Chennault
-Chavarin
-Chattin
-Chaloux
-Challis
-Cesario
-Certain
-Cazarez
-Caughman
-Catledge
-Casebolt
-Carrel
-Carra
-Carlow
-Capote
-Canez
-Camillo
-Caliendo
-Calbert
-Cairo
-Bylsma
-Bustle
-Buskey
-Buschman
-Burkhard
-Burghardt
-Burgard
-Buonocore
-Bunkley
-Bungard
-Bundrick
-Bumbrey
-Buice
-Buffkin
-Brundige
-Brockwell
-Brion
-Brin
-Briant
-Bredeson
-Bransford
-Brannock
-Brakefield
-Brackens
-Brabant
-Boxer
-Bowdoin
-Bouyer
-Bothe
-Boor
-Bonavita
-Bollig
-Blurton
-Blunk
-Blanke
-Blanck
-Birden
-Bierbaum
-Bevington
-Beutler
-Betters
-Bettcher
-Bera
-Benway
-Bengston
-Benesh
-Behar
-Bedsole
-Becenti
-Beachy
-Battersby
-Basta
-Bartmess
-Bartle
-Bartkowiak
-Barsky
-Barrio
-Barletta
-Barfoot
-Banegas
-Ballin
-Baldonado
-Bal
-Azcona
-Avants
-Austell
-Aungst
-Aune
-Aumann
-Audia
-Atterbury
-Asselin
-Asmussen
-Ashline
-Asbill
-Arvizo
-Arnot
-Ariola
-Ardrey
-Angstadt
-Anastasio
-Amsden
-Amor
-Amerman
-Alred
-Almeda
-Allington
-Alewine
-Alcina
-Alberico
-Alas
-Ahlgren
-Aguas
-Agrawal
-Agosta
-Adolphsen
-Addie
-Acre
-Acey
-Aburto
-Abler
-Zwiebel
-Zuk
-Zepp
-Zentz
-Ybarbo
-Yarberry
-Yamauchi
-Yamashiro
-Wurtz
-Wronski
-Worster
-Wootten
-Wool
-Wongus
-Woltz
-Wolanski
-Witzke
-Withey
-Wisecarver
-Wingham
-Wineinger
-Winegarden
-Windholz
-Wilgus
-Wiesen
-Wieck
-Widrick
-Wickliffe
-Whittenberg
-Westby
-Werley
-Wengert
-Wendorf
-Weimar
-Weick
-Weckerly
-Watrous
-Wasden
-Walford
-Wainright
-Wahlstrom
-Wadlow
-Vrba
-Voisin
-Vives
-Vivas
-Vitello
-Villescas
-Villavicencio
-Villanova
-Vialpando
-Vetrano
-Verona
-Vensel
-Vassell
-Varano
-Vanriper
-Vankleeck
-Vanduyne
-Vanderpol
-Vanantwerp
-Valenzula
-Udell
-Turnquist
-Tuff
-Trickett
-Tremble
-Tramble
-Tingey
-Ting
-Timbers
-Tietz
-Thon
-Thiem
-Then
-Tercero
-Tenner
-Tenaglia
-Teaster
-Tarlton
-Taitt
-Taggert
-Tabon
-Sward
-Swaby
-Suydam
-Surita
-Suman
-Sugar
-Suddeth
-Stumbo
-Studivant
-Strobl
-Stretch
-Streich
-Stow
-Stoodley
-Stoecker
-Stillwagon
-Stickle
-Stellmacher
-Stefanik
-Steedley
-Starbird
-Stake
-Stainback
-Stacker
-Speir
-Spath
-Sommerfeld
-Soltani
-Solie
-Sojka
-Sobota
-Sobieski
-Sobczak
-Smullen
-Sleeth
-Slaymaker
-Skolnick
-Skoglund
-Sires
-Singler
-Silliman
-Shrock
-Shott
-Shirah
-Shimek
-Shepperd
-Sheffler
-Sheeler
-Sharrock
-Sharman
-Shalash
-Seyfried
-Seybold
-Selander
-Seip
-Seifried
-Sedor
-Sedlock
-Sebesta
-Seago
-Scutt
-Scrivens
-Sciacca
-Schultze
-Schoemaker
-Schleifer
-Schlagel
-Schlachter
-Schempp
-Scheider
-Scarboro
-Santi
-Sang
-Sandhu
-Sally
-Salim
-Saia
-Rylander
-Ryburn
-Rutigliano
-Ruocco
-Ruland
-Rudloff
-Rott
-Rosenburg
-Rosenbeck
-Romberger
-Romanelli
-Rohloff
-Rohlfing
-Rodda
-Rodd
-Ritacco
-Rielly
-Rieck
-Rickles
-Rickenbacker
-Rhett
-Respass
-Reisner
-Reineck
-Reighard
-Rehbein
-Rega
-Redwood
-Reddix
-Razor
-Rawles
-Raver
-Rattler
-Ratledge
-Rathman
-Ramsburg
-Raisor
-Radovich
-Radigan
-Quail
-Puskar
-Purtee
-Priestly
-Prestidge
-Presti
-Pressly
-Pozo
-Pottinger
-Portier
-Porta
-Porcelli
-Poplawski
-Polin
-Points
-Poeppelman
-Pocock
-Plump
-Plantz
-Placek
-Piro
-Pinnell
-Pinkowski
-Pietz
-Picone
-Philbeck
-Pflum
-Peveto
-Perret
-Pentz
-Payer
-Paulette
-Patlan
-Paterno
-Papageorge
-Pae
-Overmyer
-Overland
-Osier
-Orwig
-Orum
-Orosz
-Oquin
-Opie
-Oda
-Ochsner
-Oathout
-Nygard
-Norville
-Northway
-Niver
-Nicolson
-Newhart
-Nery
-Neitzel
-Nath
-Nanez
-Mustard
-Murnane
-Mortellaro
-Morreale
-Morino
-Moriarity
-Morgado
-Moorehouse
-Mongiello
-Molton
-Mirza
-Minnix
-Millspaugh
-Milby
-Miland
-Miguez
-Mickles
-Michaux
-Mento
-Melugin
-Melrose
-Melito
-Meinecke
-Mehr
-Meares
-Mcneece
-Mckane
-Mcglasson
-Mcgirt
-Mcgilvery
-Mcculler
-Mccowen
-Mccook
-Mcclintic
-Mccallon
-Mazzotta
-Maza
-Mayse
-Mayeda
-Matousek
-Matley
-Martyn
-Maroon
-Marney
-Marnell
-Marling
-Marcelino
-Manuelito
-Maltos
-Malson
-Maire
-Mahi
-Maffucci
-Macken
-Maass
-Lyttle
-Lynd
-Lyden
-Lukasiewicz
-Luebbers
-Lovering
-Loveall
-Lords
-Longtin
-Lok
-Lobue
-Loberg
-Loan
-Lipka
-Lion
-Linen
-Lightbody
-Lichty
-Levert
-Lev
-Lettieri
-Letsinger
-Lepak
-Lemmond
-Lembke
-Leitz
-Lasso
-Lasiter
-Lango
-Landsman
-Lamirande
-Lamey
-Laber
-Kuta
-Kulesza
-Kua
-Krenz
-Kreiner
-Krein
-Kreiger
-Kraushaar
-Kottke
-Koser
-Kornreich
-Kopczynski
-Konecny
-Kok
-Koff
-Koehl
-Kocian
-Knaub
-Kmetz
-Kluender
-Klenke
-Kleeman
-Kitzmiller
-Kirsh
-Kilman
-Kildow
-Kielbasa
-Ketelsen
-Kesinger
-Kendra
-Kehr
-Keef
-Kauzlarich
-Karter
-Kahre
-Junk
-Jong
-Jobin
-Joaquin
-Jinkins
-Jines
-Jeffress
-Jaquith
-Jaillet
-Jablonowski
-Ishikawa
-Irey
-Ingerson
-Indelicato
-In
-Huntzinger
-Huisman
-Huett
-Howson
-Houge
-Hosack
-Hora
-Hoobler
-Holtzen
-Holtsclaw
-Hollingworth
-Hollin
-Hoberg
-Hobaugh
-Hilker
-Hilgefort
-Higgenbotham
-Heyen
-Hetzler
-Hessel
-Hennessee
-Hendrie
-Hellmann
-Heft
-Heesch
-Haymond
-Haymon
-Haye
-Havlik
-Havis
-Haverland
-Haus
-Harstad
-Harriston
-Harm
-Harju
-Hardegree
-Hankey
-Hands
-Hampshire
-Hammell
-Hamaker
-Halbrook
-Halberg
-Guptill
-Guntrum
-Gunderman
-Gunder
-Gularte
-Guarnieri
-Gu
-Groll
-Grippo
-Greely
-Grave
-Gramlich
-Goh
-Goewey
-Goetzinger
-Goding
-Giraud
-Giefer
-Giberson
-Gennaro
-Gemmell
-Gearing
-Gayles
-Gaudin
-Gatz
-Gatts
-Gasca
-Garn
-Gandee
-Gammel
-Galindez
-Galati
-Gagliardo
-Fulop
-Fukushima
-Friedt
-Fretz
-Frenz
-Freeberg
-Frederic
-Fravel
-Fountaine
-Forry
-Forck
-Fonner
-Flippin
-Flewelling
-Flansburg
-Filippone
-Fettig
-Fenlon
-Felter
-Felkins
-Fein
-Faz
-Favor
-Favero
-Faulcon
-Farver
-Farless
-Fahnestock
-Facemire
-Faas
-Eyer
-Evett
-Every
-Esses
-Escareno
-Ensey
-Ennals
-Engelking
-Empey
-Emily
-Elvira
-Ellithorpe
-Effler
-Edling
-Edgley
-Durrell
-Dunkerson
-Draheim
-Domina
-Dombrosky
-Doescher
-Dobbin
-Divens
-Dinatale
-Dimitri
-Dieguez
-Diede
-Devivo
-Devilbiss
-Devaul
-Determan
-Desjardin
-Deshaies
-Demo
-Delpozo
-Delorey
-Delman
-Delapp
-Delamater
-Deibert
-Degroff
-Debelak
-Dapolito
-Dano
-Dacruz
-Dacanay
-Cushenberry
-Cruze
-Crosbie
-Cregan
-Cousino
-Corrie
-Corrao
-Corney
-Cookingham
-Conry
-Collingsworth
-Coldren
-Cobian
-Coate
-Clauss
-Chrysler
-Christine
-Christenberry
-Chmiel
-Chauez
-Charters
-Chait
-Cesare
-Cella
-Caya
-Castenada
-Cashen
-Captain
-Cantrelle
-Canova
-Candy
-Canary
-Campione
-Camel
-Calixte
-Caicedo
-Byerley
-Buttery
-Butter
-Burda
-Burchill
-Bun
-Bulmer
-Bulman
-Buesing
-Buczek
-Buckholz
-Buchner
-Buchler
-Buban
-Bryne
-Brutus
-Brunkhorst
-Brumsey
-Brumer
-Brownson
-Broker
-Brodnax
-Brezinski
-Brazile
-Braverman
-Brasil
-Branning
-Bradly
-Boye
-Boulden
-Bough
-Bossard
-Bosak
-Borth
-Borgmeyer
-Borge
-Blowers
-Blaschke
-Blann
-Blankenbaker
-Bisceglia
-Billingslea
-Bialek
-Beverlin
-Besecker
-Berquist
-Benigno
-Benavente
-Belizaire
-Beisner
-Behrman
-Beausoleil
-Bea
-Baylon
-Bayley
-Bassi
-Basnett
-Basilio
-Basden
-Basco
-Banerjee
-Balli
-Bake
-Bagnell
-Bady
-Averette
-Augusta
-Arzu
-Arn
-Archambeault
-Arboleda
-Arbaugh
-Arata
-Antrim
-Amrhein
-Amerine
-Alpers
-Alfrey
-Alcon
-Albus
-Albertini
-Aguiniga
-Aday
-Acquaviva
-Accardi
-Zygmont
-Zych
-Zollner
-Zobel
-Zinck
-Zertuche
-Zaragosa
-Zale
-Zaldivar
-Ying
-Yeadon
-Wykoff
-Woullard
-Wolfrum
-Wohlford
-Wison
-Wiseley
-Wisecup
-Winchenbach
-Wiltsie
-Whittlesey
-Whitelow
-Whiteford
-Wever
-Westrich
-Wertman
-Wensel
-Wenrich
-Weisbrod
-Weglarz
-Wedderburn
-Weatherhead
-Wease
-Warring
-Wand
-Wadleigh
-Voltz
-Vise
-Villano
-Vicario
-Vermeulen
-Vazques
-Vasko
-Varughese
-Vangieson
-Vanfossen
-Vanepps
-Vanderploeg
-Vancleve
-Valerius
-Uyehara
-Unsworth
-Twersky
-Turrell
-Tuner
-Tsui
-Trunzo
-Trousdale
-Trentham
-Traughber
-Torgrimson
-Toppin
-Tokar
-Tobia
-Tippens
-Tigue
-Thong
-Thiry
-Thackston
-Terhaar
-Tenny
-Tassin
-Tadeo
-Sweigart
-Sutherlin
-Sumrell
-Suen
-Stuhr
-Strzelecki
-Strosnider
-Streiff
-Stottlemyer
-Storment
-Storlie
-Stonesifer
-Stogsdill
-Stenzel
-Stemen
-Stellhorn
-Steidl
-Stecklein
-Statton
-Staple
-Stangle
-Spratling
-Spoor
-Spight
-Spelman
-Spece
-Spanos
-Spadoni
-Southers
-Sola
-Sobol
-Smyre
-Slaybaugh
-Sizelove
-Sirmons
-Simington
-Silversmith
-Siguenza
-Sieren
-Shelman
-Shawn
-Sharples
-Sharif
-Shack
-Seville
-Sessler
-Serrata
-Serino
-Serafini
-Semien
-Selvey
-Seedorf
-Seckman
-Seawood
-Screws
-Screen
-Scoby
-Scicchitano
-Schorn
-Schommer
-Schnitzer
-Schleusner
-Schlabach
-Schiel
-Schepers
-Schaber
-Scally
-Sautner
-Sartwell
-Santerre
-Sandage
-Salvia
-Salvetti
-Salsman
-Sallis
-Salais
-Saint
-Saeger
-Sable
-Sabat
-Saar
-Ruther
-Russom
-Ruoff
-Rumery
-Rubottom
-Rozelle
-Rowton
-Routon
-Rotolo
-Rostad
-Roseborough
-Rorick
-Ronco
-Rolls
-Roher
-Roberie
-Robare
-Ritts
-Rison
-Rippe
-Rinke
-Ringwood
-Righter
-Rieser
-Rideaux
-Rickerson
-Renfrew
-Releford
-Reinsch
-Reiman
-Reifsteck
-Reidhead
-Redfearn
-Reddout
-Reaux
-Rance
-Ram
-Rado
-Radebaugh
-Quinby
-Quigg
-Provo
-Provenza
-Provence
-Prophet
-Pridgeon
-Praylow
-Powel
-Poulter
-Portner
-Pontbriand
-Police
-Poirrier
-Poirer
-Platero
-Pixler
-Pintor
-Pigman
-Piersall
-Piel
-Pichette
-Phou
-Phillis
-Phillippe
-Pharis
-Phalen
-Petsche
-Perrier
-Penfield
-Pelosi
-Pebley
-Peat
-Pawloski
-Pawlik
-Pavlick
-Pavel
-Patz
-Patout
-Pascucci
-Pasch
-Parrinello
-Parekh
-Pantaleo
-Pannone
-Pankow
-Pangborn
-Pagani
-Pacelli
-Ort
-Orsi
-Oriley
-Orduno
-Oommen
-Olivero
-Okada
-Ocon
-Ocheltree
-Oberman
-Nyland
-Noss
-Norling
-Nolton
-Nobile
-Nitti
-Nishimoto
-Nghiem
-Neuner
-Neuberger
-Neifert
-Negus
-Naval
-Nagler
-Mullally
-Moulden
-Morra
-Morquecho
-Morocco
-Moots
-Monica
-Mizzell
-Mirsky
-Mirabito
-Minardi
-Milholland
-Mikus
-Mijangos
-Michener
-Michalek
-Methvin
-Merrit
-Menter
-Meneely
-Melody
-Meiers
-Mehring
-Mees
-Medal
-Mcwhirt
-Mcwain
-Mcphatter
-Mcnichol
-Mcnaught
-Mclarty
-Mcivor
-Mcginness
-Mcgaughy
-Mcferrin
-Mcfate
-Mcclenny
-Mcclard
-Mccaskey
-Mccallion
-Mcamis
-Mathisen
-Marton
-Marsico
-Mariner
-Marchi
-Mani
-Mangione
-Magda
-Macaraeg
-Lupi
-Lunday
-Lukowski
-Lucious
-Locicero
-Loach
-Littlewood
-Litt
-Litle
-Lipham
-Linley
-Lindon
-Lightford
-Lieser
-Leyendecker
-Lewey
-Lesane
-Lenzi
-Lenart
-Lena
-Leisinger
-Lehrman
-Lefebure
-Leandro
-Lazard
-Laycock
-Laver
-Launer
-Lastrapes
-Lastinger
-Lasker
-Larkey
-Larger
-Lanser
-Lanphere
-Landey
-Lan
-Lampton
-Lamark
-Lager
-Kumm
-Kullman
-Krzeminski
-Krasner
-Kram
-Koran
-Koning
-Kohls
-Kohen
-Kobel
-Kniffen
-Knick
-Kneip
-Knappenberger
-Knack
-Klumpp
-Klausner
-Kitamura
-Kisling
-Kirshner
-Kinloch
-Kingman
-Kin
-Kimery
-Kestler
-Kellen
-Keleher
-Keehn
-Kearley
-Kasprzak
-Kary
-Kampf
-Kamerer
-Kalis
-Kahan
-Kaestner
-Kadel
-Kabel
-Junge
-Juckett
-Joynt
-Jorstad
-Jetter
-Jelley
-Jefferis
-Jeff
-Jeansonne
-Janecek
-Jaffee
-Jacko
-Izzard
-Istre
-Isherwood
-Ipock
-Iannuzzi
-Hypolite
-Hussein
-Humfeld
-Huckleberry
-Hotz
-Hosein
-Honahni
-Holzworth
-Holdridge
-Holdaway
-Holaday
-Hodak
-Hitchman
-Hippler
-Hinchey
-Hillin
-Hiler
-Hibdon
-Hevey
-Heth
-Hepfer
-Henneman
-Hemsley
-Hemmings
-Hemminger
-Helbert
-Helberg
-Heinze
-Heeren
-Hee
-Heber
-Haver
-Hauff
-Haswell
-Harvison
-Hartson
-Harshberger
-Harryman
-Harries
-Hannibal
-Hane
-Hamsher
-Haggett
-Hagemeier
-Haecker
-Haddon
-Haberkorn
-Guttman
-Guttierrez
-Guthmiller
-Guillet
-Guilbert
-Gugino
-Grumbles
-Griffy
-Gregerson
-Greg
-Granada
-Grana
-Goya
-Goranson
-Gonsoulin
-Goettl
-Goertz
-Goe
-Godlewski
-Glandon
-Glad
-Gilsdorf
-Gillogly
-Gilkison
-Giard
-Giampaolo
-Gheen
-Gettings
-Gesell
-Gershon
-Gaumer
-Gartrell
-Garside
-Garrigan
-Garmany
-Garlitz
-Garlington
-Gamet
-Gail
-Fuss
-Furlough
-Funston
-Funaro
-Frix
-Frasca
-Francoeur
-Forshey
-Foose
-Flatley
-Flagler
-Fils
-Fillers
-Fickett
-Feth
-Fennelly
-Fencl
-Felch
-Fedrick
-Febres
-Fazekas
-Farnan
-Fairless
-Ewan
-Etsitty
-Enterline
-Elvin
-Elsworth
-Elliff
-Ell
-Eleby
-Eldreth
-Eidem
-Edgecomb
-Edds
-Ebarb
-Dworkin
-Dusenberry
-Durrance
-Duropan
-Durfey
-Dungy
-Dundon
-Dumbleton
-Duffel
-Dubon
-Dubberly
-Droz
-Drinkwater
-Dressel
-Doughtie
-Doshier
-Dorrell
-Dora
-Dople
-Doonan
-Donadio
-Dollison
-Doig
-Ditzler
-Dishner
-Discher
-Dimaio
-Digman
-Difalco
-Diem
-Devino
-Devens
-Derosia
-Deppen
-Depaola
-Deniz
-Denardo
-Demos
-Demay
-Delgiudice
-Davi
-Danielsen
-Dally
-Dais
-Dahmer
-Cutsforth
-Cusimano
-Curington
-Cumbee
-Cryan
-Crusoe
-Crowden
-Crete
-Cressman
-Crapo
-Cowens
-Coupe
-Councill
-Coty
-Cotnoir
-Correira
-Copen
-Consiglio
-Combes
-Coffer
-Cockrill
-Coad
-Clogston
-Clasen
-Chock
-Chesnutt
-Charrier
-Chain
-Chadburn
-Cerniglia
-Cebula
-Castruita
-Castilla
-Castaldi
-Casebeer
-Casagrande
-Carta
-Carrales
-Carnley
-Cardon
-Carasco
-Capshaw
-Capron
-Cappiello
-Capito
-Canney
-Candela
-Caminiti
-Califano
-Calico
-Calabria
-Caiazzo
-Cahall
-Buscemi
-Burtner
-Burgdorf
-Bureau
-Burdo
-Buffaloe
-Buchwald
-Brwon
-Brunke
-Brummond
-Brumm
-Broe
-Brocious
-Brocato
-Bro
-Britain
-Briski
-Brisker
-Brightwell
-Bresett
-Breiner
-Brazeau
-Braz
-Brayman
-Brandis
-Bramer
-Bradeen
-Boyko
-Bourbon
-Bossi
-Boshart
-Bortle
-Boniello
-Bomgardner
-Bolz
-Bolenbaugh
-Bohling
-Bohland
-Bochenek
-Blust
-Bloxham
-Blowe
-Blish
-Blackwater
-Bjelland
-Biros
-Birkhead
-Biederman
-Bickle
-Bialaszewski
-Bevil
-Beverley
-Beumer
-Bettinger
-Besse
-Bernett
-Bermejo
-Bement
-Belfield
-Beckler
-Beatrice
-Baxendale
-Batdorf
-Bastin
-Bashore
-Bascombe
-Bartlebaugh
-Barsh
-Ballantine
-Bahl
-Badon
-Bachelor
-Autin
-Audie
-Astin
-Askey
-Ascher
-Arrigo
-Arbeiter
-Antes
-Angers
-Amburn
-Amarante
-Alvidrez
-Althaus
-Allmond
-Alfieri
-Aldinger
-Akerley
-Akana
-Aikins
-Ader
-Acebedo
-Accardo
-Abila
-Aberle
-Abele
-Abboud
-Zollars
-Zimmerer
-Zieman
-Zerby
-Zelman
-Zellars
-Yule
-Yoshimura
-Yonts
-Yeats
-Yant
-Yamanaka
-Wyland
-Wuensche
-Worman
-Wordlaw
-Wohl
-Winslett
-Winberg
-Wilmeth
-Willcutt
-Wiers
-Wiemer
-Wickwire
-Wichman
-Whitting
-Whidbee
-Westergard
-Wemmer
-Wellner
-Weishaupt
-Weinert
-Weedon
-Waynick
-Wasielewski
-Waren
-Walworth
-Wallingford
-Walke
-Waechter
-Viviani
-Vitti
-Villagrana
-Vien
-Vicks
-Venema
-Varnes
-Varnadoe
-Varden
-Vanpatten
-Vanorden
-Vanderzee
-Vandenburg
-Vandehey
-Valls
-Vallarta
-Valderrama
-Valade
-Urman
-Ulery
-Tusa
-Tuft
-Tripoli
-Trimpe
-Trickey
-Tortora
-Torrens
-Torchia
-Toft
-Tjaden
-Tison
-Tindel
-Thurmon
-Thode
-Tardugno
-Tancredi
-Taketa
-Taillon
-Tagle
-Sytsma
-Symes
-Swindall
-Swicegood
-Swartout
-Sundstrom
-Sumners
-Sulton
-Studstill
-Student
-Stroop
-Stonerock
-Stmarie
-Stlawrence
-Stemm
-Steinhauser
-Steinert
-Steffensen
-Stefano
-Stefaniak
-Starck
-Stalzer
-Spidle
-Spake
-Sowinski
-Sosnowski
-Sorber
-Somma
-Soliday
-Soldner
-Soja
-Soderstrom
-Soder
-Sockwell
-Sobus
-Snowball
-Sloop
-Skeeter
-Sinner
-Sinkfield
-Simerly
-Silguero
-Sigg
-Siemers
-Siegmund
-Sidle
-Shum
-Sholtis
-Shkreli
-Sheikh
-Shattles
-Sharlow
-Shao
-Shambaugh
-Shaikh
-Serrao
-Serafino
-Selley
-Selle
-Seel
-Sedberry
-Secord
-Seat
-Schunk
-Schuch
-Schor
-Scholze
-Schnee
-Schmieder
-Schleich
-Schimpf
-Scherf
-Satterthwaite
-Sasson
-Sarkisian
-Sarinana
-Sanzone
-Salvas
-Salone
-Salido
-Saiki
-Sahr
-Rusher
-Rusek
-Ruse
-Ruppel
-Rubi
-Rubel
-Rough
-Rothfuss
-Rothenberger
-Rossell
-Rosenquist
-Rosebrook
-Romito
-Romines
-Rolando
-Rolan
-Roker
-Roehrig
-Rockhold
-Rocca
-Robuck
-Riss
-Rinaldo
-Right
-Riggenbach
-Rezentes
-Reuther
-Reuben
-Renolds
-Rench
-Remus
-Remsen
-Reller
-Relf
-Reitzel
-Reiher
-Rehder
-Redeker
-Ramero
-Rahaim
-Radice
-Quijas
-Qualey
-Purgason
-Prum
-Proudfoot
-Prock
-Probert
-Printup
-Primer
-Primavera
-Prenatt
-Pratico
-Polich
-Podkowka
-Podesta
-Plattner
-Plasse
-Plamondon
-Pittmon
-Pippenger
-Pineo
-Pierpont
-Petzold
-Petz
-Pettiway
-Petters
-Petroski
-Petrik
-Pesola
-Pershall
-Perlmutter
-Penepent
-Peevy
-Pechacek
-Pears
-Peaden
-Pazos
-Pavia
-Pascarelli
-Parm
-Parillo
-Parfait
-Paoletti
-Palomba
-Palencia
-Pagaduan
-Oxner
-Overfield
-Overcast
-Oullette
-Ouk
-Ostroff
-Osei
-Omarah
-Olenick
-Olah
-Odem
-Nygren
-Notaro
-Northcott
-Nodine
-Nilges
-Neyman
-Neve
-Neuendorf
-Neptune
-Neisler
-Neault
-Narciso
-Naff
-Muscarella
-Mun
-Most
-Morrisette
-Morphew
-Morein
-Mor
-Montville
-Montufar
-Montesinos
-Monterroso
-Mongold
-Mona
-Mojarro
-Moitoso
-Mode
-Mirarchi
-Mirando
-Minogue
-Milici
-Miga
-Midyett
-Michna
-Mey
-Meuser
-Messana
-Menzie
-Menz
-Mendicino
-Melone
-Mellish
-Meller
-Melle
-Meints
-Mechem
-Mealer
-Mcwilliam
-Mcwhite
-Mcquiggan
-Mcphillips
-Mcpartland
-Mcnellis
-Mcmackin
-Mclaughin
-Mckinny
-Mckeithan
-Mcguirk
-Mcgillivray
-Mcgarr
-Mcgahee
-Mcfaul
-Mcfadin
-Mceuen
-Mccullah
-Mcconico
-Mcclaren
-Mccaul
-Mccalley
-Mccalister
-Mazer
-Mayson
-Mayhan
-Maugeri
-Mauger
-Mattix
-Mattews
-Maslowski
-Masek
-Martir
-Marsch
-Marquess
-Maron
-Markwell
-Markow
-Marinaro
-Marietta
-Marcinek
-Manner
-Mannella
-Mango
-Mallen
-Majeed
-Mahnke
-Mahabir
-Magby
-Magallan
-Madere
-Machnik
-Lybrand
-Luque
-Lundholm
-Lueders
-Lucian
-Lubinski
-Lowy
-Loew
-Lippard
-Linson
-Lindblad
-Lightcap
-Levitsky
-Levens
-Leonardi
-Lenton
-Lengyel
-Leng
-Leitzel
-Leicht
-Leaver
-Laubscher
-Lashua
-Larusso
-Larrimore
-Lanterman
-Lanni
-Lanasa
-Lamoureaux
-Lambros
-Lamborn
-Lamberti
-Lall
-Lagos
-Lafuente
-Laferriere
-Laconte
-Kyger
-Kupiec
-Kunzman
-Kuehne
-Kuder
-Kubat
-Krogh
-Kreidler
-Krawiec
-Krauth
-Kratky
-Kottwitz
-Korb
-Kono
-Kolman
-Kolesar
-Koeppel
-Knapper
-Klingenberg
-Kjos
-Keppel
-Kennan
-Keltz
-Kealoha
-Kasel
-Karney
-Kanne
-Kamrowski
-Kagawa
-Joo
-Johnosn
-Joesph
-Jilek
-Jarvie
-Jarret
-Jansky
-Jacquemin
-Jacox
-Jacome
-Italiano
-Iriarte
-Ingwersen
-Imboden
-Iglesia
-Huyser
-Hurston
-Hursh
-Huntoon
-Hudman
-Hoying
-Horsman
-Horrigan
-Hornbaker
-Horiuchi
-Hopewell
-Hoop
-Hommel
-Homeyer
-Holzinger
-Holmer
-Hollow
-Hipsher
-Hinchman
-Hilts
-Higginbottom
-Hieb
-Heyne
-Hessling
-Hesler
-Hertlein
-Herford
-Heras
-Henricksen
-Hennemann
-Henery
-Hendershott
-Hemstreet
-Heiney
-Heckert
-Heatley
-Hazell
-Hazan
-Hayashida
-Hausler
-Hartsoe
-Harth
-Harriott
-Harriger
-Harpin
-Hardisty
-Hardge
-Hao
-Hannaman
-Hannahs
-Hamp
-Hammersmith
-Hamiton
-Halsell
-Halderman
-Hagge
-Habel
-Gusler
-Gushiken
-Gurr
-Gummer
-Gullick
-Grunden
-Grosch
-Greenburg
-Greb
-Greaver
-Gratz
-Grajales
-Gourlay
-Gotto
-Gorley
-Goodpasture
-Godard
-Glorioso
-Gloor
-Glascock
-Gizzi
-Giroir
-Gibeault
-Gauldin
-Gauer
-Gartin
-Garrels
-Gamber
-Gallogly
-Galley
-Gade
-Fusaro
-Fripp
-Freyer
-Freiberg
-Franzoni
-Fragale
-Foston
-Forti
-Forness
-Folts
-Followell
-Foard
-Flom
-Fling
-Flett
-Fleitas
-Flamm
-Fino
-Finnen
-Finchum
-Filippelli
-Fickel
-Feucht
-Feiler
-Feenstra
-Feagins
-Faver
-Faux
-Faulkenberry
-Farabaugh
-Fandel
-Fallen
-Faler
-Faivre
-Fairey
-Facey
-Exner
-Evensen
-Erion
-Erben
-Epting
-Epping
-Ephraim
-Engberg
-Elsen
-Ellingwood
-Ellen
-Eisenmann
-Eichman
-Ehle
-Edsall
-Eagles
-Durall
-Dupler
-Dunker
-Dumlao
-Duford
-Duffie
-Dudding
-Dries
-Doung
-Dorantes
-Donahoo
-Domenick
-Dollins
-Dobles
-Dipiazza
-Dino
-Dimeo
-Diehm
-Dicicco
-Devin
-Devenport
-Desormeaux
-Derrow
-Depaolo
-Denver
-Denise
-Demas
-Delpriore
-Delosantos
-Dela
-Degreenia
-Degenhardt
-Defrancesco
-Defenbaugh
-Deets
-Debonis
-Deary
-Dazey
-Dargie
-Dambrosia
-Dalal
-Dagen
-Cun
-Cuen
-Crupi
-Crossan
-Crichlow
-Creque
-Coutts
-Counce
-Coram
-Constante
-Connon
-Collelo
-Coit
-Cocklin
-Coblentz
-Cobey
-Coard
-Clutts
-Clingan
-Claw
-Clampitt
-Claeys
-Ciulla
-Cimini
-Ciampa
-Christon
-Choat
-Chiou
-Chenail
-Chavous
-Catto
-Catalfamo
-Casterline
-Cassinelli
-Caspers
-Carroway
-Carlen
-Carithers
-Cappel
-Calo
-Callow
-Calandra
-Cagley
-Cafferty
-Byun
-Byam
-Buttner
-Buth
-Burtenshaw
-Burget
-Burfield
-Buresh
-Bunt
-Bultman
-Bulow
-Buchta
-Buchmann
-Brunett
-Bruemmer
-Brueggeman
-Britto
-Briney
-Brimhall
-Bribiesca
-Bresler
-Brazan
-Brashier
-Brar
-Brandstetter
-Brandi
-Boze
-Boonstra
-Bluitt
-Blomgren
-Blattner
-Blasi
-Bladen
-Bitterman
-Bilby
-Bierce
-Biello
-Bettes
-Bertone
-Berrey
-Bernat
-Berberich
-Benshoof
-Bendickson
-Below
-Bellefeuille
-Bednarski
-Beddingfield
-Beckerman
-Beaston
-Bavaro
-Batalla
-Basye
-Baskins
-Bartolotta
-Bartkowski
-Barranco
-Barkett
-Band
-Banaszak
-Bame
-Bamberger
-Balsley
-Ballas
-Balicki
-Balding
-Bald
-Badura
-Aymond
-Aylor
-Aylesworth
-Axley
-Axelrod
-Aubert
-Armond
-Ariza
-Apicella
-Anstine
-Ankrom
-Angevine
-Anger
-Andreotti
-Andrea
-Alto
-Alspaugh
-Alpaugh
-Almada
-Allinder
-Alexandra
-Alequin
-Alan
-Aguillard
-Agron
-Agena
-Afanador
-Ackerley
-Abrev
-Abdalla
-Aaronson
-Zynda
-Zucco
-Zipp
-Zetina
-Zenz
-Zelinski
-Youngren
-Yochum
-Yearsley
-Yankey
-Woodfork
-Wohlwend
-Woelfel
-Wiste
-Wismer
-Winzer
-Winker
-Wilkison
-Wigger
-Wierenga
-Whipps
-Wheeling
-Westray
-Wesch
-Weld
-Weible
-Wedell
-Weddell
-Wawrzyniak
-Wasko
-Washinton
-Wantz
-Walts
-Wallander
-Wain
-Wahlen
-Wachowiak
-Voshell
-Viteri
-Vire
-Villafuerte
-Vieyra
-Viau
-Vescio
-Verrier
-Verhey
-Vause
-Vandermolen
-Vanderhorst
-Valois
-Valla
-Valcourt
-Vacek
-Uzzle
-Umland
-Um
-Ulman
-Ulland
-Turvey
-Tuley
-Trembath
-Trees
-Trabert
-Towsend
-Totman
-Toews
-Toby
-Tito
-Tisch
-Tisby
-Tipping
-Tierce
-Thivierge
-Tenenbaum
-Teagle
-Tacy
-Tabler
-Szewczyk
-Swearngin
-Suire
-Sturrock
-Stubbe
-Stronach
-Stoute
-Stoudemire
-Stoneberg
-Sterba
-Stejskal
-Steier
-Stehr
-Steckler
-Steckel
-Stearman
-Steakley
-Star
-Stanforth
-Stancill
-Stalls
-Srour
-Sprowl
-Spevak
-Sole
-Sokoloff
-Soderman
-Snover
-Sleeman
-Slaubaugh
-Sitzman
-Simpler
-Simmer
-Simes
-Siegal
-Sidoti
-Sidler
-Sider
-Sidener
-Siddiqi
-Shireman
-Shima
-Sheroan
-Shadduck
-Seyal
-Sentell
-Sennett
-Senko
-Seneca
-Sen
-Seligman
-Seipel
-Seekins
-Seabaugh
-Scouten
-Schweinsberg
-Schwartzberg
-Schurr
-Schult
-Schrick
-Schoening
-Schmitmeyer
-Schlicher
-Schlager
-Schack
-Schaar
-Scavuzzo
-Scarpa
-Sassano
-Santigo
-Sandavol
-San
-Sampsel
-Samms
-Samet
-Salzano
-Salyards
-Salva
-Saidi
-Sabir
-Saam
-Saab
-Runions
-Rundquist
-Rousselle
-Round
-Rotunno
-Roses
-Rosch
-Romney
-Rohner
-Roff
-Rockhill
-Rockefeller
-Rocamora
-Rm
-Ringle
-Riggie
-Ricklefs
-Rexroat
-Reves
-Revel
-Reuss
-Reta
-Repka
-Rentfro
-Reineke
-Recore
-Recalde
-Rease
-Rawling
-Ravencraft
-Ravelo
-Rappa
-Randol
-Ramsier
-Ramerez
-Rahimi
-Rahim
-Radney
-Racey
-Raborn
-Rabalais
-Quebedeaux
-Pujol
-Puchalski
-Prothro
-Proffit
-Prigge
-Prideaux
-Prevo
-Portales
-Porco
-Popovic
-Popek
-Popejoy
-Pompei
-Plumber
-Plude
-Platner
-Plate
-Pizzuto
-Pizer
-Pistone
-Piller
-Pierri
-Piehl
-Pickert
-Piasecki
-Phong
-Philipp
-Peugh
-Pesqueira
-Perrett
-Perfetti
-Percell
-Penhollow
-Pelto
-Pellett
-Pavlak
-Paulo
-Paula
-Patricia
-Pastorius
-Parsell
-Parrales
-Pareja
-Parcell
-Pappan
-Pajak
-Owusu
-Ovitt
-Ory
-Orrick
-Oniell
-Olliff
-Olberding
-Oesterling
-Odwyer
-Ocegueda
-Obey
-Obermiller
-Nylander
-Nulph
-Nottage
-Northam
-Norgard
-Nodal
-Niel
-Nicols
-Newhard
-Nellum
-Neira
-Nazzaro
-Nassif
-Narducci
-Nalbandian
-Nails
-Musil
-Murga
-Muraoka
-Mumper
-Mulroy
-Mountjoy
-Mossey
-Moreton
-Morea
-Montoro
-Montesdeoca
-Montealegre
-Montanye
-Montandon
-Mok
-Moisan
-Mohl
-Modesto
-Modeste
-Mitra
-Mister
-Minson
-Minjarez
-Milbourne
-Michaelsen
-Metheney
-Mestre
-Mescher
-Mervis
-Mennenga
-Melgarejo
-Meisinger
-Meininger
-Mcwaters
-Mckern
-Mckendree
-Mchargue
-Mcglothlen
-Mcgibbon
-Mcgavock
-Mcduffee
-Mcclurkin
-Mccausland
-Mccardell
-Mccambridge
-Mazzoni
-Mayen
-Maxton
-Mawson
-Mauffray
-Mattinson
-Mattila
-Matsunaga
-Mater
-Mascia
-Marse
-Marotz
-Marois
-Markin
-Markee
-Marcinko
-Marcin
-Manville
-Mantyla
-Manser
-Manry
-Manderscheid
-Mallari
-Malia
-Malecha
-Malcomb
-Majerus
-Mailman
-Macinnis
-Mabey
-Lyford
-Luth
-Lupercio
-Luhman
-Luedke
-Lovick
-Lossing
-Loss
-Lorraine
-Lookabaugh
-Longway
-Lone
-Loisel
-Logiudice
-Loffredo
-Locust
-Lobe
-Lobaugh
-Lizaola
-Livers
-Littlepage
-Linnen
-Limmer
-Liebsch
-Liebman
-Leyden
-Levitan
-Levison
-Levier
-Leven
-Levalley
-Lettinga
-Lessley
-Lessig
-Lepine
-Leight
-Leick
-Leggio
-Leffingwell
-Leffert
-Lefevers
-Ledlow
-Leaton
-Leander
-Leaming
-Lazos
-Laviolette
-Lauffer
-Latz
-Lasorsa
-Lasch
-Larin
-Laporta
-Lanter
-Langstaff
-Landi
-Lamica
-Lambson
-Lambe
-Lamarca
-Laman
-Lamagna
-Lajeunesse
-Lafontant
-Lafler
-Labrum
-Laakso
-Kush
-Kuether
-Kuchar
-Kruk
-Kroner
-Kroh
-Kridler
-Kreuzer
-Kovats
-Koprowski
-Kohout
-Knicely
-Knell
-Klutts
-Kindrick
-Kiddy
-Khanna
-Ketcher
-Kerschner
-Kerfien
-Kensey
-Kenley
-Kenan
-Kemplin
-Kellerhouse
-Keesling
-Keep
-Keena
-Keas
-Kaplin
-Kanady
-Kampen
-Jutras
-Jungers
-Julio
-Jeschke
-Jen
-Janowski
-Janas
-Iskra
-Imperato
-Ikerd
-Igoe
-Hyneman
-Hynek
-Husain
-Hurrell
-Hultquist
-Hullett
-Hulen
-Huf
-Huberty
-Hoyte
-Hossain
-Hornstein
-Hori
-Hopton
-Holms
-Hollmann
-Holdman
-Holdeman
-Holben
-Hoffert
-Himel
-Hillsman
-Hillary
-Herdt
-Hellyer
-Hellen
-Heister
-Heimer
-Heidecker
-Hedgpeth
-Hedgepath
-Hebel
-Heatwole
-Hayer
-Hausner
-Haskew
-Haselden
-Hartranft
-Harsch
-Harres
-Harps
-Hardimon
-Halm
-Hallee
-Hallahan
-Hackley
-Hackenberg
-Hachey
-Haapala
-Guynes
-Gunnerson
-Gunby
-Gulotta
-Gudger
-Groman
-Grignon
-Griebel
-Gregori
-Greenan
-Grauer
-Gourd
-Gorin
-Gorgone
-Gooslin
-Goold
-Goltz
-Goldberger
-Gobble
-Glotfelty
-Glassford
-Glance
-Gladwin
-Giuffre
-Gilpatrick
-Germaine
-Gerdts
-Genna
-Geisel
-Gayler
-Gaunce
-Gaulding
-Gateley
-Gassman
-Gash
-Garson
-Garron
-Garand
-Gangestad
-Gallow
-Galbo
-Gabrielli
-Fullington
-Fucci
-Frum
-Frieden
-Friberg
-Frasco
-Francese
-Fowle
-Foucher
-Fothergill
-Foraker
-Fonder
-Foisy
-Fogal
-Flurry
-Flenniken
-Fitzhenry
-Fishbein
-Finton
-Filmore
-Filice
-Feola
-Felberbaum
-Fausnaught
-Fasciano
-Farrah
-Farquharson
-Faires
-Estridge
-Essman
-Enz
-Enriques
-Emmick
-Ekker
-Ekdahl
-Eisman
-Eggleton
-Eddinger
-Eakle
-Eagar
-Durio
-Dunwoody
-Duhaime
-Duenes
-Duden
-Dudas
-Dresher
-Dresel
-Doutt
-Donlan
-Donathan
-Domke
-Dobrowolski
-Dingee
-Dimmitt
-Dimery
-Dilullo
-Deveaux
-Devalle
-Desper
-Desnoyers
-Desautels
-Derouin
-Derbyshire
-Denmon
-Dena
-Demski
-Delucca
-Delpino
-Delmont
-Deller
-Dejulio
-Deibler
-Dehne
-Deharo
-Degner
-Defore
-Deerman
-Decuir
-Deckman
-Deasy
-Dease
-Deaner
-Dawdy
-Daughdrill
-Darrigo
-Darity
-Daniele
-Dalbey
-Dagenhart
-Daffron
-Curro
-Curnutte
-Curatolo
-Cruikshank
-Crosswell
-Croslin
-Croney
-Crofton
-Criado
-Crecelius
-Coscia
-Conniff
-Commodore
-Coltharp
-Colonna
-Collyer
-Collington
-Cobbley
-Coache
-Clonts
-Cloe
-Cliett
-Clemans
-Clara
-Cid
-Christo
-Chrisp
-China
-Chiarini
-Chia
-Cheatam
-Cheadle
-Che
-Chauncey
-Chand
-Chadd
-Cervera
-Cerulli
-Cerezo
-Cedano
-Cayetano
-Cawthorne
-Cavalieri
-Cattaneo
-Caryl
-Cartlidge
-Carrithers
-Carreira
-Carranco
-Cargle
-Candanoza
-Camille
-Camburn
-Calender
-Calderin
-Calcagno
-Cahn
-Cadden
-Byham
-Buttry
-Burry
-Burruel
-Burkitt
-Burgio
-Burgener
-Buescher
-Buckalew
-Brymer
-Brumett
-Brugnoli
-Brugman
-Brosnahan
-Bronder
-Broeckel
-Broderson
-Brisbon
-Brinsfield
-Brinks
-Bresee
-Bregman
-Branner
-Brambila
-Brailsford
-Bouska
-Boster
-Borucki
-Bortner
-Boroughs
-Borgeson
-Bonier
-Bomba
-Bolender
-Boesch
-Boeke
-Bloyd
-Bley
-Binger
-Billing
-Bilbro
-Biery
-Bichrest
-Bezio
-Bevel
-Berrett
-Bermeo
-Bergdoll
-Bercier
-Benzel
-Bentler
-Bennetts
-Belnap
-Bellini
-Beitz
-Behrend
-Bednarczyk
-Bearse
-Batman
-Bartolini
-Bartol
-Barretta
-Barbero
-Barbaro
-Banvelos
-Bankes
-Ballengee
-Baldon
-Aye
-Ausmus
-Atilano
-Atienza
-Aschenbrenner
-Arora
-Armstong
-Aquilino
-Appleberry
-Applebee
-Apolinar
-Antos
-Angles
-Andrepont
-Ancona
-Amesquita
-Alvino
-Altschuler
-Allin
-Alire
-Ainslie
-Agular
-Aeschliman
-Accetta
-Abdulla
-Abbe
-Zwart
-Zufelt
-Zona
-Zirbel
-Zingaro
-Zilnicki
-Zenteno
-Zent
-Zemke
-Zayac
-Zarrella
-Yoshimoto
-Yearout
-Wrench
-World
-Womer
-Woltman
-Wolin
-Wolery
-Woldt
-Witts
-Wittner
-Witherow
-Winward
-Winrow
-Wiemann
-Wichmann
-Whitwell
-Whitelaw
-Wheeless
-Whalley
-Wey
-Wessner
-Wenzl
-Wene
-Weatherbee
-Waye
-Wattles
-Wanke
-Walkes
-Waldeck
-Vonruden
-Voisine
-Vogus
-Vittetoe
-Villalva
-Villacis
-Victorian
-Verge
-Venturini
-Venturi
-Venson
-Vanloan
-Vanhooser
-Vanduzer
-Vandever
-Vanderwal
-Vanderheyden
-Vanbeek
-Vanbebber
-Vallance
-Vales
-Vahle
-Urbain
-Upshur
-Umfleet
-Twist
-Tsuji
-Trybus
-Triolo
-Trimarchi
-Trezza
-Trenholm
-Tovey
-Tourigny
-Torry
-Torrain
-Torgeson
-Tongue
-Tomey
-Tischler
-Tinkler
-Tinder
-Ticknor
-Tibbles
-Tibbals
-Throneberry
-Thormahlen
-Thibert
-Thibeaux
-Theurer
-Templet
-Tegeler
-Tavernier
-Taubman
-Tamashiro
-Tallon
-Tallarico
-Taboada
-Sypher
-Sybert
-Swyers
-Switalski
-Swinger
-Swedberg
-Suther
-Surprenant
-Sullen
-Sulik
-Sugden
-Suder
-Suchan
-Such
-Strube
-Stroope
-Strittmatter
-Streett
-Straughn
-Strasburg
-Stjacques
-Stimage
-Stimac
-Stifter
-Stgelais
-Steinhart
-Stehlik
-Steffenson
-Steenbergen
-Stanbery
-Stallone
-Sprung
-Spraggs
-Spoto
-Spilman
-Speno
-Spanbauer
-Spalla
-Spagnolo
-Soliman
-Solan
-Sobolik
-Snelgrove
-Snedden
-Smale
-Sliter
-Slankard
-Sircy
-Signor
-Shutter
-Shurtliff
-Shur
-Show
-Shirkey
-Shi
-Shewmake
-Shams
-Shadley
-Shaddox
-Sgro
-Serfass
-Seppala
-Segawa
-Segalla
-Seaberry
-Scruton
-Scism
-Schwein
-Schwartzman
-Schwantes
-Schomer
-Schoenborn
-Schlottmann
-Schissler
-Scheurer
-Schepis
-Scheidegger
-Saunier
-Sauders
-Sassman
-Sannicolas
-Sanderfur
-Salser
-Sagar
-Saffer
-Saeed
-Sadberry
-Saban
-Ryce
-Rybak
-Rux
-Rumore
-Rummell
-Rummage
-Rudasill
-Rozman
-Rota
-Rossin
-Rosell
-Rosel
-Romberg
-Rojero
-Rochin
-Rochell
-Robideau
-Robarge
-Roath
-Risko
-Ringel
-Ringdahl
-Riera
-Riemann
-Ribas
-Revard
-Renna
-Renegar
-Reinwald
-Rehman
-Regal
-Reels
-Ree
-Redel
-Reasons
-Raysor
-Rathke
-Rapozo
-Rampton
-Ramaker
-Rakow
-Raia
-Radin
-Raco
-Rackham
-Racca
-Racanelli
-Rabun
-Quaranta
-Purves
-Pundt
-Protsman
-Prosper
-Prezioso
-Presutti
-President
-Presgraves
-Poydras
-Portnoy
-Portalatin
-Pop
-Pontes
-Poehler
-Poblete
-Poat
-Plumadore
-Pleiman
-Pizana
-Piscopo
-Piraino
-Pinelli
-Pillai
-Picken
-Picha
-Piccoli
-Philen
-Petteway
-Petros
-Peskin
-Perugini
-Perrella
-Pernice
-Peper
-Pensinger
-Pembleton
-Patron
-Passman
-Parrent
-Panetta
-Pancake
-Pallas
-Palka
-Pais
-Paglia
-Padmore
-Oum
-Ottesen
-Ost
-Oser
-Ortmann
-Ormand
-Oriol
-Orick
-Oler
-Okafor
-Ohair
-Obert
-Oberholtzer
-Number
-Nowland
-Nosek
-Nordeen
-Nolf
-Nogle
-Nobriga
-Nicley
-Niccum
-Newingham
-Neumeister
-Neugebauer
-Netherland
-Nerney
-Neiss
-Neis
-Neider
-Neeld
-Nailor
-Mustain
-Mussman
-Musante
-Murton
-Murden
-Munyon
-Muldrew
-Motton
-Moscoso
-Moschella
-Moroz
-Mormon
-Morelos
-Morace
-Moone
-Montesano
-Montemurro
-Montas
-Montalbo
-Molander
-Mleczko
-Miyake
-Mitschke
-Minger
-Minelli
-Minear
-Millener
-Mihelich
-Miedema
-Miah
-Metzer
-Mery
-Merrigan
-Merck
-Mennella
-Membreno
-Melecio
-Melder
-Mehling
-Mehler
-Medcalf
-Meche
-Mealing
-Mcqueeney
-Mcphaul
-Mcmickle
-Mcmeen
-Mcmains
-Mclees
-Mcgowin
-Mcfarlain
-Mcdivitt
-Mccotter
-Mcconn
-Mcclane
-Mccaster
-Mcbay
-Mcbath
-Mayoral
-Mayeux
-Matsuo
-Masur
-Massman
-Marzette
-Martensen
-Marlett
-Markie
-Markgraf
-Marcinkowski
-Marchbanks
-Marcella
-Mansir
-Mandez
-Mancil
-Malagon
-Magnani
-Madonia
-Madill
-Madia
-Mackiewicz
-Macgillivray
-Macdowell
-Macbeth
-Mabee
-Lundblad
-Lovvorn
-Lovings
-Loreto
-Linz
-Linwood
-Linnell
-Linebaugh
-Lindstedt
-Lindbloom
-Linda
-Limberg
-Liebig
-Lickteig
-Lichtenberg
-Licari
-Lex
-Lewison
-Levario
-Levar
-Lepper
-Lenzen
-Lenderman
-Lemarr
-Leinen
-Leider
-Legrande
-Lefort
-Lebleu
-Leask
-Learn
-Leacock
-Lazano
-Lawalin
-Laven
-Laplaca
-Lant
-Langsam
-Langone
-Landress
-Landen
-Lande
-Lamorte
-Lairsey
-Laidlaw
-Laffin
-Lackner
-Lacaze
-Labuda
-Labree
-Labella
-Labar
-Kyer
-Kuyper
-Kulinski
-Kulig
-Kuhnert
-Kuchera
-Kubicek
-Kruckeberg
-Kruchten
-Krider
-Kotch
-Kornfeld
-Koren
-Koogler
-Koll
-Kole
-Kohnke
-Kohli
-Kofoed
-Koelling
-Kluth
-Klump
-Klopfenstein
-Klippel
-Klinge
-Klett
-Klemp
-Kleis
-Klann
-Kitzman
-Kinnan
-Kingsberry
-Kind
-Kina
-Kilmon
-Killpack
-Kilbane
-Kijowski
-Kies
-Kierstead
-Kettering
-Kesselman
-Kenton
-Kennington
-Keniston
-Kehrer
-Kearl
-Keala
-Kassa
-Kasahara
-Kantz
-Kalin
-Kaina
-Jupin
-Juntunen
-Juares
-Joynes
-Jovel
-Joos
-Jn
-Jiggetts
-Jervis
-Jerabek
-Jennison
-Jaso
-Janz
-Izatt
-Ishibashi
-Iannotti
-Hymas
-Huneke
-Hulet
-Hougen
-Horvat
-Horstmann
-Hopple
-Holtkamp
-Holsten
-Hohenstein
-Hoefle
-Hoback
-Hiney
-Hiemstra
-Herwig
-Herter
-Herriott
-Hermsen
-Herdman
-Herder
-Herbig
-Hem
-Helper
-Helling
-Helbig
-Heitkamp
-Heinrichs
-Heinecke
-Heileman
-Heffley
-Heavrin
-Heaston
-Haymaker
-Hauenstein
-Hartlage
-Harlin
-Harig
-Hardenbrook
-Hankin
-Hamiter
-Hagens
-Hagel
-Grizzell
-Griest
-Griese
-Grief
-Grennan
-Graden
-Gosse
-Gorder
-Goldin
-Goatley
-Gillespi
-Gilbride
-Giel
-Gianni
-Ghoston
-Getter
-Gershman
-Geisinger
-Gehringer
-Gedeon
-Gebert
-Gaxiola
-Gawronski
-Gau
-Gathright
-Gatchell
-Gargiulo
-Garg
-Galang
-Gadison
-Fyock
-Furniss
-Furby
-Funnell
-Frizell
-Frenkel
-Freeburg
-Frankhouser
-Franchi
-Foulger
-Formby
-Forkey
-Fonte
-Folson
-Follette
-Flicker
-Flavors
-Flavell
-Finegan
-Fill
-Filippini
-Ferencz
-Ference
-Fennessey
-Feggins
-Feehan
-Fazzino
-Fazenbaker
-Fausto
-Faunce
-Farraj
-Farnell
-Farler
-Farabee
-Falkowski
-Facio
-Etzler
-Ethington
-Esterline
-Esper
-Esker
-Erxleben
-Ericsson
-Erick
-Engh
-Emling
-Elridge
-Ellenwood
-Elfrink
-Ekhoff
-Eisert
-Eis
-Eifert
-Eichenlaub
-Egnor
-Eggebrecht
-Edlin
-Edberg
-Eble
-Eber
-Easler
-Duwe
-Dutta
-Dutremble
-Dusseault
-Durney
-Dunworth
-Dumire
-Dukeman
-Dufner
-Duey
-Duble
-Dreese
-Dozal
-Douville
-Dougal
-Doom
-Done
-Diver
-Ditmore
-Distin
-Dimuzio
-Dildine
-Dignan
-Dieterich
-Dieckman
-Didonna
-Dhillon
-Dezern
-Devereux
-Devall
-Detty
-Detamore
-Derksen
-Deremer
-Deras
-Denslow
-Deno
-Denicola
-Denbow
-Demma
-Demille
-Delisa
-Delira
-Delawder
-Delara
-Delahanty
-Dejonge
-Deininger
-Dedios
-Dederick
-Decelles
-Debus
-Debruyn
-Deborde
-Deak
-Dauenhauer
-Darsey
-Daring
-Dansie
-Dalman
-Dakin
-Dagley
-Czaja
-Cybart
-Cutchin
-Currington
-Curbelo
-Croucher
-Crinklaw
-Cremin
-Cratty
-Cranfield
-Crafford
-Cowher
-Cowboy
-Couvillion
-Couturier
-Counter
-Corter
-Coombes
-Contos
-Consolini
-Connaughton
-Conely
-Coltrane
-Collom
-Cockett
-Clepper
-Cleavenger
-Claro
-Clarkin
-Ciriaco
-Ciesla
-Cichon
-Ciancio
-Cianci
-Chynoweth
-Chuang
-Chrzanowski
-Christion
-Cholewa
-Chipley
-Chilcott
-Cheyne
-Cheslock
-Chenevert
-Cheers
-Charlot
-Chagolla
-Chabolla
-Cesena
-Cerutti
-Cava
-Caul
-Cassone
-Cassin
-Cassese
-Casaus
-Casali
-Cartledge
-Carsten
-Cardamone
-Carcia
-Carbonneau
-Carboni
-Carabello
-Capozzoli
-Capella
-Cap
-Cannata
-Campoverde
-Campeau
-Cambre
-Camberos
-Calvery
-Calnan
-Calmes
-Calley
-Callery
-Calise
-Cacciotti
-Cacciatore
-Butterbaugh
-Burgo
-Burgamy
-Burell
-Bunde
-Bumbalough
-Buel
-Buechner
-Buchannon
-Bryon
-Brunn
-Brost
-Broadfoot
-Brittan
-Brevard
-Breda
-Brazel
-Brayboy
-Brasier
-Boyea
-Boxx
-Both
-Boso
-Bosio
-Boruff
-Borda
-Bongiovanni
-Bolerjack
-Boedeker
-Blye
-Blumstein
-Blumenfeld
-Blinn
-Bleakley
-Blatter
-Blan
-Bjornson
-Bisignano
-Billick
-Bieniek
-Bhatti
-Bevacqua
-Betterton
-Berra
-Berenbaum
-Bensinger
-Bennefield
-Belvins
-Belson
-Bellin
-Beighley
-Beecroft
-Beaudreau
-Baynard
-Bautch
-Bausch
-Basch
-Bartleson
-Barthelemy
-Barak
-Balzano
-Balistreri
-Bailer
-Bagnall
-Bagg
-Bae
-Auston
-Augustyn
-Aslinger
-Ashalintubbi
-Artist
-Arjona
-Arebalo
-Arab
-Appelbaum
-Anna
-Angst
-Angert
-Angelucci
-Andry
-Andersson
-Amorim
-Amavisca
-Alward
-Alvelo
-Alvear
-Alumbaugh
-Alsobrook
-Alli
-Allgeier
-Allende
-Aldrete
-Akiyama
-Ahlquist
-Adolphson
-Addario
-Acoff
-Abelson
-Abasta
-Zulauf
-Zirkind
-Zeoli
-Zemlicka
-Zawislak
-Zappia
-Zanella
-Yelvington
-Yeatman
-Yanni
-Wragg
-Wissing
-Wischmeier
-Wirta
-Wiren
-Wilmouth
-Williard
-Willert
-Willaert
-Wildt
-Whelpley
-Westwood
-Weingart
-Weidenbach
-Weidemann
-Weatherman
-Weakland
-Watwood
-Wattley
-Waterson
-Wambach
-Walzer
-Waldow
-Waag
-Vorpahl
-Volkmann
-Vitolo
-Visitacion
-Vincelette
-Vina
-Viggiano
-Vieth
-Vidana
-Vert
-Verna
-Verges
-Verdejo
-Venzon
-Velardi
-Varian
-Vargus
-Vandermeulen
-Vandam
-Vanasse
-Vanaman
-Utzinger
-Uriostegui
-Uplinger
-Twiss
-Tumlinson
-Tschanz
-Trunnell
-Troung
-Troublefield
-Trojacek
-Trial
-Treloar
-Tranmer
-Touchton
-Torsiello
-Torina
-Tootle
-Toki
-Toepfer
-Tippin
-Tippie
-Thronson
-Thomes
-Tezeno
-Texada
-Testani
-Tessmer
-Terrel
-Terra
-Terlizzi
-Tempel
-Temblador
-Tayler
-Tawil
-Tasch
-Tames
-Talor
-Talerico
-Swinderman
-Sweetland
-Swager
-Sulser
-Sullens
-Subia
-Sturgell
-Stumpff
-Stufflebeam
-Stucki
-Strohmeyer
-Strebel
-Straughan
-Strackbein
-Stobaugh
-Stetz
-Stelter
-Steinmann
-Steinfeld
-Stefani
-Stecher
-Stanwood
-Stanislawski
-Stander
-Speziale
-Soppe
-Soni
-Sol
-Sobotka
-Snipe
-Smuin
-Slider
-Slee
-Skerrett
-Sjoberg
-Sittig
-Simonelli
-Simo
-Sima
-Silvio
-Silverio
-Silveria
-Silsby
-Sillman
-Sienkiewicz
-Sick
-Sia
-Shomo
-Shoff
-Shoener
-Shiba
-Sherfey
-Shehane
-Shawl
-Sexson
-Setton
-Sergi
-Selvy
-Seiders
-Seegmiller
-Sebree
-Seabury
-Scroggin
-Sconyers
-Schwalb
-Schurg
-Schulenberg
-Schuld
-Schrage
-Schow
-Schon
-Schnur
-Schneller
-Schmidtke
-Schlatter
-Schieffer
-Schenkel
-Scheeler
-Schauwecker
-Schartz
-Schacherer
-Scafe
-Sayegh
-Savidge
-Saur
-Sarles
-Sarkissian
-Sarkis
-Sarcone
-Sagucio
-Saffell
-Saenger
-Sacher
-Rylee
-Ruvolo
-Ruston
-Ruple
-Rulison
-Ruge
-Ruffo
-Ruehl
-Rueckert
-Rudman
-Rudie
-Rubert
-Rozeboom
-Roysden
-Roylance
-Rothchild
-Rosse
-Rosecrans
-Rodrick
-Rodi
-Rockmore
-Robnett
-Roberti
-Rivett
-Riva
-Ritzel
-Rierson
-Ricotta
-Ricken
-Rezac
-Rendell
-Remo
-Reitman
-Reindl
-Reeb
-Reddic
-Reddell
-Rebuck
-Reali
-Raye
-Raso
-Ramthun
-Ramsden
-Rameau
-Ralphs
-Rak
-Rago
-Racz
-Quinteros
-Quinter
-Quinley
-Quiggle
-Quaid
-Purvines
-Purinton
-Purdum
-Pummill
-Puglia
-Puett
-Ptacek
-Przybyla
-Prowse
-Providence
-Prestwich
-Pracht
-Poutre
-Poucher
-Portera
-Polinsky
-Poage
-Platts
-Pineau
-Pinckard
-Pilson
-Pilling
-Pilkins
-Pili
-Pikes
-Pigram
-Pietila
-Pickron
-Pia
-Philippi
-Philhower
-Pflueger
-Pfalzgraf
-Pettibone
-Pett
-Petrosino
-Persing
-Perrino
-Perotti
-Periera
-Peri
-Peredo
-Peralto
-Pennywell
-Pennel
-Pen
-Pellegren
-Pella
-Pedroso
-Paulos
-Paulding
-Pates
-Pasek
-Paramo
-Paolino
-Panganiban
-Paneto
-Paluch
-Ozaki
-Ownbey
-Overfelt
-Outman
-Opper
-Onstad
-Oland
-Okuda
-Oertel
-Oelke
-Normandeau
-Nordby
-Nordahl
-Noecker
-Noblin
-No
-Niswonger
-Nishioka
-Nett
-Nephew
-Negley
-Needles
-Nedeau
-Natera
-Nachman
-Naas
-Musich
-Mungin
-Mourer
-Mounsey
-Mottola
-Mothershed
-Moskal
-Mosbey
-Morini
-Moreles
-Mood
-Montaluo
-Moneypenny
-Monda
-Moench
-Moates
-Moad
-Mixer
-Missildine
-Misiewicz
-Mirabella
-Minott
-Minnifield
-Mincks
-Milum
-Milani
-Mikelson
-Mestayer
-Mess
-Mertes
-Merrihew
-Merlos
-Meritt
-Melnyk
-Medlen
-Meder
-Mean
-Mcvea
-Mcquarrie
-Mcquain
-Mclucas
-Mclester
-Mckitrick
-Mckennon
-Mcinnes
-Mcgrory
-Mcgranahan
-Mcglamery
-Mcgivney
-Mcgilvray
-Mccuiston
-Mccuin
-Mccrystal
-Mccolley
-Mcclerkin
-Mcclenon
-Mccamey
-Mcaninch
-Mazariegos
-Maynez
-Mattioli
-Mastronardi
-Masone
-Marzett
-Marsland
-Mari
-Margulies
-Margolin
-Malatesta
-Malachi
-Mainer
-Maietta
-Magrath
-Maese
-Madkins
-Madeiros
-Madamba
-Mackson
-Mac
-Maben
-Lytch
-Lundgreen
-Lumb
-Lukach
-Luick
-Luetkemeyer
-Luechtefeld
-Ludy
-Ludden
-Luckow
-Lubinsky
-Lowes
-Lout
-Lorenson
-Loran
-Lopinto
-Looby
-Lones
-Livsey
-Liskey
-Lisby
-Lintner
-Lindow
-Lindblom
-Liming
-Liechty
-Leth
-Lesniewski
-Lenig
-Lemonds
-Leisy
-Lehrer
-Lehnen
-Lehmkuhl
-Leeth
-Leer
-Leeks
-Lechler
-Lebsock
-Lavere
-Lautenschlage
-Laughridge
-Lauderback
-Laudenslager
-Lassonde
-Laroque
-Laramee
-Laracuente
-Lapeyrouse
-Lampron
-Lamers
-Lamer
-Laino
-Lague
-Laguardia
-Lafromboise
-Lafata
-Lacount
-Lachowicz
-Kysar
-Kwiecien
-Kuffel
-Kueter
-Kronenberg
-Kristensen
-Kristek
-Krings
-Kriesel
-Krey
-Krebbs
-Kreamer
-Krabbe
-Kossman
-Kosakowski
-Kosak
-Kopacz
-Konkol
-Koepsell
-Koening
-Koen
-Knerr
-Knapik
-Kluttz
-Klocke
-Klenk
-Klemme
-Klapp
-Kitchell
-Kita
-Kissane
-Kirkbride
-Kirchhoff
-Kinter
-Kinsel
-Kingsland
-Kimmer
-Kimler
-Killoran
-Kieser
-Khalsa
-Khalaf
-Kettel
-Kerekes
-Keplin
-Kentner
-Kennebrew
-Kenison
-Kellough
-Kellman
-Keatts
-Keasey
-Kauppi
-Katon
-Kari
-Kanner
-Kampa
-Kall
-Kai
-Kaczorowski
-Kaczmarski
-Juarbe
-Jordison
-Jonathan
-Jobst
-Jezierski
-Jeanbart
-Jarquin
-Janey
-Jagodzinski
-Ishak
-Isett
-Isa
-Infantino
-Imburgia
-Illingworth
-Hysmith
-Hynson
-Hydrick
-Hurla
-Hunton
-Hunnell
-Humbertson
-Housand
-Hottle
-Hosch
-Hoos
-Honn
-Hohlt
-Hodel
-Hochmuth
-Hixenbaugh
-Hislop
-Hisaw
-Hintzen
-Hilgendorf
-Hilchey
-Higgens
-Hersman
-Herrara
-Hendrixson
-Hendriks
-Hemond
-Hemmingway
-Heminger
-Helgren
-Heisey
-Heilmann
-Hehn
-Hegna
-Heffern
-Hawrylak
-Haverty
-Hauger
-Haslem
-Harnett
-Harb
-Happ
-Hanzlik
-Hanway
-Hanby
-Hanan
-Hamric
-Hammaker
-Halas
-Hagenbuch
-Hacking
-Habeck
-Gwozdz
-Gutter
-Gunia
-Guise
-Guadarrama
-Grubaugh
-Grivas
-Griffieth
-Grieb
-Grewell
-Gregorich
-Grazier
-Graeber
-Graciano
-Gowens
-Goodpaster
-Gondek
-Gohr
-Goffney
-Godbee
-Gitlin
-Gisler
-Gin
-Gillyard
-Gillooly
-Gilchrest
-Gilbo
-Gierlach
-Giebler
-Giang
-Geske
-Gervasio
-Gertner
-Gehling
-Geeter
-Gaus
-Gattison
-Gatica
-Gathings
-Gath
-Gassner
-Gassert
-Garabedian
-Gamon
-Gameros
-Galban
-Gabourel
-Gaal
-Fuoco
-Fullenwider
-Fudala
-Friscia
-Franceschini
-Foronda
-Fontanilla
-Florey
-Florentino
-Flore
-Flegle
-Flecha
-Fisler
-Fischbach
-Fiorita
-Fines
-Figura
-Figgins
-Fichera
-Fester
-Ferra
-Fear
-Fawley
-Fawbush
-Fausett
-Farnes
-Farago
-Fairclough
-Fahie
-Fabiani
-Everest
-Evanson
-Eutsey
-Eshbaugh
-Esh
-Ertle
-Eppley
-Englehardt
-Engelhard
-Emswiler
-Elza
-Elling
-Elderkin
-Eland
-Efaw
-Edstrom
-Edmund
-Edgemon
-Ecton
-Echeverri
-Ebright
-Earheart
-Dynes
-Dygert
-Dyches
-Dulmage
-Duhn
-Duhamel
-Dues
-Dubrey
-Dubray
-Dubbs
-Drone
-Drey
-Drewery
-Dreier
-Dorval
-Dorough
-Dorais
-Donlin
-Donatelli
-Doke
-Dohm
-Doetsch
-Dobek
-Ditty
-Disbrow
-Ding
-Dinardi
-Dillahunty
-Dillahunt
-Diers
-Dier
-Diekmann
-Diangelo
-Deskin
-Deschaine
-Depaoli
-Denner
-Demyan
-Demont
-Demaray
-Delillo
-Deleeuw
-Deibel
-Decato
-Deblasio
-Debartolo
-Daubenspeck
-Darner
-Dardon
-Danziger
-Danials
-Damewood
-Dalpiaz
-Dallman
-Dallaire
-Cunniffe
-Cumpston
-Cumbo
-Cubero
-Cruzan
-Cronkhite
-Critelli
-Crimi
-Creegan
-Crean
-Craycraft
-Crater
-Cranfill
-Coyt
-Courchesne
-Coufal
-Corradino
-Corprew
-Colville
-Cocco
-Coby
-Clinch
-Clickner
-Clavette
-Claggett
-Cirigliano
-Ciesielski
-Christain
-Chesbro
-Chavera
-Chard
-Casteneda
-Castanedo
-Cast
-Casseus
-Casa
-Caruana
-Carnero
-Cappelli
-Capellan
-Canedy
-Cancro
-Camilleri
-Calero
-Cada
-Burghart
-Burbidge
-Bulfer
-Buis
-Budniewski
-Bucko
-Bruney
-Brugh
-Brossard
-Brodmerkel
-Brockmann
-Bring
-Brigmond
-Briere
-Bremmer
-Breck
-Breau
-Brautigam
-Brasch
-Brandenberger
-Bran
-Bragan
-Bozell
-Bowsher
-Bosh
-Borgia
-Borey
-Boomhower
-Bonneville
-Bonam
-Bolland
-Boise
-Boeve
-Boettger
-Boersma
-Boateng
-Bliven
-Blazier
-Blanca
-Blahnik
-Bjornstad
-Bitton
-Biss
-Birkett
-Billingsly
-Biagioni
-Bettle
-Bertucci
-Bertolino
-Bermea
-Bergner
-Berber
-Bensley
-Bendixen
-Beltrami
-Bellone
-Belland
-Bein
-Behringer
-Begum
-Beans
-Bayona
-Batiz
-Bassin
-Baskette
-Bartolomeo
-Bartolo
-Bartholow
-Barkan
-Barish
-Barett
-Bardo
-Bamburg
-Ballerini
-Balla
-Balis
-Bakley
-Bailon
-Bachicha
-Babiarz
-Ayars
-Axton
-Axel
-Awong
-Awe
-Awalt
-Auslander
-Ausherman
-Aumick
-Athens
-Atha
-Atchinson
-Aslett
-Askren
-Arrowsmith
-Arras
-Arnhold
-Armagost
-Arey
-Arcos
-Archibeque
-Antunes
-Antilla
-Ann
-Andras
-Amyx
-Amison
-Amero
-Alzate
-Alphonse
-Alper
-Aller
-Alioto
-Alexandria
-Aigner
-Agtarap
-Agbayani
-Adami
-Achorn
-Aceuedo
-Acedo
-Abundis
-Aber
-Abee
-Zuccaro
-Ziglar
-Zier
-Ziebell
-Zieba
-Zamzow
-Zahl
-Yurko
-Yurick
-Yonkers
-Yerian
-Yeaman
-Yarman
-Yann
-Yahn
-Yadon
-Yadao
-Woodbridge
-Wolske
-Wollenberg
-Wojtczak
-Wnuk
-Witherite
-Winther
-Winick
-Widell
-Wickens
-Whichard
-Wheelis
-Wesely
-Wentzell
-Wenthold
-Wemple
-Weisenburger
-Wehling
-Weger
-Weaks
-Water
-Wassink
-Warn
-Walquist
-Wadman
-Wacaster
-Waage
-Voliva
-Vlcek
-Villafana
-Vigliotti
-Viger
-Viernes
-Viands
-Vey
-Veselka
-Versteeg
-Vero
-Verhoeven
-Vendetti
-Velardo
-Vatter
-Vasconcellos
-Varn
-Vanwagner
-Vanvoorhis
-Vanhecke
-Vanduyn
-Vandervoort
-Vanderslice
-Valone
-Vallier
-Vails
-Uvalle
-Ursua
-Urenda
-Upright
-Uphoff
-Tustin
-Turton
-Turnbough
-Turck
-Tullio
-Tuch
-Truehart
-Tropea
-Troester
-Trippe
-Tricarico
-Trevarthen
-Trembly
-Trace
-Trabue
-Traber
-Toto
-Tosi
-Toal
-Tinley
-Tingler
-Timoteo
-Tiffin
-Tien
-Ticer
-Thurgood
-Thorman
-Therriault
-Theel
-Tessman
-Tekulve
-Tejera
-Tebbs
-Tavernia
-Tarpey
-Tallmadge
-Takemoto
-Szot
-Sylvest
-Swindoll
-Swearinger
-Swantek
-Swaner
-Swainston
-Susi
-Surrette
-Sur
-Supple
-Sullenger
-Sudderth
-Suddarth
-Suckow
-Strider
-Strege
-Stream
-Strassburg
-Stoval
-Stotz
-Stoneham
-Stilley
-Stille
-Stierwalt
-Stfleur
-Steuck
-Stermer
-Stclaire
-Stano
-Staker
-Stahler
-Stablein
-Srinivasan
-Squillace
-Sprvill
-Sproull
-Sprau
-Sporer
-Spore
-Spittler
-Speelman
-Sparr
-Sparkes
-Spang
-Spagnuolo
-Sosinski
-Sorto
-Sorkin
-Sondag
-Sollers
-Socia
-Snarr
-Smrekar
-Smolka
-Slyter
-Slovinsky
-Sliwa
-Slavik
-Slatter
-Skiver
-Skeem
-Skala
-Sitzes
-Sitsler
-Sitler
-Sinko
-Simser
-Siegler
-Sideris
-Shrewsberry
-Shoopman
-Shoaff
-Shira
-Shindler
-Shimmin
-Shill
-Shenkel
-Shemwell
-Shehorn
-Severa
-Sergio
-Semones
-Selsor
-Seller
-Sekulski
-Segui
-Sechrest
-Scot
-Schwer
-Schwebach
-Schur
-Schmiesing
-Schlick
-Schlender
-Schebler
-Schear
-Schapiro
-Sauro
-Saunder
-Sauage
-Satterly
-Saraiva
-Saracino
-Saperstein
-Sanmartin
-Sanluis
-Sandt
-Sandrock
-Sammet
-Sama
-Salk
-Sakata
-Saini
-Sackrider
-Rys
-Russum
-Russi
-Russaw
-Rozzell
-Roza
-Rowlette
-Rothberg
-Rossano
-Rosebrock
-Romanski
-Romanik
-Romani
-Roma
-Roiger
-Roig
-Roehr
-Rodenberger
-Rodela
-Rod
-Rochford
-Ristow
-Rispoli
-Ripper
-Rigo
-Riesgo
-Riebel
-Ribera
-Ribaudo
-Rhoda
-Reys
-Resendes
-Repine
-Reisdorf
-Reisch
-Rebman
-Rasmus
-Raske
-Ranum
-Rames
-Rambin
-Raman
-Rajewski
-Raffield
-Rady
-Radich
-Raatz
-Quinnie
-Pyper
-Puthoff
-Prow
-Proehl
-Pribyl
-Pretti
-Prete
-Presby
-Poyer
-Powelson
-Porteous
-Poquette
-Pooser
-Pollan
-Ploss
-Plewa
-Plants
-Placide
-Pion
-Pinnick
-Pinales
-Pin
-Pillot
-Pille
-Pilato
-Piggee
-Pietrowski
-Piermarini
-Pickford
-Piccard
-Phenix
-Pevey
-Petrowski
-Petrillose
-Pesek
-Perrotti
-Perfecto
-Peppler
-Peppard
-Penfold
-Pellitier
-Pelland
-Pehowic
-Pedretti
-Paules
-Passero
-Pasha
-Panza
-Pallante
-Palau
-Pakele
-Pacetti
-Paavola
-Overy
-Overson
-Outler
-Osegueda
-Ord
-Oplinger
-Oldenkamp
-Ok
-Ohern
-Oetting
-Odums
-Oba
-Nowlen
-Nowack
-Nordlund
-Noblett
-Nobbe
-Nierman
-Nichelson
-Niblock
-Newbrough
-Nest
-Nemetz
-Neeson
-Needleman
-Necessary
-Navin
-Nastasi
-Naslund
-Naramore
-Nakken
-Nakanishi
-Najarro
-Mushrush
-Muma
-Mulero
-Morganfield
-Moreman
-Morain
-Moquin
-Montrose
-Monterrosa
-Monsivais
-Monroig
-Monje
-Monfort
-Moises
-Moffa
-Moeckel
-Mobbs
-Mitch
-Misiak
-Mires
-Mirelez
-Mineo
-Mineau
-Milnes
-Mikeska
-Michelin
-Michalowski
-Meszaros
-Messineo
-Meshell
-Merten
-Meola
-Menton
-Mends
-Mende
-Memmott
-Melius
-Mehan
-Mcnickle
-Mcmorran
-Mclennon
-Mcleish
-Mclaine
-Mckendry
-Mckell
-Mckeighan
-Mcisaac
-Mcie
-Mcguinn
-Mcgillis
-Mcfatridge
-Mcfarling
-Mcelravy
-Mcdonalds
-Mcculla
-Mcconnaughy
-Mcconnaughey
-Mcchriston
-Mcbeath
-Mayr
-Matyas
-Matthiesen
-Matsuura
-Matinez
-Mathys
-Matarazzo
-Masker
-Masden
-Mascio
-Martis
-Marrinan
-Marinucci
-Margerum
-Marengo
-Manthe
-Mansker
-Manoogian
-Mankey
-Manigo
-Manier
-Mangini
-Mandelbaum
-Maltese
-Malsam
-Mallo
-Maliszewski
-Mainolfi
-Maharaj
-Maggart
-Magar
-Maffett
-Macmaster
-Macky
-Macdonnell
-Mable
-Lyvers
-Lyn
-Luzzi
-Lutman
-Luk
-Lover
-Lovan
-Lonzo
-Longest
-Longerbeam
-Lofthouse
-Loethen
-Lodi
-Llorens
-Lizardo
-Lizama
-Liz
-Litscher
-Lisowski
-Lipski
-Lipsett
-Lipkin
-Linzey
-Lineman
-Limerick
-Limb
-Limas
-Lige
-Lierman
-Liebold
-Liberti
-Leverton
-Levene
-Lesueur
-Lenser
-Lenker
-Lemme
-Legnon
-Lefrancois
-Ledwell
-Lavecchia
-Laurich
-Lauricella
-Latino
-Lannigan
-Landor
-Lamprecht
-Lamountain
-Lamore
-Lamonica
-Lammert
-Lamboy
-Lamarque
-Lamacchia
-Lalley
-Lagace
-Lacorte
-Lacomb
-Kyllonen
-Kyker
-Kye
-Kuschel
-Kupfer
-Kunde
-Kucinski
-Kubacki
-Kuan
-Kroenke
-Krech
-Koziel
-Kovacich
-Kothari
-Koth
-Kotek
-Kostelnik
-Kosloski
-Knoles
-Knabe
-Kmiecik
-Klingman
-Kliethermes
-Kleffman
-Klees
-Klaiber
-Kittell
-Kissling
-Kisinger
-Kintner
-Kinoshita
-Kiener
-Khouri
-Kerman
-Kelii
-Keirn
-Keezer
-Kaup
-Kathan
-Kaser
-Karlsen
-Kapur
-Kandoll
-Kammel
-Kahele
-Justesen
-Jue
-Jonason
-Johnsrud
-Joerling
-Jochim
-Jespersen
-Jeong
-Jenness
-Jedlicka
-Jakob
-Isaman
-Inghram
-Ingenito
-Imperial
-Iadarola
-Hynd
-Huxtable
-Huwe
-Huron
-Hurless
-Humpal
-Hughston
-Hughart
-Huggett
-Hugar
-Huether
-Howdyshell
-Houtchens
-Houseworth
-Hoskie
-Holshouser
-Holmen
-Holloran
-Hohler
-Hoefler
-Hodsdon
-Hochman
-Hjort
-Hippert
-Hippe
-Hinzman
-Hillock
-Hilden
-Hilde
-Heyn
-Heyden
-Heyd
-Hergert
-Henrikson
-Henningsen
-Hendel
-Helget
-Helf
-Helbing
-Heintzman
-Heggie
-Hege
-Hecox
-Heatherington
-Heare
-Haxton
-Haverstock
-Haverly
-Hatler
-Haselton
-Hase
-Hartzfeld
-Harten
-Harken
-Hargrow
-Haran
-Hanton
-Hammar
-Hamamoto
-Halper
-Halko
-Hackathorn
-Haberle
-Haake
-Gunnoe
-Gunkel
-Gulyas
-Guiney
-Guilbeau
-Guider
-Guerrant
-Gudgel
-Guarisco
-Grossen
-Grossberg
-Gropp
-Groome
-Grobe
-Gremminger
-Greenley
-Grauberger
-Grabenstein
-Gowers
-Gostomski
-Gosier
-Goodenow
-Gonzoles
-Goliday
-Goettle
-Goens
-Goates
-Glymph
-Glavin
-Glassco
-Gladys
-Gladfelter
-Glackin
-Githens
-Girgis
-Gimpel
-Gilbreth
-Gilbeau
-Giffen
-Giannotti
-Gholar
-Gervasi
-Gertsch
-Gernatt
-Gephardt
-Genco
-Gehr
-Geddis
-Gear
-Gase
-Garrott
-Garrette
-Gapinski
-Ganter
-Ganser
-Gangi
-Gangemi
-Gang
-Gallina
-Galdi
-Gailes
-Gaetano
-Gadomski
-Gaccione
-Fuschetto
-Furtick
-Furfaro
-Fullman
-Frutos
-Fruchter
-Frogge
-Freytag
-Freudenthal
-Fregoe
-Franzone
-Frankum
-Francia
-Franceschi
-Fraction
-Forys
-Forero
-Folkers
-Foil
-Flug
-Flitter
-Flemons
-Fitzer
-Firpo
-Finizio
-Filiault
-Figg
-Fiddler
-Fichtner
-Fetterolf
-Ferringer
-Feil
-Fayne
-Farro
-Faddis
-Ezzo
-Ezelle
-Eynon
-Evitt
-Eutsler
-Euell
-Escovedo
-Erne
-Eriksson
-Enriguez
-Empson
-Elkington
-Elk
-Eisenmenger
-Eidt
-Eichenberger
-Ehrmann
-Ediger
-Earlywine
-Eacret
-Duzan
-Dunnington
-Duffer
-Ducasse
-Dubiel
-Drovin
-Drager
-Drage
-Donham
-Donat
-Dona
-Dolinger
-Dokken
-Doepke
-Dodwell
-Docherty
-Distasio
-Disandro
-Diniz
-Digangi
-Didion
-Dezzutti
-Devora
-Detmer
-Deshon
-Derrigo
-Dentler
-Demoura
-Demeter
-Demeritt
-Demayo
-Demark
-Demario
-Delzell
-Delnero
-Delgrosso
-Dejarnett
-Debernardi
-Dearmas
-Dau
-Dashnaw
-Daris
-Danks
-Danker
-Dangler
-Daignault
-Dafoe
-Dace
-Curet
-Cumberledge
-Culkin
-Cuba
-Crowner
-Crocket
-Crawshaw
-Craun
-Cranshaw
-Cragle
-Courser
-Costella
-Cornforth
-Corkill
-Cordy
-Coopersmith
-Conzemius
-Connett
-Connely
-Condict
-Condello
-Concha
-Comley
-Colt
-Collen
-Cohoon
-Coday
-Clugston
-Clowney
-Clippard
-Clinkenbeard
-Clines
-Clelland
-Clause
-Clapham
-Clancey
-Clabough
-Cichy
-Cicalese
-Chuck
-Chua
-Chittick
-Chisom
-Chisley
-Chino
-Chinchilla
-Cheramie
-Cerritos
-Cercone
-Cena
-Cawood
-Cavness
-Catanzarite
-Casada
-Carvell
-Carp
-Carmicheal
-Carll
-Cardozo
-Caplin
-Candia
-Canby
-Cammon
-Callister
-Calligan
-Calkin
-Caillouet
-Buzzelli
-Bute
-Bustillo
-Bursey
-Burgeson
-Bupp
-Bulson
-Bulls
-Buist
-Buffey
-Buczkowski
-Buckbee
-Bucio
-Brueckner
-Broz
-Brookhart
-Brong
-Brockmeyer
-Broberg
-Brittenham
-Brisbois
-Bridgmon
-Bride
-Breyer
-Brede
-Breakfield
-Breakey
-Brauner
-Branigan
-Brandewie
-Branche
-Brager
-Brader
-Bovell
-Bouthot
-Bostock
-Bosma
-Boseman
-Boschee
-Borthwick
-Borneman
-Borer
-Borek
-Boomershine
-Boni
-Bommarito
-Bolman
-Boleware
-Boisse
-Boehlke
-Bodle
-Blash
-Blasco
-Blakesley
-Blacklock
-Blackley
-Bittick
-Birks
-Birdin
-Bircher
-Bilbao
-Bick
-Biby
-Bertoni
-Bertino
-Bertini
-Berson
-Bern
-Berkebile
-Bergstresser
-Benne
-Benevento
-Belzer
-Beltre
-Bellomo
-Bellerose
-Beilke
-Begeman
-Bebee
-Beazer
-Beaven
-Beamish
-Baymon
-Baston
-Bastidas
-Basom
-Basket
-Basey
-Bartles
-Baroni
-Barocio
-Barnet
-Barclift
-Banville
-Balthazor
-Balleza
-Balkcom
-Baires
-Bailiff
-Bailie
-Baik
-Baggott
-Bagen
-Bachner
-Babington
-Babel
-Asmar
-Askin
-Arvelo
-Artega
-Arrendondo
-Arreaga
-Arrambide
-Arquette
-Aronoff
-Arico
-Argentieri
-Arevalos
-Archbold
-Apuzzo
-Antczak
-Ankeny
-Angelle
-Angelini
-Anfinson
-Amer
-Amberg
-Amarillas
-Altier
-Altenburg
-Alspach
-Alosa
-Allsbrook
-Alexopoulos
-Aleem
-Aldred
-Albertsen
-Akerson
-Ainsley
-Agler
-Adley
-Addams
-Acoba
-Achille
-Abplanalp
-Abella
-Abare
-Zwolinski
-Zollicoffer
-Zola
-Zins
-Ziff
-Zenner
-Zender
-Zelnick
-Zelenka
-Zeches
-Zaucha
-Zauala
-Zappa
-Zangari
-Zagorski
-Youtsey
-Yorker
-Yell
-Yasso
-Yarde
-Yarbough
-Xiao
-Woolever
-Woodsmall
-Woodfolk
-Wonders
-Wobig
-Wixson
-Wittwer
-Wirtanen
-Winson
-Wingerd
-Wilkening
-Wilhelms
-Wierzbicki
-Wiechman
-Whites
-Weyrick
-Wessell
-Wenrick
-Wenning
-Weltz
-Weinrich
-Weiand
-Wehunt
-Wareing
-Walth
-Waibel
-Wahlquist
-Vona
-Voelkel
-Vitek
-Vinsant
-Vincente
-Vilar
-Viel
-Vicars
-Vermette
-Verma
-Vent
-Venner
-Veazie
-Vayda
-Vashaw
-Varon
-Vardeman
-Vandevelde
-Vanbrocklin
-Valery
-Val
-Vaccarezza
-Urquidez
-Urie
-Urbach
-Uram
-Ungaro
-Umali
-Ulsh
-Tutwiler
-Turnbaugh
-Tumminello
-Tuite
-Tueller
-Trulove
-Troha
-Trivino
-Trisdale
-Trippett
-Tribbett
-Treptow
-Tremain
-Travelstead
-Trautwein
-Trautmann
-Tram
-Traeger
-Tonelli
-Tomsic
-Tomich
-Tomasulo
-Tomasino
-Tole
-Todhunter
-Toborg
-Tischer
-Tirpak
-Tircuit
-Tinnon
-Tinnel
-Tines
-Tina
-Timbs
-Tilden
-Tiede
-Thumm
-Throne
-Throgmorton
-Thorndike
-Thornburgh
-Thoren
-Thomann
-Therrell
-Thau
-Thammavong
-Tetrick
-Tessitore
-Tesreau
-Teicher
-Teaford
-Tauscher
-Tauer
-Tanabe
-Talamo
-Takeuchi
-Taite
-Tadych
-Sweeton
-Swecker
-Swartzentrube
-Swarner
-Surrell
-Surbaugh
-Suppa
-Sunshine
-Sumbry
-Suchy
-Stuteville
-Studt
-Stromer
-Strome
-Streng
-Stonestreet
-Stockley
-Stmichel
-Sticker
-Stfort
-Sternisha
-Stensrud
-Steinhardt
-Steinback
-Steichen
-Stauble
-Stasiak
-Starzyk
-Stango
-Standerfer
-Stachowiak
-Springston
-Spratlin
-Spracklen
-Sponseller
-Spilker
-Spiegelman
-Spellacy
-Speiser
-Spaziani
-Spader
-Spackman
-Space
-Sorum
-Sopha
-Sollis
-Sollenberger
-Solivan
-Solheim
-Sokolsky
-Sogge
-Smyser
-Smitley
-Sloas
-Slinker
-Skora
-Skiff
-Skare
-Siverd
-Sivels
-Siska
-Siordia
-Simmering
-Simko
-Sime
-Silmon
-Silano
-Sieger
-Siebold
-Shukla
-Shreves
-Shoun
-Shortle
-Shonkwiler
-Shoals
-Shimmel
-Shiel
-Shieh
-Sherbondy
-Shenkman
-Shein
-Shearon
-Shean
-Shatz
-Shanholtz
-Shafran
-Shaff
-Shackett
-Sgroi
-Sewall
-Severy
-Sethi
-Sessa
-Sequra
-Sepulvado
-Seper
-Senteno
-Sendejo
-Semmens
-Seipp
-Segler
-Seegers
-Sedwick
-Sedore
-Sechler
-Sebastiano
-Scovel
-Scotton
-Scopel
-Schwend
-Schwarting
-Schutter
-Schrier
-Schons
-Scholtes
-Schnetzer
-Schnelle
-Schmutz
-Schlichter
-Schelling
-Schams
-Schamp
-Scarber
-Scallan
-Scalisi
-Scaffidi
-Saxby
-Sawrey
-Sauvageau
-Sauder
-Sarrett
-Sanzo
-Santizo
-Santella
-Santander
-Sandez
-Sandel
-Sammon
-Salsedo
-Salge
-Sailors
-Sagun
-Safi
-Sader
-Sacchetti
-Sablan
-Saber
-Saade
-Runnion
-Runkel
-Rung
-Rumbo
-Ruesch
-Ruegg
-Ruckle
-Ruchti
-Rubens
-Rubano
-Rozycki
-Roupe
-Roufs
-Rossel
-Rosmarin
-Rosero
-Rosenwald
-Roselle
-Ronca
-Romos
-Rolla
-Rohling
-Rohleder
-Roell
-Roehm
-Rochefort
-Roch
-Robotham
-Rivenburgh
-Riopel
-Riederer
-Ridlen
-Rias
-Rhudy
-Reynard
-Retter
-Respess
-Reppond
-Repko
-Rengifo
-Reinking
-Reichelt
-Reeh
-Redenius
-Rebolledo
-Raymundo
-Rauh
-Ratajczak
-Rapley
-Ranalli
-Ramie
-Raitt
-Radloff
-Radle
-Rabbitt
-Quay
-Quant
-Pusateri
-Puffinberger
-Puerta
-Provencio
-Proano
-Privitera
-Prenger
-Prellwitz
-Pousson
-Potier
-Poster
-Portz
-Portlock
-Porth
-Portela
-Portee
-Porchia
-Pollick
-Polinski
-Polfer
-Polanski
-Polachek
-Pluta
-Plourd
-Plauche
-Pitner
-Piontkowski
-Pileggi
-Pierotti
-Pico
-Piacente
-Phinisee
-Phaup
-Pfost
-Pettinger
-Pettet
-Petrich
-Peto
-Persley
-Persad
-Perlstein
-Perko
-Pere
-Penders
-Peifer
-Peco
-Pear
-Pay
-Pawley
-Pash
-Parrack
-Parady
-Papen
-Pangilinan
-Pandolfo
-Palone
-Palmertree
-Padin
-Ou
-Ottey
-Ottem
-Ostroski
-Ornstein
-Ormonde
-Onstott
-Oncale
-Oltremari
-Olcott
-Olan
-Oishi
-Oien
-Odonell
-Odonald
-Ode
-Obeso
-Obeirne
-Oatley
-Nusser
-Novo
-Novicki
-Noreen
-Nora
-Nitschke
-Nistler
-Nim
-Nikkel
-Niese
-Nierenberg
-Nield
-Niedzwiecki
-Niebla
-Niebel
-Nicklin
-Neyhart
-Newsum
-Nevares
-Nageotte
-Nagai
-Myung
-Mutz
-Murata
-Muralles
-Munnerlyn
-Mumpower
-Muegge
-Muckle
-Muchmore
-Moulthrop
-Motl
-Moskos
-Mortland
-Morring
-Mormile
-Morimoto
-Morikawa
-Morgon
-Mordecai
-Montour
-Mont
-Mongan
-Monell
-Miyasato
-Mish
-Minshew
-Mimbs
-Millin
-Milliard
-Mihm
-Middlemiss
-Miano
-Mew
-Mesick
-Merlan
-Mendonsa
-Mench
-Melonson
-Melling
-Mecca
-Meachem
-Mctighe
-Mcnelis
-Mcmurtrey
-Mcmurphy
-Mckesson
-Mckenrick
-Mckelvie
-Mcjunkins
-Mcgory
-Mcgirr
-Mcgeever
-Mcfield
-Mcelhinney
-Mccrossen
-Mccommon
-Mccannon
-Mazyck
-Mawyer
-Maull
-Matute
-Mathies
-Maschino
-Marzan
-Martinie
-Marrotte
-Marmion
-Markarian
-Marinacci
-Margolies
-Margeson
-Marcia
-Marcel
-Marak
-Maraia
-Maracle
-Manygoats
-Mano
-Manker
-Mank
-Mandich
-Manderson
-Maltz
-Malmquist
-Malacara
-Majette
-Mais
-Magnan
-Magliocca
-Madina
-Madara
-Macwilliams
-Macqueen
-Maccallum
-Lyde
-Lyday
-Lutrick
-Lurz
-Lurvey
-Lumbreras
-Luhrs
-Luhr
-Lue
-Lowrimore
-Lowndes
-Lowers
-Lourenco
-Lougee
-Lorona
-Longstreth
-Loht
-Lofquist
-Loewenstein
-Lobos
-Lizardi
-Liverpool
-Lionberger
-Limoli
-Liljenquist
-Liguori
-Liebl
-Liburd
-Leukhardt
-Letizia
-Lesinski
-Lepisto
-Lenzini
-Leisenring
-Leipold
-Leier
-Leggitt
-Legare
-Leaphart
-Lazor
-Lazaga
-Lavey
-Laue
-Laudermilk
-Lauck
-Lassalle
-Larsson
-Larison
-Lanzo
-Lantzy
-Lanners
-Langtry
-Landford
-Lancour
-Lamour
-Lambertson
-Lalone
-Lairson
-Lainhart
-Lagreca
-Lacina
-Labranche
-Labate
-Kurtenbach
-Kuipers
-Kuechle
-Kue
-Kubo
-Krinsky
-Krauser
-Kraeger
-Kracht
-Kozeliski
-Kozar
-Kowalik
-Kotler
-Kotecki
-Koslosky
-Kosel
-Koob
-Kolasinski
-Koizumi
-Kohlman
-Koffman
-Knutt
-Knore
-Knaff
-Kmiec
-Klamm
-Kittler
-Kitner
-Kirkeby
-Kiper
-Kindler
-Kilmartin
-Killings
-Killin
-Kilbride
-Kerchner
-Kendell
-Keddy
-Keaveney
-Kearsley
-Karras
-Karlsson
-Karalis
-Kappes
-Kapadia
-Kallman
-Kallio
-Kalil
-Kader
-Jurkiewicz
-Joya
-Johann
-Jitchaku
-Jillson
-Jex
-Jeune
-Jarratt
-Jarchow
-Janak
-Ivins
-Ivans
-Isenhart
-Inocencio
-Inoa
-Imhof
-Iacono
-Hynds
-Hutching
-Hutchin
-Hulsman
-Hulsizer
-Hueston
-Huddleson
-Hrbek
-Howry
-Housey
-Hounshell
-Hosick
-Hortman
-Horseman
-Horky
-Horine
-Hootman
-Honeywell
-Honeyestewa
-Holste
-Holien
-Holbrooks
-Hoffmeyer
-Hof
-Hoese
-Hoenig
-Hirschfeld
-Hildenbrand
-Higson
-Higney
-Hibert
-Hibbetts
-Hewlin
-Hesley
-Herrold
-Hermon
-Heritage
-Hepker
-Henwood
-Helbling
-Heinzman
-Heidtbrink
-Hedger
-Havey
-Hatheway
-Hartshorne
-Harpel
-Haning
-Handelman
-Hamalainen
-Hamad
-Halt
-Halasz
-Haigwood
-Haggans
-Hackshaw
-Guzzo
-Gunner
-Gundrum
-Guilbeault
-Gugliuzza
-Guglielmi
-Gue
-Guderian
-Gruwell
-Grunow
-Grundman
-Gruen
-Grotzke
-Grossnickle
-Groomes
-Grode
-Grochowski
-Grob
-Grein
-Greif
-Greenwall
-Greenup
-Grassl
-Grannis
-Grandfield
-Grames
-Grabski
-Grabe
-Gouldsberry
-Gotham
-Gosch
-Goody
-Goodling
-Goodermote
-Gonzale
-Golebiowski
-Goldson
-Godlove
-Glanville
-Gillin
-Gilkerson
-Giessler
-Giambalvo
-Giacomini
-Giacobbe
-Ghio
-Gergen
-Gentz
-Genrich
-Gelormino
-Gelber
-Geitner
-Geimer
-Gauthreaux
-Gaultney
-Garvie
-Gareau
-Garbo
-Garbacz
-Ganoe
-Gangwer
-Gandarilla
-Galyen
-Galt
-Galluzzo
-Gallon
-Galardo
-Gager
-Gaddie
-Gaber
-Gabehart
-Gaarder
-Fusilier
-Furnari
-Furbee
-Fugua
-Fruth
-Frohman
-Friske
-Frilot
-Fridman
-Frescas
-Freier
-Frayer
-Franzese
-Franklyn
-Frankenberry
-Frain
-Fosse
-Foresman
-Forbess
-Foot
-Florida
-Flook
-Fletes
-Fleer
-Fleek
-Fleegle
-Fishburne
-Fiscalini
-Finnigan
-Fini
-Filipiak
-Figueira
-Fiero
-Ficek
-Fiaschetti
-Ferren
-Ferrando
-Ferman
-Fergusson
-Fenech
-Feiner
-Feig
-Fees
-Faulds
-Fate
-Fariss
-Fantasia
-Falor
-Falke
-Ewings
-Eversley
-Everding
-Eunice
-Etling
-Essen
-Erskin
-Enstrom
-Enrico
-Engebretsen
-Ender
-Emma
-Eitel
-Eichberger
-Ehler
-Eekhoff
-Edrington
-Edmonston
-Edgmon
-Edes
-Eberlein
-Dwinell
-Dux
-Dupee
-Dunklee
-Dunk
-Dungey
-Dunagin
-Dumoulin
-Duggar
-Duenez
-Dudzic
-Dudenhoeffer
-Ducey
-Dub
-Drouillard
-Dreibelbis
-Dreger
-Dreesman
-Draughon
-Downen
-Double
-Dorminy
-Dominic
-Dombeck
-Dolman
-Doebler
-Dittberner
-Dishaw
-Disanti
-Dinicola
-Dinham
-Dimino
-Dilling
-Difrancesco
-Dicello
-Dibert
-Deshazer
-Deserio
-Descoteau
-Deruyter
-Dering
-Depinto
-Dente
-Demus
-Demattos
-Demarsico
-Delude
-Dekok
-Debrito
-Debois
-Deakin
-Dea
-Dayley
-Dawsey
-Dauria
-Datson
-Darty
-Darsow
-Darragh
-Darensbourg
-Dalleva
-Dalbec
-Dadd
-Cutcher
-Curb
-Cung
-Cuello
-Cuadros
-Crute
-Crutchley
-Crispino
-Crislip
-Crisco
-Crevier
-Creekmur
-Crance
-Cragg
-Crager
-Cozby
-Coyan
-Coxon
-Covalt
-Couillard
-Costley
-Costilow
-Cossairt
-Corvino
-Corigliano
-Cordaro
-Corbridge
-Corban
-Coor
-Cooler
-Conkel
-Cong
-Conary
-Coltrain
-Collopy
-Colgin
-Colen
-Colbath
-Coiro
-Coffie
-Cochrum
-Cobbett
-Clopper
-Cliburn
-Clendenon
-Clemon
-Clementi
-Clausi
-Cirino
-Cina
-Churn
-Churchman
-Chilcutt
-Cherney
-Cheetham
-Cheatom
-Chatelain
-Chandra
-Chalifour
-Cesa
-Cervenka
-Cerullo
-Cerreta
-Cerbone
-Cecchini
-Ceccarelli
-Cawthorn
-Cavalero
-Catalina
-Castner
-Castlen
-Castine
-Casimiro
-Casdorph
-Cartmill
-Cartmell
-Carro
-Carriger
-Carlee
-Carias
-Caravella
-Cappas
-Capen
-Cantey
-Canedo
-Camuso
-Camps
-Campanaro
-Camero
-Cambria
-Calzado
-Callejo
-Caligiuri
-Cafaro
-Cadotte
-Cacace
-Byrant
-Busbey
-Burtle
-Burres
-Burnworth
-Burggraf
-Burback
-Bunte
-Bunke
-Bulle
-Bugos
-Budlong
-Buckhalter
-Buccellato
-Brummet
-Bruff
-Brubeck
-Brouk
-Broten
-Brosky
-Broner
-Brittle
-Brislin
-Brimm
-Brillhart
-Bridgham
-Brideau
-Brennecke
-Brenna
-Breer
-Breeland
-Bredesen
-Branden
-Brackney
-Brackeen
-Boza
-Boyum
-Bowdry
-Bowdish
-Bouwens
-Bouvier
-Bougie
-Bouche
-Bottenfield
-Bostian
-Bossie
-Bosler
-Boschert
-Boroff
-Borello
-Boom
-Bonser
-Bonfield
-Bon
-Bole
-Boldue
-Bogacz
-Boemer
-Bluth
-Bloxom
-Blickenstaff
-Blessinger
-Bleazard
-Blatz
-Blanchet
-Blacksher
-Birchler
-Binning
-Binkowski
-Biltz
-Bilotta
-Bilagody
-Bigbee
-Bieri
-Biehle
-Bidlack
-Betker
-Bethers
-Bethell
-Bertha
-Bero
-Bernacchi
-Bermingham
-Berkshire
-Benvenuto
-Bensman
-Benoff
-Bencivenga
-Beman
-Bellow
-Bellany
-Belflower
-Belch
-Bekker
-Bejar
-Beisel
-Beichner
-Began
-Beedy
-Beas
-Beanblossom
-Bawek
-Baus
-Baugus
-Battie
-Battershell
-Bateson
-Basque
-Basford
-Bartone
-Barritt
-Barko
-Bann
-Bamford
-Baltrip
-Balon
-Balliew
-Ballam
-Baldus
-Ayling
-Avelino
-Ashwell
-Ashland
-Arseneau
-Arroyos
-Armendarez
-Arita
-Argust
-Archuletta
-Arcement
-Antonacci
-Anthis
-Antal
-Annan
-Andree
-Anderman
-Amster
-Amiri
-Amadon
-Alveraz
-Altomari
-Altmann
-Altenhofen
-Allers
-Allbee
-Allaway
-All
-Aleo
-Alcoser
-Alcorta
-Akhtar
-Ahuna
-Agramonte
-Agard
-Adkerson
-Achord
-Abt
-Abdi
-Abair
-Zurn
-Zoellner
-Zirk
-Zion
-Zee
-Zarro
-Zarco
-Zambo
-Zaiser
-Zaino
-Zachry
-Youd
-Yonan
-Yniguez
-Yepes
-Yeo
-Yellock
-Yellen
-Yeatts
-Yearling
-Yatsko
-Yannone
-Wyler
-Woodridge
-Wolfrom
-Wolaver
-Wolanin
-Wojnar
-Wojciak
-Wittmann
-Wittich
-Wiswell
-Wisser
-Wintersteen
-Wineland
-Willing
-Willford
-Wiginton
-Wigfield
-Wierman
-Wice
-Wiater
-Whitsel
-Whitbread
-Wheller
-Wettstein
-Werling
-Wente
-Wenig
-Wempe
-Welz
-Weinhold
-Weigelt
-Weichman
-Wedemeyer
-Weddel
-Ways
-Wayment
-Waycaster
-Wauneka
-Watzka
-Watton
-Warnell
-Warnecke
-Warmack
-Warder
-Wands
-Waldvogel
-Waldridge
-Wahs
-Wagganer
-Waddill
-Vyas
-Vought
-Votta
-Voiles
-Virga
-Viner
-Villella
-Villaverde
-Villaneda
-Viele
-Vickroy
-Vicencio
-Veve
-Vetere
-Vermilyea
-Verley
-Verburg
-Ventresca
-Veno
-Venard
-Venancio
-Velaquez
-Veenstra
-Vea
-Vasil
-Vanzee
-Vanwie
-Vantine
-Vant
-Vanschoyck
-Vannice
-Vankampen
-Vanicek
-Vandersloot
-Vanderpoel
-Vanderlinde
-Vallieres
-Uzzell
-Uzelac
-Uranga
-Uptain
-Updyke
-Uong
-Untiedt
-Umbrell
-Umbaugh
-Umbarger
-Ulysse
-Ullmann
-Ullah
-Tutko
-Turturro
-Turnmire
-Turnley
-Turcott
-Turbyfill
-Turano
-Tuminello
-Tumbleson
-Tsou
-Truscott
-Trulson
-Troutner
-Trone
-Troll
-Trinklein
-Tremmel
-Tredway
-Trease
-Traynham
-Traw
-Totty
-Torti
-Torregrossa
-Torok
-Tomkins
-Tomaino
-Tkach
-Tirey
-Tinsman
-Timpe
-Tiefenauer
-Tiedt
-Tidball
-Thwaites
-Thulin
-Throneburg
-Thorns
-Thorell
-Thorburn
-Thiemann
-Thieman
-Thesing
-Tham
-Terrien
-Terrance
-Telfair
-Taybron
-Tasson
-Tasso
-Tarro
-Tanenbaum
-Talent
-Tailor
-Taddeo
-Tada
-Taborn
-Tabios
-Szekely
-Szatkowski
-Sylve
-Swineford
-Swartzfager
-Swanton
-Swagerty
-Surrency
-Sunderlin
-Sumerlin
-Suero
-Suddith
-Sublette
-Stumpe
-Stueve
-Study
-Stuckert
-Strycker
-Struve
-Struss
-Strubbe
-Strough
-Strothmann
-Strahle
-Stoutner
-Stooksbury
-Stones
-Stonebarger
-Stokey
-Stoffer
-Stimmel
-Stief
-Stephans
-Stemper
-Steltenpohl
-Stellato
-Steinle
-Stegeman
-Steffler
-Steer
-Steege
-Steckman
-Stapel
-Stansbery
-Stanaland
-Stahley
-Stagnaro
-Stachowski
-Squibb
-Sprunger
-Sproule
-Sprehe
-Spreen
-Sprecher
-Sposato
-Spivery
-Souter
-Sopher
-Sommerfeldt
-Soffer
-Snowberger
-Snape
-Smylie
-Smyer
-Smack
-Slaydon
-Slatton
-Slaght
-Skovira
-Skeans
-Sjolund
-Sjodin
-Siragusa
-Singelton
-Sinatra
-Silis
-Siebenaler
-Shuffield
-Shobe
-Shiring
-Shimabukuro
-Shilts
-Sherley
-Sherbert
-Shelden
-Sheil
-Shedlock
-Shearn
-Shaub
-Sharbono
-Shapley
-Shands
-Shaheen
-Shaffner
-Servantez
-Sentz
-Seney
-Selin
-Seitzinger
-Seider
-Sehr
-Sego
-Segall
-Seeds
-Sebastien
-Scimeca
-Schwenck
-Schweiss
-Schwark
-Schwalbe
-Schucker
-Schronce
-Schrag
-Schouten
-Schoppe
-Schomaker
-Schnarr
-Schmied
-Schmader
-Schlicht
-Schlag
-Schield
-Schiano
-Scheve
-Scherbarth
-Schaumburg
-Schauman
-Scarpino
-Savinon
-Sassaman
-Sarah
-Saporito
-Sanville
-Santilli
-Santaana
-Sanda
-Salzmann
-Salman
-Saks
-Sagraves
-Safran
-Saccone
-Sa
-Rutty
-Russett
-Rupard
-Rump
-Rumbley
-Ruffins
-Ruacho
-Rozema
-Roxas
-Routson
-Rourk
-Rought
-Rotunda
-Rotermund
-Rosman
-Rosette
-Rork
-Rooke
-Rolin
-Rohm
-Rohlman
-Rohl
-Roeske
-Roecker
-Rober
-Robenson
-Riso
-Rinne
-Rima
-Riina
-Rigsbee
-Riggles
-Riester
-Rials
-Rhinehardt
-Reynaud
-Reyburn
-Rewis
-Revermann
-Reutzel
-Retz
-Rende
-Rendall
-Reistad
-Reinders
-Reichardt
-Rehrig
-Rehrer
-Recendez
-Reamy
-Raz
-Rauls
-Ratz
-Rattray
-Rasband
-Rapone
-Ragle
-Ragins
-Radican
-Raczka
-Rachels
-Raburn
-Rabren
-Raboin
-Ra
-Quesnell
-Quaintance
-Puccinelli
-Pruner
-Prouse
-Proud
-Prosise
-Proffer
-Prochazka
-Probasco
-Previte
-Prayer
-Pour
-Portell
-Porcher
-Popoca
-Poncho
-Pomroy
-Poma
-Polsky
-Polsgrove
-Polidore
-Podraza
-Plymale
-Plescia
-Pleau
-Platte
-Plato
-Pizzi
-Pinchon
-Picot
-Piccione
-Picazo
-Philibert
-Phebus
-Pfohl
-Petell
-Pesso
-Pesante
-Pervis
-Perrins
-Perley
-Perkey
-Pereida
-Penate
-Peloso
-Pellerito
-Peffley
-Peddicord
-Pecina
-Peale
-Peaks
-Payette
-Paxman
-Pawlikowski
-Pavy
-Pavlov
-Patry
-Patmon
-Patil
-Pater
-Patak
-Pasqua
-Pasche
-Partyka
-Parody
-Parmeter
-Pares
-Pardi
-Paonessa
-Pao
-Panozzo
-Panameno
-Paletta
-Pait
-Oyervides
-Ossman
-Oshima
-Ortlieb
-Orsak
-Orleans
-Onley
-On
-Oldroyd
-Okano
-Ohora
-Offley
-Oestreicher
-Odonovan
-Odham
-Odegard
-Obst
-Obriant
-Obrecht
-Nuccio
-Nowling
-Nowden
-Novelli
-Novell
-Nost
-Norstrom
-Norfolk
-Nordgren
-Nopper
-Noller
-Nisonger
-Niskanen
-Nienhuis
-Nienaber
-Neuwirth
-Neumeyer
-Neice
-Naugher
-Naiman
-Nagamine
-Mustin
-Murrietta
-Murdaugh
-Munar
-Mulberry
-Muhlbauer
-Mroczkowski
-Mowdy
-Mouw
-Mousel
-Mountcastle
-Moscowitz
-Mosco
-Morro
-Moresi
-Morago
-Moomaw
-Montroy
-Montpas
-Montieth
-Montanaro
-Mongelli
-Mon
-Mollison
-Mollette
-Moldovan
-Mohar
-Mizuno
-Mitchelle
-Mishra
-Misenheimer
-Minshall
-Minozzi
-Minniefield
-Minion
-Milhous
-Migliaccio
-Migdal
-Mickell
-Meyering
-Methot
-Mester
-Mesler
-Meriweather
-Mensing
-Mensah
-Menge
-Mendola
-Mendibles
-Meloche
-Melnik
-Mellas
-Meinert
-Mehrhoff
-Medas
-Meckler
-Mctague
-Mcspirit
-Mcshea
-Mcquown
-Mcquiller
-Mclarney
-Mckiney
-Mckearney
-Mcguyer
-Mcfarlan
-Mcfadyen
-Mcdanial
-Mcdanel
-Mccurtis
-Mccrohan
-Mccorry
-Mcclune
-Mccant
-Mccanna
-Mccandlish
-Mcaloon
-Mayall
-Maver
-Maune
-Matza
-Matty
-Matsuzaki
-Matott
-Mathey
-Mateos
-Masoner
-Masino
-Mas
-Marzullo
-Marz
-Maryland
-Marsolek
-Marquard
-Mario
-Marchetta
-Marberry
-Manzione
-Many
-Manthei
-Manka
-Mangram
-Mangle
-Mangel
-Mandato
-Mancillas
-Mammen
-Malina
-Maletta
-Malecki
-Majkut
-Mages
-Maestre
-Macphail
-Maco
-Macneill
-Macadam
-Lysiak
-Lyne
-Luxton
-Luptak
-Lundmark
-Luginbill
-Lovallo
-Louthan
-Lousteau
-Loupe
-Lotti
-Lopresto
-Lonsdale
-Longsworth
-Lohnes
-Loghry
-Logemann
-Lofaro
-Loeber
-Locastro
-Livings
-Litzinger
-Litts
-Liotta
-Lingard
-Lineback
-Lindy
-Lindhorst
-Lill
-Lide
-Lickliter
-Liberman
-Lewinski
-Levandowski
-Leimbach
-Leifer
-Leidholt
-Leiby
-Leibel
-Leibee
-Lehrke
-Lehnherr
-Lego
-Leese
-Leen
-Ledo
-Lech
-Leblond
-Leap
-Leahey
-Lazzari
-Lawrance
-Lawlis
-Lawhorne
-Lawes
-Lavigna
-Lavell
-Lauzier
-Lauter
-Laumann
-Latsha
-Latourette
-Latona
-Latney
-Laska
-Larner
-Larmore
-Larke
-Larence
-Lapier
-Lanzarin
-Lands
-Lammey
-Lamke
-Laminack
-Lamastus
-Lamaster
-Lacewell
-Labarr
-Laabs
-Kutch
-Kuper
-Kuna
-Kubis
-Krzemien
-Krupinski
-Krepps
-Kreeger
-Kraner
-Krammer
-Kountz
-Kothe
-Korpela
-Komara
-Kolenda
-Kolek
-Kohnen
-Koelzer
-Koelsch
-Kocurek
-Knoke
-Knauff
-Knaggs
-Knab
-Kluver
-Klose
-Klien
-Klahr
-Kitagawa
-Kissler
-Kirstein
-Kinnon
-Kinnebrew
-Kinnamon
-Kimmins
-Kilgour
-Kilcoyne
-Kiester
-Kiehm
-Kha
-Kesselring
-Kerestes
-Kenniston
-Kennamore
-Kenebrew
-Kelderman
-Keitel
-Kefauver
-Katzenberger
-Katt
-Kast
-Kassel
-Kasey
-Karol
-Kamara
-Kalmbach
-Kaizer
-Kaiwi
-Kainz
-Jurczyk
-Jumonville
-Juliar
-Jourdain
-Johndrow
-Johanning
-Johannesen
-Joffrion
-Jobes
-Jerde
-Jentzsch
-Jenkens
-Jendro
-Jellerson
-Jefferds
-Jaure
-Jaquish
-Janeway
-Jago
-Iwasaki
-Ishman
-Isaza
-Inmon
-Inlow
-Inclan
-Ildefonso
-Ike
-Iezzi
-Ianni
-Iacovetto
-Hyldahl
-Huxhold
-Huser
-Humpherys
-Humburg
-Hult
-Hullender
-Hulburt
-Huckabay
-Howeth
-Hovermale
-Hoven
-Houtman
-Hourigan
-Hosek
-Hopgood
-Homrich
-Holstine
-Holsclaw
-Hokama
-Hoffpauir
-Hoffner
-Hochstein
-Hochstatter
-Hochberg
-Hjelm
-Hiscox
-Hinsley
-Hinks
-Hineman
-Hineline
-Hinck
-Hilbun
-Hewins
-Herzing
-Hertzberg
-Hertenstein
-Herrea
-Herington
-Hercules
-Henrie
-Henman
-Hengst
-Hemmen
-Helmke
-Helgerson
-Heinsohn
-Heigl
-Hegstad
-Heggen
-Hegge
-Hefti
-Heathcock
-Haylett
-Haupert
-Haufler
-Hatala
-Haslip
-Hartless
-Hartje
-Hartis
-Harpold
-Harmsen
-Harbach
-Hanten
-Hanington
-Hammen
-Hameister
-Hallstrom
-Habersham
-Habegger
-Gussman
-Gundy
-Guitterez
-Guisinger
-Guilfoyle
-Groulx
-Grismer
-Griesbach
-Grawe
-Grall
-Graft
-Graben
-Goulden
-Gornick
-Gori
-Gookin
-Gonzalaz
-Gonyer
-Gonder
-Golphin
-Goller
-Goergen
-Glosson
-Glor
-Gladin
-Girdler
-Gillim
-Gillians
-Gillaspie
-Gilhooly
-Gildon
-Gignac
-Gibler
-Gibbins
-Giardino
-Giampietro
-Gettman
-Gerringer
-Gerrald
-Gerlich
-Georgiou
-Georgia
-Georgi
-Geiselman
-Gehman
-Gauze
-Gangl
-Gamage
-Gallian
-Gallen
-Gallatin
-Galen
-Galea
-Gainor
-Gahr
-Furbush
-Fulfer
-Fuhrmann
-Fritter
-Friis
-Friendly
-Friedly
-Freudenberger
-Frees
-Freemon
-Fratus
-Frans
-Foulke
-Fosler
-Forquer
-Fontan
-Folwell
-Folds
-Foeller
-Fodge
-Fobes
-Florek
-Fliss
-Flight
-Flesner
-Flegel
-Fitzloff
-Fiser
-First
-Firmin
-Firestine
-Finfrock
-Fineberg
-Figures
-Fiegel
-Fickling
-Fesperman
-Fernadez
-Felber
-Feimster
-Feazel
-Favre
-Faughn
-Fatula
-Fasone
-Farron
-Faron
-Farino
-Falvey
-Falkenberg
-Faley
-Faletti
-Faeth
-Fackrell
-Ezekiel
-Espe
-Eskola
-Escott
-Esaw
-Erps
-Erker
-Erath
-Enfield
-Emfinger
-Embury
-Embleton
-Emanuele
-Em
-Elvers
-Ellwanger
-Ellegood
-Einstein
-Eichinger
-Egge
-Egeland
-Edgett
-Echard
-Eblen
-Eastmond
-Duteau
-Durland
-Dure
-Dunlavy
-Dungee
-Dukette
-Dugay
-Duboise
-Dubey
-Dsouza
-Druck
-Dralle
-Doubek
-Dorta
-Dorch
-Dorce
-Dopson
-Dolney
-Dockter
-Distler
-Diss
-Dippel
-Diperna
-Dina
-Dichiara
-Dicerbo
-Dewindt
-Dewan
-Deveney
-Devargas
-Deutscher
-Deuel
-Detter
-Dess
-Derrington
-Deroberts
-Dern
-Deponte
-Denogean
-Denardi
-Denard
-Demary
-Demarcus
-Demarais
-Delucas
-Deloe
-Delmonico
-Delisi
-Delio
-Delduca
-Delaine
-Deihl
-Dehmer
-Deep
-Decoste
-Dechick
-Decatur
-Dec
-Debruce
-Debold
-Debell
-Deats
-Daunt
-Daquilante
-Dambrosi
-Damas
-Dalin
-Daisy
-Dahman
-Dahlem
-Daffin
-Dacquel
-Cutrell
-Cusano
-Curtner
-Currens
-Curnow
-Cuppett
-Cummiskey
-Cullers
-Culhane
-Crull
-Crossin
-Cropsey
-Cromie
-Crofford
-Criscuolo
-Crisafulli
-Crego
-Creeden
-Covello
-Covel
-Corse
-Correra
-Corners
-Cordner
-Cordier
-Coplen
-Copeman
-Contini
-Conteras
-Consalvo
-Conduff
-Condo
-Compher
-Comas
-Colliver
-Colan
-Cohill
-Cohenour
-Cogliano
-Codd
-Cockayne
-Clum
-Clowdus
-Clarida
-Clance
-Clairday
-Clagg
-Citron
-Citino
-Ciriello
-Cicciarelli
-Chrostowski
-Christley
-Christians
-Chrisco
-Chris
-Chrest
-Chisler
-Chieffo
-Cherne
-Cherico
-Cherian
-Cheirs
-Chauhan
-Charter
-Chamblin
-Cerra
-Cepero
-Cellini
-Celia
-Celeste
-Celedon
-Cejka
-Cavagnaro
-Cauffman
-Catanese
-Castrillo
-Castrellon
-Casserly
-Casino
-Caseres
-Carthen
-Carse
-Carragher
-Carpentieri
-Carmony
-Carmer
-Carlozzi
-Caradine
-Cappola
-Capece
-Capaldi
-Cantres
-Cantos
-Canevari
-Canete
-Calcaterra
-Cal
-Cadigan
-Cabbell
-Byrn
-Bykowski
-Butchko
-Busler
-Bushaw
-Buschmann
-Burow
-Buri
-Burgman
-Bunselmeyer
-Bunning
-Buhrman
-Budnick
-Buckson
-Buckhannon
-Brunjes
-Brummel
-Brumleve
-Bruckman
-Brouhard
-Brougham
-Brostrom
-Broerman
-Brocks
-Brison
-Brining
-Brindisi
-Brereton
-Breon
-Breitling
-Breedon
-Brasseaux
-Branaman
-Bramon
-Brackenridge
-Boyan
-Boxley
-Bouman
-Bouillion
-Botting
-Botti
-Bosshart
-Borup
-Borner
-Bordonaro
-Boot
-Bonsignore
-Bonsall
-Bolter
-Bojko
-Bohne
-Bohlmann
-Bogus
-Bogdon
-Boen
-Bodenschatz
-Bockoven
-Bobrow
-Blondin
-Blissett
-Bligen
-Blasini
-Blankenburg
-Bjorkman
-Bistline
-Bisset
-Birdow
-Biondolillo
-Bielski
-Biele
-Biddix
-Biddinger
-Bianchini
-Bevens
-Bevard
-Betancur
-Bernskoetter
-Bernet
-Bernardez
-Berliner
-Berland
-Berkheimer
-Berent
-Bensch
-Benesch
-Belleau
-Bedingfield
-Beckstrom
-Beckim
-Bechler
-Beachler
-Bazzell
-Basa
-Bartoszek
-Barsch
-Barrell
-Barnas
-Barnaba
-Barillas
-Barbier
-Baltodano
-Baltierra
-Balle
-Balint
-Baldi
-Balderson
-Balderama
-Baldauf
-Balcazar
-Balay
-Baiz
-Bairos
-Baba
-Azim
-Axe
-Aversa
-Avellaneda
-Ausburn
-Aurelio
-Auila
-Augusto
-Atwill
-Artiles
-Arterberry
-Aro
-Arnow
-Arnaud
-Arnall
-Armando
-Argyle
-Ares
-Arenz
-Arduini
-Archila
-Arakawa
-Appleman
-Aplin
-Antonini
-Anstey
-Anglen
-Andros
-Amweg
-Amstutz
-Amari
-Amadeo
-Aly
-Alteri
-Aloi
-Allebach
-Allah
-Aley
-Alamillo
-Airhart
-Ahrendt
-Africa
-Aegerter
-Adragna
-Admas
-Adderly
-Adderley
-Addair
-Abelar
-Abbamonte
-Abadi
-Zurek
-Zundel
-Zuidema
-Zuelke
-Zuck
-Zogg
-Zody
-Zets
-Zech
-Zecca
-Zavaleta
-Zarr
-Yousif
-Yoes
-Yoast
-Yeagley
-Yaney
-Yanda
-Yackel
-Wyles
-Wyke
-Woolman
-Woollard
-Woodis
-Woodin
-Wonderly
-Wombles
-Woloszyn
-Wollam
-Wnek
-Wms
-Wittie
-Withee
-Wissman
-Wisham
-Wintle
-Winthrop
-Winokur
-Winch
-Wilmarth
-Willhoite
-Wildner
-Wikel
-Wieser
-Wien
-Wicke
-Wiatrek
-Whitehall
-Whetstine
-Wheelus
-Weyrauch
-Weyers
-Westerling
-Wendelken
-Welner
-Welder
-Weinreb
-Weinheimer
-Weilbacher
-Weihe
-Weider
-Wecker
-Wead
-Watler
-Watkinson
-Wasmer
-Waskiewicz
-Wasik
-Warneke
-Wares
-Wangerin
-Wamble
-Walken
-Waker
-Wakeley
-Wahlgren
-Wahlberg
-Wagler
-Wachob
-Vorhies
-Vonseggern
-Vittitow
-Virgilio
-Vink
-Villarruel
-Villamil
-Villamar
-Villalovos
-Vidmar
-Victorero
-Vespa
-Vertrees
-Verissimo
-Veltman
-Vecchione
-Veals
-Varrone
-Varma
-Vanveen
-Vanterpool
-Vaneck
-Vandyck
-Vancise
-Vanausdal
-Vanalphen
-Valdiviezo
-Urton
-Urey
-Updegrove
-Unrue
-Ulbrich
-Tysinger
-Tyo
-Twiddy
-Tunson
-Trueheart
-Troyan
-Trier
-Traweek
-Trafford
-Tozzi
-Toulouse
-Touch
-Tosto
-Toste
-Torez
-Tooke
-Tonini
-Tonge
-Tomerlin
-Tolmie
-Tobe
-Tippen
-Tierno
-Tichy
-Thuss
-Threat
-Thran
-Thornbury
-Thone
-Theunissen
-Thelmon
-Theall
-Textor
-Teters
-Tesh
-Tennis
-Teng
-Tench
-Tekautz
-Tehrani
-Teat
-Teas
-Teare
-Te
-Tavenner
-Tartaglione
-Tanski
-Tanis
-Tanguma
-Tangeman
-Taney
-Tammen
-Tamburri
-Tamburello
-Talsma
-Tallie
-Takeda
-Taira
-Taheri
-Tademy
-Taddei
-Taaffe
-Szymczak
-Szczepaniak
-Szafranski
-Swygert
-Swem
-Swartzlander
-Sutley
-Supernaw
-Sundell
-Sullivant
-Suderman
-Sudbury
-Suares
-Stueber
-Stromme
-Striker
-Streeper
-Streck
-Strebe
-Stonehouse
-Stoia
-Stohr
-Stodghill
-Stirewalt
-Stick
-Sterry
-Stephanie
-Stenstrom
-Stene
-Steinbrecher
-Stear
-Stdenis
-Stanphill
-Staniszewski
-Stanard
-Stahlhut
-Stachowicz
-Srivastava
-Spong
-Spomer
-Spinosa
-Spindel
-Spera
-Spark
-Soward
-Sopp
-Sooter
-Sonnek
-Sonne
-Soland
-Sojourner
-Soeder
-Sobolewski
-Snellings
-Snare
-Smola
-Smetana
-Smeal
-Smarr
-Sloma
-Sligar
-Skenandore
-Skalsky
-Sitter
-Sissom
-Sirko
-Simkin
-Silverthorn
-Silman
-Sikkink
-Signorile
-Siddens
-Shumsky
-Shrider
-Shoulta
-Shonk
-Shomaker
-Shippey
-Shimada
-Shillingburg
-Shifflet
-Shiels
-Shepheard
-Sheerin
-Shedden
-Sheckles
-Sharrieff
-Sharpley
-Shappell
-Shaneyfelt
-Shampine
-Shaefer
-Shaddock
-Shadd
-Sforza
-Severtson
-Setzler
-Sepich
-Senne
-Senatore
-Sementilli
-Selway
-Selover
-Sellick
-Seigworth
-Sefton
-Seegars
-Sebourn
-Seaquist
-Sealock
-Seabreeze
-Scriver
-Scinto
-Schumer
-Schulke
-Schryver
-Schriner
-Schramek
-Schoon
-Schoolfield
-Schonberger
-Schnieder
-Schnider
-Schlitz
-Schlather
-Schirtzinger
-Scherman
-Schenker
-Scheiner
-Scheible
-Schaus
-Schakel
-Schaad
-Saxe
-Savely
-Savary
-Sardinas
-Santarelli
-Sanschagrin
-Sans
-Sanpedro
-Sanjose
-Sandra
-Sandine
-Sandigo
-Sandgren
-Sanderford
-Sandahl
-Salzwedel
-Salzar
-Salvino
-Salvatierra
-Salminen
-Salierno
-Salberg
-Sahagun
-Saelee
-Sabel
-Rynearson
-Ryker
-Rupprecht
-Runquist
-Rumrill
-Ruhnke
-Rovira
-Rottenberg
-Rosoff
-Rosete
-Rosebrough
-Roppolo
-Roope
-Romas
-Roley
-Rohrback
-Rohlfs
-Rogriguez
-Roel
-Rodriguiz
-Rodewald
-Roback
-Rizor
-Ritt
-Rippee
-Riolo
-Rinkenberger
-Riggsby
-Rigel
-Rieman
-Riedesel
-Rideau
-Ricke
-Rhinebolt
-Rheault
-Revak
-Relford
-Reinsmith
-Reichmann
-Rei
-Regula
-Redlinger
-Redhead
-Rayno
-Raycroft
-Rave
-Raus
-Raupp
-Rathmann
-Rastorfer
-Rasey
-Raponi
-Rantz
-Ranno
-Ranes
-Randal
-Ramp
-Ramnauth
-Rahal
-Raddatz
-Quattrocchi
-Quang
-Purchase
-Pullis
-Pulanco
-Pryde
-Prohaska
-Primiano
-Prez
-Prevatt
-Prechtl
-Pottle
-Potenza
-Portes
-Porowski
-Poppleton
-Pontillo
-Pong
-Polka
-Politz
-Politi
-Poggi
-Plonka
-Plaskett
-Placzek
-Pizzuti
-Pizzaro
-Pisciotta
-Pippens
-Pinkins
-Pinilla
-Pini
-Pingitore
-Piercey
-Pickup
-Piccola
-Piccioni
-Picciano
-Phy
-Philps
-Philp
-Philo
-Philmon
-Philbin
-Pflieger
-Pezzullo
-Petruso
-Petrea
-Petitti
-Peth
-Peshlakai
-Peschel
-Persico
-Persichetti
-Persechino
-Perris
-Perlow
-Perico
-Pergola
-Penniston
-Pembroke
-Pellman
-Pekarek
-Peirson
-Pearcey
-Pealer
-Pavlicek
-Passino
-Pasquarello
-Pasion
-Parzych
-Parziale
-Parga
-Papalia
-Papadakis
-Paino
-Pacini
-Oyen
-Ownes
-Owczarzak
-Outley
-Ouelette
-Ottosen
-Otting
-Ostwinkle
-Osment
-Oshita
-Osario
-Orlow
-Oriordan
-Orefice
-Orantes
-Oran
-Orahood
-Opel
-Olpin
-Oliveria
-Okon
-Okerlund
-Okazaki
-Ohta
-Offerman
-Nyce
-Nutall
-Northey
-Norcia
-Noor
-Noh
-Niehoff
-Niederhauser
-Nickolson
-Nguy
-Neylon
-Newstrom
-Nevill
-Netz
-Nesselrodt
-Nemes
-Neally
-Nauyen
-Nascimento
-Nardella
-Nanni
-Myren
-Murchinson
-Munter
-Munster
-Mundschenk
-Mujalli
-Muckleroy
-Mu
-Moussa
-Mouret
-Moulds
-Mottram
-Motte
-Mosey
-Morre
-Montreuil
-Monton
-Montellano
-Monninger
-Monhollen
-Mongeon
-Monestime
-Monegro
-Mondesir
-Monceaux
-Mola
-Moga
-Moening
-Moccia
-Misko
-Miske
-Mishaw
-Minturn
-Mingione
-Minerva
-Milstein
-Milos
-Milla
-Milks
-Milhouse
-Michl
-Micheletti
-Michals
-Mesia
-Merson
-Meras
-Menifee
-Meluso
-Mella
-Melick
-Mehlman
-Meffert
-Medoza
-Mecum
-Meaker
-Meahl
-Mczeal
-Mcwatters
-Mcomber
-Mcmonigle
-Mckiddy
-Mcgranor
-Mcgeary
-Mcgaw
-Mcenery
-Mcelderry
-Mcduffey
-Mccuistion
-Mccrudden
-Mccrossin
-Mccosh
-Mccolgan
-Mcclish
-Mcclenahan
-Mcclam
-Mccartt
-Mccarrell
-Mcbane
-Mc
-Maybury
-Mayben
-Maw
-Maulden
-Mauceri
-Matko
-Mathie
-Matheis
-Mathai
-Masucci
-Massiah
-Martorano
-Martnez
-Martindelcamp
-Marschke
-Marovich
-Markiewicz
-Marinaccio
-Marhefka
-Marcrum
-Manton
-Mantel
-Mannarino
-Manlove
-Mangham
-Manasco
-Malpica
-Mallernee
-Malinsky
-Malhotra
-Maish
-Maisel
-Mainville
-Maharrey
-Magid
-Maertz
-Mada
-Maclaughlin
-Macina
-Macdermott
-Macallister
-Macadangdang
-Maack
-Lynk
-Lydic
-Luyando
-Lutke
-Lupinacci
-Lunz
-Lundsten
-Lull
-Lujano
-Luhn
-Luecke
-Luebbe
-Ludolph
-Luckman
-Lucker
-Luckenbill
-Luckenbach
-Lucido
-Lowney
-Lowitz
-Lovaglio
-Louro
-Louk
-Loudy
-Louderback
-Lorick
-Lorenzini
-Lorensen
-Lorenc
-Lomuscio
-Loguidice
-Lockner
-Lockart
-Lochridge
-Litaker
-Lisowe
-Liptrap
-Linnane
-Linhares
-Lindfors
-Lindenmuth
-Lincourt
-Lina
-Like
-Liew
-Lies
-Liebowitz
-Levengood
-Leskovec
-Lesch
-Leoni
-Lennard
-Legner
-Leaser
-Leas
-Lean
-Leadingham
-Lazarski
-Layland
-Laurito
-Laulu
-Laughner
-Laughman
-Laughery
-Laube
-Latiolais
-Lasserre
-Lasser
-Lars
-Larrow
-Larrea
-Lapsley
-Lantrip
-Lanthier
-Langwell
-Langelier
-Landaker
-Lampi
-Lamond
-Lamblin
-Lambie
-Lakins
-Laipple
-Lagrimas
-Lafrancois
-Laffitte
-Laday
-Lacko
-Lacava
-Labor
-Labianca
-Kutsch
-Kuske
-Kunert
-Kubly
-Kuamoo
-Krummel
-Krise
-Krenek
-Kreiser
-Krausz
-Kraska
-Krakowski
-Kradel
-Kozik
-Koza
-Kotowski
-Koslow
-Korber
-Kojima
-Kochel
-Knabjian
-Klunder
-Klugh
-Klinkhammer
-Kliewer
-Klever
-Kleber
-Klages
-Klaas
-Kizziar
-Kitchel
-Kishimoto
-Kirschenman
-Kirschenbaum
-Kinnick
-Kinn
-Kinkle
-Kiner
-Kindla
-Kindall
-Kincaide
-Kilson
-Killins
-Kill
-Kightlinger
-Kienzle
-Kiah
-Khim
-Ketcherside
-Kerl
-Kelsoe
-Kelker
-Keizer
-Keir
-Keepers
-Kawano
-Kawa
-Kaveney
-Kath
-Kasparek
-Kaplowitz
-Kantrowitz
-Kant
-Kanoff
-Kano
-Kann
-Kamalii
-Kalt
-Kaleta
-Kalbach
-Kalauli
-Kalata
-Kalas
-Kaigler
-Kachel
-Juran
-Jubb
-Jonker
-Jonke
-Jolivette
-Joles
-Joas
-Jividen
-Jewel
-Jeffus
-Jeanty
-Jarvi
-Jardon
-Janvier
-Janosko
-Janoski
-Janiszewski
-Janish
-Janek
-Iwanski
-Iuliano
-Isabella
-Irle
-Ingmire
-Imber
-Ijames
-Iiams
-Ihrig
-Ichikawa
-Hynum
-Hutzel
-Hutts
-Huskin
-Husak
-Hurndon
-Huntsinger
-Humm
-Hulette
-Huitron
-Huguenin
-Hugg
-Hugee
-Huelskamp
-Huch
-Howen
-Hovanec
-Hoston
-Hostettler
-Horsfall
-Horodyski
-Holzhauer
-Hollimon
-Hollender
-Hogarth
-Hoffelmeyer
-Histand
-Hissem
-Hisel
-Hirayama
-Hinegardner
-Hinde
-Hinchcliffe
-Hiltbrand
-Hilsinger
-Hillstrom
-Hiley
-Hickenbottom
-Hickam
-Hibley
-Heying
-Hewson
-Hetland
-Hersch
-Herlong
-Herda
-Henzel
-Henshall
-Hendler
-Hence
-Helson
-Helfen
-Heinbach
-Heikkila
-Heggs
-Hefferon
-Hebard
-Heathcote
-Hearl
-Heaberlin
-Hauth
-Hauschild
-Haughney
-Hauch
-Hattori
-Haste
-Hasley
-Hartpence
-Harroun
-Harrier
-Harelson
-Hardgrove
-Hardel
-Hansbrough
-Handsome
-Handshoe
-Handly
-Haluska
-Hally
-Halling
-Halfhill
-Halferty
-Hakanson
-Haist
-Hairgrove
-Hahner
-Hagg
-Hafele
-Haaland
-Guttierez
-Gutknecht
-Gunnarson
-Gunlock
-Gummersheimer
-Gullatte
-Guity
-Guilmette
-Guhl
-Guenette
-Guardino
-Groshong
-Grober
-Gripp
-Grillot
-Grilli
-Greulich
-Gretzinger
-Greenwaldt
-Graven
-Grassman
-Granberg
-Graeser
-Graeff
-Graef
-Grabow
-Grabau
-Gotchy
-Goswick
-Gosa
-Gordineer
-Gorczyca
-Goodchild
-Golz
-Gollihue
-Goldwire
-Goldbach
-Goffredo
-Glassburn
-Glaeser
-Gillilan
-Gigante
-Giere
-Gieger
-Gidcumb
-Giarrusso
-Giannelli
-Gettle
-Gesualdi
-Geschke
-Gerwig
-Gervase
-Geoffrion
-Gentilcore
-Genther
-Gemes
-Gemberling
-Gelles
-Geitz
-Geeslin
-Gedney
-Gebauer
-Gaye
-Gawron
-Gavia
-Gautney
-Gaustad
-Gasmen
-Gargus
-Ganske
-Ganger
-Galvis
-Gallinger
-Gallichio
-Galletta
-Gaede
-Gadlin
-Gaby
-Gabrielsen
-Gaboriault
-Furlan
-Furgerson
-Fujioka
-Fugett
-Fuehrer
-Frisco
-Frint
-Frigon
-Frevert
-Frautschi
-Fraker
-Fradette
-Foulkes
-Forslund
-Forni
-Foo
-Fontenette
-Fones
-Folz
-Folmer
-Follman
-Folkman
-Flourney
-Flickner
-Flemmings
-Fleischacker
-Flander
-Flament
-Fithian
-Fister
-Fiorello
-Fiorelli
-Fioravanti
-Fieck
-Ficke
-Fiallos
-Fiacco
-Feuer
-Ferrington
-Fernholz
-Feria
-Fergurson
-Feick
-Febles
-Favila
-Faulkingham
-Fath
-Farnam
-Falter
-Fakhouri
-Fairhurst
-Failing
-Fahs
-Eva
-Estrello
-Essick
-Espree
-Esmond
-Eskelson
-Escue
-Escatel
-Erebia
-Epperley
-Epler
-Enyart
-Engelbert
-Enderson
-Emmitt
-Emch
-Elisondo
-Eli
-Elford
-El
-Ekman
-Eick
-Eichmann
-Ehrich
-Ehlen
-Edwardson
-Edley
-Edghill
-Edel
-Eastes
-Easterbrooks
-Eagleson
-Eagen
-Eade
-Dyle
-Dutkiewicz
-Dunnagan
-Duncil
-Duling
-Drumgoole
-Droney
-Dreyfus
-Dragan
-Dowty
-Doscher
-Dornan
-Doremus
-Doogan
-Donaho
-Donahey
-Dombkowski
-Dolton
-Dolen
-Dobratz
-Diveley
-Dittemore
-Ditsch
-Disque
-Dishmon
-Disch
-Dirickson
-Dippolito
-Dimuccio
-Dilger
-Diefenderfer
-Dicola
-Diblasio
-Dibello
-Devan
-Dettmer
-Deschner
-Desbiens
-Derusha
-Denkins
-Demonbreun
-Demchak
-Delucchi
-Delprete
-Deloy
-Deliz
-Deline
-Delap
-Deiter
-Deignan
-Degiacomo
-Degaetano
-Defusco
-Dede
-Deboard
-Debiase
-Deaville
-Deadwyler
-Davanzo
-Daughton
-Darter
-Darrin
-Danser
-Dandrade
-Dando
-Dampeer
-Dalziel
-Dalen
-Dain
-Dai
-Dague
-Czekanski
-Cutwright
-Cutliff
-Curle
-Cuozzo
-Cunnington
-Cunning
-Cunnigham
-Cumings
-Crowston
-Croak
-Crittle
-Crispell
-Crisostomo
-Crear
-Creach
-Craigue
-Crabbs
-Cozzi
-Cozza
-Coxe
-Cowsert
-Coviello
-Couse
-Coull
-Cottier
-Costagliola
-Corra
-Corpening
-Cormany
-Corless
-Corkern
-Conteh
-Conquest
-Conkey
-Cones
-Conditt
-Conaty
-Colomb
-Collura
-Colledge
-Colins
-Colgate
-Coleson
-Colemon
-Coins
-Coffland
-Coccia
-Coast
-Clougherty
-Clewell
-Cleckley
-Cleaveland
-Clarno
-Clamp
-Civils
-Cillo
-Cifelli
-Ciesluk
-Chum
-Chui
-Christison
-Christiana
-Chowning
-Chouteau
-Choung
-Childres
-Cherrington
-Chenette
-Cheeves
-Cheairs
-Chaddock
-Cernoch
-Cerino
-Cazier
-Cathy
-Castel
-Casselberry
-Caserta
-Carvey
-Carton
-Cart
-Carry
-Carris
-Carrie
-Carmant
-Cariello
-Cardarelli
-Caras
-Caracciolo
-Capitano
-Cantoni
-Cantave
-Cancio
-Campillo
-Cam
-Callens
-Caldero
-Calamia
-Cahee
-Cahan
-Cahalan
-Cabanilla
-Cabal
-Bywater
-Bynes
-Byassee
-Butkus
-Busker
-Bushby
-Busack
-Burtis
-Burrola
-Buroker
-Burnias
-Burn
-Burlock
-Burham
-Burak
-Bulla
-Buffin
-Buffa
-Buening
-Budney
-Buchannan
-Buchalter
-Bua
-Brule
-Brugler
-Broxson
-Broun
-Brosh
-Brissey
-Brisby
-Brinlee
-Brinkmeyer
-Brimley
-Brickell
-Breth
-Breger
-Brees
-Brank
-Braker
-Bozak
-Bowlds
-Bowersock
-Bousman
-Boushie
-Botz
-Bordwell
-Bonkowski
-Bonine
-Bonifay
-Bonesteel
-Boldin
-Bohringer
-Bohlander
-Boecker
-Bocook
-Bocock
-Boblett
-Bobbett
-Boas
-Boarman
-Bleser
-Blazejewski
-Blaustein
-Blausey
-Blancarte
-Blaize
-Blackson
-Blacketer
-Blackard
-Bisch
-Birchett
-Billa
-Bilder
-Bierner
-Bienvenu
-Bielinski
-Bialas
-Biagini
-Beynon
-Beyl
-Bettini
-Bethany
-Betcher
-Bessent
-Beshara
-Besch
-Bernd
-Bergemann
-Bergeaux
-Berdan
-Bens
-Benedicto
-Bendall
-Beltron
-Beltram
-Bellville
-Beisch
-Behney
-Beemer
-Beechler
-Beckum
-Becks
-Batzer
-Batte
-Bastida
-Bassette
-Basley
-Base
-Bartosh
-Bartolone
-Barraclough
-Barnick
-Barket
-Barkdoll
-Baringer
-Barges
-Barella
-Barbian
-Barbati
-Bannan
-Banderas
-Balles
-Baldo
-Balasubramani
-Bala
-Baig
-Bahn
-Bachmeier
-Babyak
-Baas
-Baars
-Ayuso
-Axt
-Avinger
-Avella
-Ausbrooks
-Aull
-Augello
-Atkeson
-Atkerson
-Atherley
-Athan
-Assad
-Asebedo
-Arrison
-Armon
-Armfield
-Armbrust
-Arlington
-Arkin
-Archambeau
-Antonellis
-Angotti
-Andy
-Amorose
-Amini
-Amborn
-Amano
-Aluarez
-Alma
-Allgaier
-Allegood
-Ales
-Alen
-Aldama
-Albertine
-Aki
-Aird
-Ahsing
-Ahmann
-Aguado
-Agostino
-Agostinelli
-Agnes
-Adwell
-Adsit
-Adelstein
-Ade
-Actis
-Acierno
-Achee
-Abbs
-Abbitt
-Zwagerman
-Zuercher
-Zinno
-Zettler
-Zeff
-Zavalza
-Zaugg
-Zarzycki
-Zappulla
-Zanotti
-Zachman
-Zacher
-Yundt
-Yslas
-Younes
-Yontz
-Yglesias
-Yeske
-Yellow
-Yeargin
-Yauger
-Yamane
-Xang
-Wylam
-Wrobleski
-Wratchford
-Worker
-Woodlee
-Wolsey
-Wolfinbarger
-Wohlenhaus
-Wittler
-Wittenmyer
-Witkop
-Wishman
-Wintz
-Winkelmann
-Windus
-Winborn
-Wims
-Wiltrout
-Wilshire
-Willmott
-Williston
-Wilemon
-Wilbourne
-Wiedyk
-Widmann
-Wickland
-Wickes
-Wichert
-Whitsell
-Whisenand
-Whidby
-Wetz
-Westmeyer
-Wertheim
-Wernert
-Werle
-Werkheiser
-Weng
-Weldin
-Weissenborn
-Weingard
-Weinfeld
-Weihl
-Weightman
-Weichel
-Wehrheim
-Wegrzyn
-Wegmann
-Wearing
-Waszak
-Wankum
-Wangler
-Walthour
-Waltermire
-Walstad
-Waldren
-Walbert
-Walawender
-Wahlund
-Wahlert
-Wahlers
-Wach
-Vuncannon
-Vroom
-Vredenburgh
-Vonk
-Vollmar
-Voisinet
-Vlahos
-Viscardi
-Vires
-Vipperman
-Violante
-Vidro
-Vessey
-Vesper
-Veron
-Vergari
-Verbeck
-Venturino
-Velastegui
-Vegter
-Varas
-Vanwey
-Vanvranken
-Vanvalkenbur
-Vanorsdale
-Vanoli
-Vanochten
-Vanier
-Vanevery
-Vane
-Vanduser
-Vandersteen
-Vandell
-Vandall
-Vallot
-Vallon
-Vallez
-Vallely
-Vadenais
-Uthe
-Usery
-Unga
-Ultsch
-Ullom
-Tyminski
-Twogood
-Tursi
-Turay
-Tungate
-Truxillo
-Trulock
-Trovato
-Troise
-Tripi
-Trinks
-Trimboli
-Trickel
-Trezise
-Trefry
-Treen
-Trebilcock
-Travieso
-Trachtenberg
-Touhey
-Tougas
-Tortorella
-Tormey
-Torelli
-Torborg
-Toran
-Tomek
-Tomassi
-Tollerson
-Tolden
-Toda
-Tobon
-Tjelmeland
-Titmus
-Tilbury
-Tietje
-Thurner
-Thum
-Thrope
-Thornbrough
-Thibaudeau
-Thackeray
-Tesoro
-Territo
-Ternes
-Teich
-Tecson
-Teater
-Teagarden
-Tatsch
-Tarallo
-Tapanes
-Tanberg
-Tamm
-Sylvis
-Swenor
-Swedlund
-Swagger
-Sutfin
-Sura
-Sundt
-Sundin
-Summerson
-Sumatzkuku
-Sultemeier
-Sulivan
-Suggitt
-Suermann
-Sturkie
-Sturgess
-Stumph
-Stuemke
-Struckhoff
-Strose
-Stroder
-Stride
-Stricklen
-Strick
-Streib
-Strei
-Strawther
-Stratis
-Strahm
-Stortz
-Storrer
-Storino
-Stohler
-Stohl
-Stockel
-Stinnette
-Stile
-Stieber
-Stensland
-Steffenhagen
-Stefanowicz
-Steever
-Steagall
-Statum
-Stapley
-Stanish
-Standiford
-Standen
-Stamos
-Stahlecker
-Stadtler
-Spratley
-Spraker
-Sposito
-Spickard
-Spehar
-Spees
-Spearing
-Spangle
-Spallone
-Sox
-Soulard
-Sorel
-Sora
-Sopko
-Sood
-Sonnen
-Som
-Solly
-Solesbee
-Soldano
-Sobey
-Sobczyk
-Snedegar
-Sneddon
-Smolinski
-Smolik
-Slota
-Sloman
-Sleigh
-Slavick
-Skorupski
-Skolnik
-Skirvin
-Skeels
-Skains
-Skahan
-Skaar
-Siwiec
-Siverly
-Siver
-Sivak
-Sirk
-Sinton
-Sinor
-Sincell
-Silberstein
-Sieminski
-Sidelinger
-Shurman
-Shunnarah
-Shirer
-Shidler
-Sherlin
-Shepperson
-Shemanski
-Sharum
-Shartrand
-Shapard
-Shanafelt
-Shamp
-Shader
-Shackelton
-Seyer
-Seroka
-Sernas
-Seright
-Serano
-Sengupta
-Semper
-Selinger
-Seith
-Seidler
-Seehusen
-Seefried
-Seed
-Scovell
-Scorzelli
-Sconiers
-Schwind
-Schwichtenber
-Schwerin
-Schwenke
-Schwaderer
-Schussler
-Schuneman
-Schumpert
-Schultheiss
-Schroll
-Schroepfer
-Schroeden
-Schrimpf
-Schook
-Schoof
-Schomburg
-Schoenfeldt
-Schoener
-Schnoor
-Schmick
-Schlereth
-Schindele
-Schildt
-Schildknecht
-Schemmel
-Scharfenberg
-Schanno
-Schane
-Schaer
-Schad
-Scearce
-Scardino
-Sawka
-Sawinski
-Savoca
-Savery
-Saults
-Saucer
-Sarpy
-Saris
-Sardinha
-Sarafin
-Sankar
-Sanjurjo
-Sanderfer
-Sanagustin
-Samudio
-Sammartino
-Samas
-Salz
-Salmen
-Sallie
-Salkeld
-Salamon
-Sakurai
-Sakoda
-Safley
-Sada
-Sachse
-Ryden
-Ryback
-Russow
-Russey
-Ruprecht
-Rumple
-Ruffini
-Rudzinski
-Rudel
-Rudden
-Rud
-Rovero
-Routledge
-Roussin
-Rousse
-Rouser
-Rougeau
-Rosie
-Rosica
-Romey
-Romaniello
-Rolfs
-Rogoff
-Rogne
-Rodriquz
-Rodrequez
-Rodin
-Rocray
-Rocke
-Robbin
-Riviere
-Rivette
-Riske
-Risenhoover
-Rindfleisch
-Rinaudo
-Rimbey
-Riha
-Righi
-Ridner
-Ridling
-Riden
-Rhue
-Reyome
-Reynoldson
-Reusch
-Rensing
-Rensch
-Rennels
-Renderos
-Reininger
-Reiners
-Reigel
-Rehmer
-Regier
-Reff
-Reef
-Redlin
-Recchia
-Reaume
-Reagor
-Rayne
-Rawe
-Rattigan
-Raska
-Rashed
-Ranta
-Ranft
-Randlett
-Randa
-Ramiez
-Ramella
-Rallis
-Rajan
-Raisbeck
-Raimondo
-Raible
-Ragone
-Rackliffe
-Quirino
-Quiring
-Quero
-Quaife
-Pyke
-Purugganan
-Pursifull
-Purkett
-Purdon
-Punches
-Pun
-Pulos
-Pulling
-Puccia
-Provance
-Propper
-Preis
-Prehn
-Prata
-Prasek
-Pranger
-Pradier
-Portor
-Portley
-Porte
-Popiel
-Popescu
-Pomales
-Polowy
-Pollett
-Politis
-Polit
-Poley
-Pol
-Pohler
-Poggio
-Poet
-Podolak
-Poag
-Plymel
-Ploeger
-Planty
-Piskura
-Pirrone
-Pirro
-Piroso
-Pinsky
-Pile
-Pilant
-Pickerill
-Piccolomini
-Picart
-Piascik
-Phann
-Petruzzelli
-Petosa
-Persson
-Perretta
-Perkowski
-Perilli
-Percifield
-Perault
-Peppel
-Pember
-Pelotte
-Pelcher
-Peixoto
-Pehl
-Peatross
-Pearlstein
-Peacher
-Payden
-Paya
-Pawelek
-Pavey
-Pauda
-Pathak
-Parrillo
-Parness
-Parlee
-Paoli
-Pannebaker
-Palomar
-Palo
-Palmberg
-Paganelli
-Paffrath
-Padovano
-Padden
-Pachucki
-Over
-Ovando
-Othman
-Osowski
-Osler
-Osika
-Orsburn
-Orlowsky
-Oregel
-Oppelt
-Opfer
-Opdyke
-Onell
-Omer
-Olivos
-Okumura
-Okoro
-Ogas
-Offer
-Oelschlaeger
-Odette
-Oder
-Ocanas
-Obrion
-Obarr
-Oas
-Oare
-Nyhus
-Nyenhuis
-Nunnelley
-Nunamaker
-Nuckels
-Noyd
-Nowlan
-Novakovich
-Noteboom
-Norviel
-Nortz
-Norment
-Norland
-Nolt
-Nolie
-Nixson
-Nitka
-Nissley
-Nishiyama
-Niland
-Niewiadomski
-Niemeier
-Nieland
-Nickey
-Nicholsen
-Newark
-Neugent
-Neto
-Nerren
-Nein
-Neikirk
-Neigh
-Nedrow
-Neave
-Nazaire
-Navaro
-Navalta
-Nasworthy
-Nasif
-Nani
-Nalepa
-Nakao
-Nakai
-Nadolny
-Myklebust
-Mussel
-Murthy
-Muratore
-Murat
-Mundie
-Mulverhill
-Muilenburg
-Muetzel
-Mudra
-Mudgett
-Mrozinski
-Moura
-Mottinger
-Morson
-Moretto
-Morentin
-Mordan
-Mooreland
-Mooers
-Monts
-Montone
-Montondo
-Montiero
-Monserrate
-Monie
-Monat
-Monares
-Mollo
-Mollet
-Molacek
-Mokry
-Mohrmann
-Mohabir
-Mogavero
-Moes
-Moceri
-Miyoshi
-Mitzner
-Misra
-Mis
-Mirr
-Mira
-Minish
-Minge
-Minckler
-Milroy
-Mille
-Mileski
-Milanesi
-Miko
-Mihok
-Mihalik
-Mieczkowski
-Messerli
-Meskill
-Mesenbrink
-Merton
-Merryweather
-Merkl
-Menser
-Menner
-Menk
-Menden
-Menapace
-Melbourne
-Mekus
-Meinzer
-Mein
-Meers
-Mctigue
-Mcquitty
-Mcpheron
-Mcmurdie
-Mcleary
-Mclafferty
-Mckinzy
-Mckibbin
-Mckethan
-Mcintee
-Mcgurl
-Mceachran
-Mcdowall
-Mcdermitt
-Mccuaig
-Mccreedy
-Mccoskey
-Mcclosky
-Mcclintick
-Mccleese
-Mccanless
-Mazzucco
-Mazzocco
-Mazurkiewicz
-Mazariego
-Mayhorn
-Maxcy
-Mavity
-Mauzey
-Maulding
-Matuszewski
-Mattsson
-Mattke
-Matsushita
-Matsuno
-Matsko
-Matkin
-Mathur
-Mates
-Masterman
-Massett
-Massart
-Massari
-Mashni
-Martella
-Marren
-Margotta
-Marder
-Marczak
-Maran
-Maradiaga
-Manwarren
-Mantini
-Manter
-Mantelli
-Manso
-Mangone
-Manfredonia
-Malden
-Malboeuf
-Malanga
-Makara
-Maison
-Maisano
-Mairs
-Mailhiot
-Magri
-Magic
-Madron
-Madole
-Mackall
-Macduff
-Macartney
-Lynds
-Lusane
-Luffman
-Lua
-Louth
-Loughmiller
-Lougheed
-Lotspeich
-Lorenzi
-Loree
-Loosli
-Looker
-Longe
-Longanecker
-Lonero
-Lohmeyer
-Loeza
-Lobstein
-Lobner
-Lober
-Littman
-Litalien
-Lippe
-Lints
-Linear
-Lijewski
-Ligas
-Liebert
-Liebermann
-Liberati
-Lezcano
-Levinthal
-Lessor
-Less
-Lesieur
-Lenning
-Lengel
-Len
-Lempke
-Lemp
-Lemar
-Leitzke
-Leinweber
-Legrone
-Lege
-Leder
-Lawnicki
-Lauth
-Laun
-Laughary
-Latin
-Lassley
-Lashway
-Larrivee
-Largen
-Lare
-Lanouette
-Lanno
-Langille
-Langen
-Landing
-Lana
-Lamonte
-Lalin
-Lala
-Laible
-Lafratta
-Laforte
-Lacuesta
-Lacer
-Labore
-Laboe
-Labeau
-Kwasniewski
-Kunselman
-Kuhr
-Kuchler
-Kuc
-Krugman
-Kruckenberg
-Krotzer
-Kroemer
-Krist
-Krigbaum
-Kreke
-Kreisman
-Kreisler
-Kreft
-Krasnow
-Kras
-Krag
-Kouyate
-Kough
-Kotz
-Kostura
-Korner
-Kornblum
-Korczynski
-Koppa
-Kopczyk
-Konz
-Komorowski
-Kollen
-Kolander
-Koepnick
-Koehne
-Kochis
-Knoch
-Knippers
-Knaebel
-Klipp
-Klinedinst
-Klimczyk
-Klier
-Klement
-Klaphake
-Kisler
-Kinzie
-Kines
-Kindley
-Kimple
-Kimm
-Kimbel
-Kilker
-Kilborn
-Kibbey
-Khong
-Ketchie
-Kerbow
-Kennemore
-Kennebeck
-Kenneally
-Kenndy
-Kenmore
-Kemnitz
-Kemler
-Kemery
-Kelnhofer
-Kellstrom
-Kellis
-Kellams
-Keiter
-Keirstead
-Keeny
-Keelin
-Keefauver
-Keams
-Kautzman
-Kaus
-Katayama
-Kasson
-Kassim
-Kasparian
-Kase
-Karwoski
-Kapuscinski
-Kaneko
-Kamerling
-Kamada
-Kalka
-Kalar
-Kakacek
-Kaczmarczyk
-Jurica
-Junes
-Journell
-Jolliffe
-Johnsey
-Joel
-Jindra
-Jimenz
-Jette
-Jesperson
-Jerido
-Jenrette
-Jencks
-Jech
-Jayroe
-Jayo
-Jaye
-Javens
-Jaskot
-Jaros
-Jaquet
-Janowiak
-Jame
-Jaegers
-Jackel
-Izumi
-Ith
-Italia
-Irelan
-Ion
-Inzunza
-Imoto
-Imme
-Iglehart
-Iannone
-Iannacone
-Huyler
-Hussaini
-Hurlock
-Hurlbutt
-Huprich
-Humphry
-Hulslander
-Huelsman
-Hudelson
-Hudecek
-Hsia
-Hreha
-Hoyland
-Howk
-Housholder
-Housden
-Houff
-Horkey
-Honan
-Homme
-Holtzberg
-Hollyfield
-Hollings
-Hollenbaugh
-Hokenson
-Hogrefe
-Hogland
-Hoel
-Hodgkin
-Hochhalter
-Hjelle
-Hittson
-Hinderman
-Hinchliffe
-Hime
-Hilyer
-Hilby
-Hibshman
-Heydt
-Hewell
-Heward
-Hetu
-Hestand
-Heslep
-Herridge
-Herner
-Hernande
-Hermandez
-Hermance
-Herbold
-Heon
-Henthorne
-Henion
-Henao
-Heming
-Helmkamp
-Hellberg
-Heidgerken
-Heichel
-Hehl
-Hegedus
-Hefty
-Heckathorne
-Hearron
-Haymer
-Haycook
-Havlicek
-Hausladen
-Haseman
-Hartsook
-Hartog
-Harns
-Harne
-Harmann
-Haren
-Hanserd
-Hanners
-Hanekamp
-Hamra
-Hamley
-Hamelin
-Hamblet
-Hakimi
-Hagle
-Hagin
-Haehn
-Haeck
-Hackleman
-Haacke
-Gulan
-Guirand
-Guiles
-Guggemos
-Guerrieri
-Guerreiro
-Guereca
-Gudiel
-Guccione
-Gubler
-Gruenwald
-Gritz
-Grieser
-Grewe
-Grenon
-Gregersen
-Grefe
-Greener
-Grech
-Grecco
-Gravette
-Grassia
-Granholm
-Graner
-Grandi
-Grahan
-Gradowski
-Gradney
-Graczyk
-Gouthier
-Gottschall
-Goracke
-Gootee
-Goodknight
-Goodine
-Gonzalea
-Gonterman
-Gonalez
-Gomm
-Goleman
-Goldtooth
-Goldstone
-Goldey
-Golan
-Goes
-Goen
-Goeller
-Goel
-Goecke
-Godek
-Goan
-Glunz
-Gloyd
-Glodowski
-Glinski
-Glawe
-Girod
-Girdley
-Giovanni
-Gindi
-Gillings
-Gildner
-Giger
-Giesbrecht
-Gierke
-Gier
-Giboney
-Giaquinto
-Giannakopoulo
-Giaimo
-Giaccio
-Giacalone
-Gessel
-Gerould
-Gerlt
-Gerhold
-Geralds
-Genson
-Genereux
-Gellatly
-Geigel
-Gehrig
-Gehle
-Geerdes
-Geagan
-Gawel
-Gavina
-Gauss
-Gatwood
-Gathman
-Gaster
-Garske
-Garratt
-Garms
-Garis
-Gansburg
-Gammell
-Gambale
-Gamba
-Galimore
-Gadway
-Gadoury
-Furrer
-Furnish
-Furino
-Fullard
-Fukui
-Fuhrer
-Fryou
-Friesner
-Friedli
-Friedl
-Friedberg
-Freyermuth
-Fremin
-Fredell
-Fraze
-Franken
-Fought
-Foth
-Fote
-Fortini
-Fornea
-Formanek
-Forker
-Forgette
-Folan
-Foister
-Foglesong
-Flinck
-Flewellen
-Flaten
-Flaig
-Fitgerald
-Fischels
-Firman
-Finstad
-Finkelman
-Finister
-Finder
-Fina
-Fettes
-Fetterhoff
-Ferriter
-Ferch
-Fennessy
-Feltus
-Feltes
-Feinman
-Farve
-Farry
-Farrall
-Farag
-Falzarano
-Falck
-Falanga
-Fakhoury
-Faire
-Fairbrother
-Fagley
-Faggins
-Facteau
-Ewer
-Ewbank
-Evola
-Evener
-Eustis
-Eugenio
-Estwick
-Estel
-Essa
-Espinola
-Escutia
-Eschmann
-Erpelding
-Ernsberger
-Erling
-Entz
-Enrique
-Engelhart
-Enbody
-Emick
-Elsinger
-Ellinwood
-Ellingsen
-Ellicott
-Elkind
-Eisinger
-Eisenbeisz
-Eischen
-Eimer
-Eigner
-Eichhorst
-Ehmke
-Egleston
-Eggett
-Ege
-Efurd
-Edgeworth
-Eckels
-Ebey
-Eberling
-Eagleton
-Dwiggins
-Dweck
-Dunnings
-Dunnavant
-Dumler
-Duman
-Dugue
-Duerksen
-Dudeck
-Dreisbach
-Drawdy
-Drawbaugh
-Draine
-Draggoo
-Dowse
-Dovel
-Doughton
-Douds
-Doubrava
-Dort
-Dorshorst
-Dornier
-Doolen
-Donavan
-Dominque
-Dominion
-Dominik
-Domingez
-Dome
-Dom
-Dolder
-Dold
-Dobies
-Dk
-Diskin
-Disano
-Dirden
-Diponio
-Dipirro
-Dimock
-Diltz
-Dillabough
-Diley
-Dikes
-Digges
-Digerolamo
-Diel
-Dicker
-Dicharry
-Dicecco
-Dibartolomeo
-Diamant
-Dewire
-Devone
-Dessecker
-Dertinger
-Derousselle
-Derk
-Depauw
-Depalo
-Denherder
-Demeyer
-Demetro
-Demastus
-Delvillar
-Deloye
-Delosrios
-Delgreco
-Delarge
-Delangel
-Dejongh
-Deitsch
-Degiorgio
-Degidio
-Defreese
-Defoe
-Decambra
-Debenedetto
-Deaderick
-Daza
-Dauzat
-Daughenbaugh
-Dato
-Dass
-Darwish
-Dantuono
-Danton
-Dammeyer
-Daloia
-Daleo
-Dagg
-Dacey
-Curts
-Cuny
-Cunneen
-Culverhouse
-Cuervo
-Cucinella
-Cubit
-Crumm
-Crudo
-Crowford
-Crout
-Crotteau
-Crossfield
-Crooke
-Crom
-Critz
-Cristaldi
-Crickmore
-Cribbin
-Cremeens
-Crayne
-Cradduck
-Couvertier
-Cottam
-Cossio
-Correy
-Cordrey
-Coplon
-Copass
-Coone
-Coody
-Contois
-Consla
-Connelley
-Connard
-Congo
-Congleton
-Condry
-Conception
-Coltey
-Colindres
-Colgrove
-Colfer
-Colasurdo
-Cocker
-Cochell
-Cobbin
-Clouthier
-Closs
-Cloonan
-Clizbe
-Clennon
-Clayburn
-Claybourn
-Clausell
-Clasby
-Clagett
-Ciskowski
-Cirrincione
-Cinque
-Cinelli
-Cimaglia
-Ciaburri
-Christiani
-Christeson
-Chladek
-Chizmar
-Chinnici
-Chiarella
-Chevrier
-Cheves
-Chernow
-Cheong
-Chelton
-Charlette
-Chanin
-Cham
-Chaligoj
-Celestino
-Cayce
-Cavey
-Cavaretta
-Caughron
-Catmull
-Catapano
-Casio
-Cashaw
-Carullo
-Carualho
-Carthon
-Cartelli
-Carruba
-Carrere
-Carolus
-Carmine
-Carlstrom
-Carli
-Carfora
-Carello
-Carbary
-Car
-Caplette
-Cannell
-Cancilla
-Campell
-Cammarota
-Camilo
-Camejo
-Camarata
-Caisse
-Cacioppo
-Cabbagestalk
-Cabatu
-Cabanas
-Byles
-Buxbaum
-Butland
-Butch
-Burrington
-Burnsed
-Burningham
-Burlingham
-Burgy
-Buitrago
-Buffett
-Bueti
-Buehring
-Buday
-Bucks
-Bucknell
-Buchbinder
-Bucey
-Bruster
-Brunston
-Brumby
-Bruins
-Brouillet
-Brosious
-Broomes
-Brodin
-Broddy
-Brochard
-Britsch
-Britcher
-Brierley
-Brezina
-Bressi
-Bressette
-Breslow
-Brenden
-Breier
-Brei
-Braymer
-Brasuell
-Brash
-Branscomb
-Branin
-Brandley
-Brahler
-Bracht
-Bracamontes
-Brabson
-Boyne
-Boxell
-Bowery
-Bovard
-Boutelle
-Boulette
-Bottini
-Botkins
-Bosen
-Boscia
-Boscarino
-Borich
-Bores
-Boreman
-Bordoy
-Bordley
-Bordenet
-Boquet
-Boocks
-Bolner
-Boissy
-Boilard
-Bohnen
-Bohall
-Boening
-Boccia
-Boccella
-Bobe
-Blyth
-Blitz
-Blew
-Blacksmith
-Biviano
-Bitto
-Bisel
-Binstock
-Bines
-Billiter
-Bigsby
-Bighorse
-Bielawski
-Bickmore
-Bettin
-Bettenhausen
-Besson
-Beseau
-Berton
-Berroa
-Berntson
-Bernas
-Berisford
-Berhow
-Bergsma
-Benyo
-Benyard
-Bente
-Bennion
-Benko
-Belsky
-Bellavance
-Belasco
-Belardo
-Beidler
-Behring
-Begnaud
-Bega
-Befort
-Beek
-Bedore
-Beddard
-Becknell
-Beardslee
-Beardall
-Beagan
-Bayly
-Bauza
-Bautz
-Bausman
-Baumler
-Batterson
-Battenfield
-Bassford
-Basse
-Basemore
-Baruch
-Bartholf
-Bars
-Barman
-Baray
-Barabas
-Banghart
-Banez
-Balsam
-Ballester
-Ballagh
-Baldock
-Bagnoli
-Bagheri
-Bacus
-Bacho
-Baccam
-Axson
-Averhart
-Aver
-Ave
-Austill
-Auberry
-Athans
-Atcitty
-Atay
-Astarita
-Ascolese
-Artzer
-Arts
-Arrasmith
-Argenbright
-Aresco
-Arb
-Aranjo
-Appleyard
-Appenzeller
-App
-Apilado
-Antonetti
-Antis
-Annett
-Annas
-Angwin
-Andris
-Andries
-Andreozzi
-Ando
-Andis
-Anderegg
-Anastasia
-Amyot
-Aminov
-Amelung
-Amelio
-Amason
-Alviar
-Allendorf
-Allday
-Alice
-Aldredge
-Alcivar
-Alaya
-Alapai
-Airington
-Aina
-Ailor
-Ahrns
-Ahmadi
-Agresta
-Agent
-Affolter
-Aeschlimann
-Adney
-Aderhold
-Adell
-Adachi
-Ackiss
-Aben
-Abdelhamid
-Abar
-Aase
-Zorilla
-Zordan
-Zollman
-Zoch
-Zipfel
-Zimmerle
-Zike
-Ziel
-Zhong
-Zens
-Zelada
-Zaman
-Zahner
-Zadora
-Zachar
-Zaborowski
-Zabinski
-Yzquierdo
-Yoshizawa
-Yori
-Yielding
-Yerton
-Yehl
-Yeargain
-Yeakley
-Yamaoka
-Yagle
-Yablonski
-Wynia
-Wyne
-Wyers
-Wrzesinski
-Wrye
-Wriston
-Woolums
-Woolen
-Woodlock
-Woodle
-Wonser
-Wombacher
-Wollschlager
-Wollen
-Wolfley
-Wolfer
-Wisse
-Wisell
-Wirsing
-Winstanley
-Winsley
-Winiecki
-Winiarski
-Winge
-Winesett
-Windell
-Winberry
-Willyard
-Willemsen
-Wilkosz
-Wilensky
-Wikle
-Wiford
-Wienke
-Wieneke
-Wiederhold
-Wiebold
-Widick
-Wickenhauser
-Whitrock
-Whisner
-Whinery
-Wherley
-Whedbee
-Wheadon
-Whary
-Wessling
-Wessells
-Wenninger
-Wendroth
-Wende
-Wellard
-Weirick
-Weinkauf
-Wehrman
-Weech
-Weathersbee
-Waterford
-Warton
-Warncke
-Warm
-Wardrip
-Walstrom
-Walks
-Walkowski
-Walcutt
-Waight
-Wai
-Wagman
-Waggett
-Wadford
-Vowles
-Vormwald
-Vondran
-Vohs
-Vitt
-Vitalo
-Viser
-Vinas
-Villena
-Villaneuva
-Villafranca
-Villaflor
-Vilain
-Vigilante
-Vicory
-Viana
-Vian
-Vial
-Verucchi
-Verra
-Venzke
-Venske
-Veley
-Veile
-Veeder
-Vaske
-Vasconez
-Vargason
-Varble
-Vanwert
-Vantol
-Vanscooter
-Vanmetre
-Vanmaanen
-Vanhise
-Vanetta
-Vaneaton
-Vandyk
-Vandriel
-Vandorp
-Vandewater
-Vandervelden
-Vanderstelt
-Vanderhoef
-Vanderbeck
-Vanbibber
-Vanalstine
-Vanacore
-Valdespino
-Vaill
-Vailes
-Vagliardo
-Ursini
-Urrea
-Urive
-Uriegas
-Umphress
-Ucci
-Uballe
-Tyrone
-Tynon
-Twiner
-Tutton
-Tudela
-Tuazon
-Troisi
-Tripplett
-Trias
-Trescott
-Treichel
-Tredo
-Tranter
-Tozer
-Toxey
-Tortorici
-Tornow
-Topolski
-Topia
-Topel
-Topalian
-Tonne
-Tondre
-Tola
-Toepke
-Tiu
-Tisdell
-Tiscareno
-Thornborrow
-Thomison
-Thilges
-Theuret
-Therien
-Thang
-Thagard
-Thacher
-Texter
-Terzo
-Teresa
-Tep
-Tenpenny
-Tempesta
-Teetz
-Teaff
-Tavella
-Taussig
-Tatton
-Tasler
-Tarrence
-Tardie
-Tarazon
-Tantillo
-Tanney
-Tankson
-Tangen
-Tamburo
-Takes
-Tabone
-Szilagyi
-Syphers
-Swistak
-Swiatkowski
-Sweigert
-Swayzer
-Swapp
-Svehla
-Sutphen
-Sutch
-Susa
-Surma
-Surls
-Sundermeyer
-Sundeen
-Sulek
-Suite
-Sughrue
-Sudol
-Sturms
-Stupar
-Stum
-Stuckman
-Strole
-Strohman
-Streed
-Strebeck
-Strausser
-Strassel
-Stpaul
-Storts
-Storr
-Stommes
-Stmary
-Stjulien
-Stika
-Stiggers
-Sthill
-Stevick
-Sterman
-Stephany
-Stepanek
-Stemler
-Stelman
-Stelmack
-Steinkamp
-Steinbock
-Stcroix
-Stcharles
-Staudinger
-Starry
-Stanly
-Stallsworth
-Stalley
-Stains
-Srock
-Spritzer
-Spracklin
-Spinuzzi
-Spidell
-Spice
-Speyrer
-Sperbeck
-Spendlove
-Speedy
-Speckman
-Spargur
-Spangenberg
-Spaid
-Sowle
-Soulier
-Sotolongo
-Sostre
-Sorey
-Sonier
-Somogyi
-Somera
-Solo
-Soldo
-Sofia
-Soderholm
-Snoots
-Snooks
-Snoke
-Snodderly
-Snide
-Snee
-Smoke
-Smithhart
-Smillie
-Smay
-Smallman
-Sliwinski
-Slentz
-Sledd
-Slager
-Skogen
-Skog
-Skarda
-Skalicky
-Siwek
-Sitterson
-Sisti
-Sissel
-Sis
-Sinopoli
-Similton
-Simila
-Simenson
-Silvertooth
-Silos
-Siggins
-Sieler
-Siburt
-Sianez
-Shurley
-Shular
-Shuecraft
-Shreeves
-Shon
-Shollenberger
-Shoen
-Shishido
-Shipps
-Shipes
-Shinall
-Sherfield
-Shawe
-Sharrett
-Sharrard
-Shankman
-Shan
-Sham
-Sessum
-Serviss
-Servello
-Serice
-Serda
-Semler
-Semenza
-Selmon
-Sellen
-Seley
-Seidner
-Seib
-Sehgal
-Seelbach
-Sedivy
-Sebren
-Sebo
-Seanez
-Seagroves
-Seagren
-Seagrave
-Seabron
-Schwertner
-Schwegel
-Schwarzer
-Schrunk
-Schriefer
-Schreder
-Schrank
-Schopp
-Schonfeld
-Schoenwetter
-Schnall
-Schnackenberg
-Schnack
-Schmutzler
-Schmierer
-Schmidgall
-Schlup
-Schloemer
-Schlitt
-Schermann
-Scherff
-Schellenberg
-Schain
-Schaedler
-Schabel
-Scaccia
-Saye
-Saxman
-Saurez
-Sasseen
-Sasnett
-Sas
-Sarti
-Sarra
-Sarber
-Saran
-Santoy
-Santeramo
-Sansoucy
-Sando
-Sandles
-Sandburg
-Sandau
-Samra
-Samaha
-Salon
-Salizar
-Salam
-Saindon
-Sagaser
-Saeteun
-Sadusky
-Sackman
-Sabater
-Saas
-Ruthven
-Ruszkowski
-Rusche
-Rumpf
-Ruhter
-Ruhenkamp
-Rufo
-Rudge
-Ruddle
-Rowlee
-Rowand
-Routhier
-Rougeot
-Rotramel
-Rotan
-Roswell
-Rosten
-Rosillo
-Rookard
-Roode
-Rongstad
-Rollie
-Roider
-Roffe
-Roettger
-Rodick
-Rochez
-Rochat
-Roads
-Rivkin
-Rivadeneira
-Riston
-Risso
-Rise
-Rinderknecht
-Riis
-Riggsbee
-Rifkin
-Rieker
-Riegle
-Riedy
-Richwine
-Richmon
-Ricciuti
-Riccardo
-Ricardson
-Rhew
-Revoir
-Revier
-Remsberg
-Remiszewski
-Rembold
-Rella
-Reinken
-Reiland
-Reidel
-Reichart
-Rehak
-Redway
-Rednour
-Redifer
-Redgate
-Redenbaugh
-Redburn
-Reap
-Readus
-Raybuck
-Rauhuff
-Rauda
-Ratte
-Rathje
-Rappley
-Rands
-Ramseyer
-Ramseur
-Ramsdale
-Ramo
-Ramariz
-Raitz
-Raisch
-Rainone
-Rahr
-Ragasa
-Rafalski
-Radunz
-Quenzer
-Queja
-Queenan
-Pyun
-Puz
-Putzier
-Puskas
-Purrington
-Puri
-Punt
-Pullar
-Pruse
-Pring
-Primeau
-Prevette
-Preuett
-Presto
-Prestage
-Pownell
-Pownall
-Potthoff
-Potratz
-Poth
-Poter
-Posthuma
-Posen
-Porritt
-Popkin
-Poormon
-Polidoro
-Poles
-Polcyn
-Pokora
-Poer
-Pluviose
-Plock
-Pleva
-Placke
-Pioli
-Pingleton
-Pinchback
-Pinch
-Pieretti
-Piccone
-Piatkowski
-Philley
-Phibbs
-Phay
-Phagan
-Pfund
-Peyer
-Pettersen
-Petter
-Petrucelli
-Petropoulos
-Petras
-Petix
-Pester
-Perks
-Pepperman
-Pennick
-Penado
-Pelot
-Pelis
-Peeden
-Pechon
-Peal
-Pazmino
-Patchin
-Pasierb
-Parran
-Parilla
-Pardy
-Parcells
-Paragas
-Paradee
-Papin
-Panko
-Pangrazio
-Pangelinan
-Pandya
-Pancheri
-Panas
-Palmiter
-Pallares
-Palinkas
-Palek
-Pagliaro
-Packham
-Pacitti
-Ozier
-Overbaugh
-Oursler
-Ouimette
-Otteson
-Otsuka
-Othon
-Osmundson
-Oroz
-Orgill
-Ordeneaux
-Orama
-Oppy
-Opheim
-Onkst
-Oltmanns
-Olstad
-Olofson
-Ollivier
-Olen
-Olejniczak
-Okura
-Okuna
-Okey
-Ohrt
-Oharra
-Oguendo
-Ogier
-Offermann
-Oetzel
-Oechsle
-Odor
-Odoherty
-Oddi
-Ockerman
-Occhiogrosso
-Obryon
-Obremski
-Nyreen
-Nylund
-Nylen
-Nyholm
-Nuon
-Nuanes
-Norrick
-Noris
-Nordell
-Norbury
-Nooner
-Nono
-Nomura
-Nole
-Nolden
-Nola
-Nofsinger
-Nocito
-Nobel
-Niedbala
-Niebergall
-Nicolini
-Nicole
-Nicklaus
-Nevils
-Neuburger
-Nemerofsky
-Nemecek
-Nazareno
-Nastri
-Nast
-Nancy
-Nagorski
-Myre
-Muzzey
-Mutton
-Mutschler
-Muther
-Musumeci
-Muranaka
-Muramoto
-Murad
-Murach
-Muns
-Munno
-Muncrief
-Mugrage
-Muecke
-Mozer
-Moyet
-Mowles
-Mottern
-Mosman
-Mosconi
-Morine
-Morge
-Moravec
-Morad
-Moneymaker
-Mones
-Moncur
-Monarez
-Molzahn
-Moglia
-Moesch
-Mody
-Modisett
-Mitnick
-Mithcell
-Mitchiner
-Mistry
-Misercola
-Mirabile
-Minvielle
-Mino
-Minkler
-Minifield
-Minichiello
-Mindell
-Minasian
-Milteer
-Millwee
-Millstein
-Millien
-Mikrut
-Mihaly
-Miggins
-Michard
-Mezo
-Metzner
-Mesquita
-Mervin
-Merriwether
-Merk
-Merfeld
-Mercik
-Mercadante
-Mention
-Menna
-Mendizabal
-Mender
-Members
-Melusky
-Melquist
-Mellado
-Meler
-Melendes
-Mekeel
-Meiggs
-Megginson
-Meck
-Mcwherter
-Mcwayne
-Mcsparren
-Mcrea
-Mcneff
-Mcnease
-Mcmurrin
-Mckeag
-Mchughes
-Mcguiness
-Mcgilton
-Mcelreath
-Mcelhone
-Mcelhenney
-Mceldowney
-Mccurtain
-Mccure
-Mccosker
-Mccory
-Mccormic
-Mccline
-Mccleave
-Mcclatchey
-Mccarney
-Mccanse
-Mcallen
-Mazzie
-Mazin
-Mazanec
-Mayette
-Mautz
-Mauser
-Maun
-Mattas
-Mathurin
-Mathiesen
-Massmann
-Masri
-Masias
-Mascolo
-Mascetti
-Mascagni
-Marzolf
-Maruska
-Martain
-Marta
-Marszalek
-Marolf
-Marmas
-Marlor
-Markwood
-Marines
-Marinero
-Marier
-Marich
-Marcom
-Marciante
-Marchman
-Marchio
-Marbach
-Manzone
-Mantey
-Mannina
-Manhardt
-Manfred
-Manaois
-Malmgren
-Mallonee
-Mallin
-Mallary
-Malette
-Makinson
-Makins
-Makarewicz
-Mainwaring
-Maida
-Maiava
-Magro
-Magouyrk
-Magett
-Maeder
-Madyun
-Maduena
-Maden
-Madeira
-Macnamara
-Mackins
-Mackel
-Macinnes
-Macia
-Macgowan
-Lyssy
-Lyerly
-Lyalls
-Lutter
-Lunney
-Luksa
-Ludeman
-Lucidi
-Lucci
-Lowden
-Lovier
-Loughridge
-Losch
-Lory
-Lorson
-Lorenzano
-Lorden
-Lorber
-Lopardo
-Loosier
-Loomer
-Longsdorf
-Longchamps
-Loncar
-Loker
-Logwood
-Loeffelholz
-Lockmiller
-Livoti
-Linford
-Linenberger
-Lindloff
-Lindenbaum
-Limoges
-Lilla
-Liley
-Lighthill
-Lightbourne
-Lieske
-Leza
-Levels
-Levandoski
-Leuck
-Lepere
-Leonhart
-Lenon
-Lemma
-Lemler
-Leising
-Leinonen
-Lehtinen
-Lehan
-Leetch
-Leeming
-Ledyard
-Ledwith
-Ledingham
-Leclere
-Leck
-Lebert
-Leandry
-Lazzell
-Layo
-Laye
-Laxen
-Lawther
-Lawn
-Lawerance
-Lavoy
-Lavertu
-Laverde
-Lauren
-Latouche
-Latner
-Lathen
-Last
-Laskin
-Lashbaugh
-Lascala
-Larroque
-Larick
-Laraia
-Laplume
-Lanzilotta
-Lannom
-Landrigan
-Landolt
-Landess
-Lancia
-Lamkins
-Lalla
-Lalk
-Lakeman
-Lakatos
-Laib
-Lahay
-Lagrave
-Lagerquist
-Lafoy
-Lafleche
-Lader
-Labrada
-Kwiecinski
-Kutner
-Kunshier
-Kulakowski
-Kujak
-Kuehnle
-Kubisiak
-Krzyminski
-Krugh
-Krois
-Kritikos
-Krill
-Kriener
-Krewson
-Kretzschmar
-Kretz
-Kresse
-Kreiter
-Kreischer
-Krebel
-Kraut
-Krans
-Kraling
-Krahenbuhl
-Kouns
-Kotson
-Kossow
-Kopriva
-Konkle
-Kolter
-Kolk
-Kolich
-Kohner
-Koeppen
-Koenigs
-Kock
-Kochanski
-Kobus
-Knowling
-Knouff
-Knoerzer
-Knippel
-Kloberdanz
-Kleinert
-Klarich
-Klaassen
-Kizzie
-Kisamore
-Kirn
-Kiraly
-Kipps
-Kinson
-Kinneman
-Kington
-Kine
-Kimbriel
-Kille
-Kick
-Kibodeaux
-Khamvongsa
-Keylon
-Kever
-Keser
-Kertz
-Kercheval
-Kenneth
-Kendrix
-Kendle
-Ken
-Kempt
-Kemple
-Keesey
-Keats
-Keatley
-Kazmierski
-Kazda
-Kazarian
-Kawashima
-Katsch
-Kasun
-Kassner
-Kassem
-Kasperski
-Kasinger
-Kaschak
-Karels
-Kantola
-Kana
-Kamai
-Kalthoff
-Kalla
-Kalani
-Kahrs
-Kahanek
-Kacher
-Jurasek
-Juniper
-Jungels
-Jukes
-Juelfs
-Judice
-Juda
-Ju
-Josselyn
-Jonsson
-Jonak
-Joens
-Jobson
-Jegede
-Jee
-Jeanjacques
-Jaworowski
-Jaspers
-Jannsen
-Janner
-Jankowiak
-Jank
-Janiak
-Jackowski
-Jacklin
-Jabbour
-Iyer
-Iveson
-Ivan
-Isner
-Iniquez
-Ingwerson
-Ingber
-Ina
-Imbrogno
-Ille
-Ikehara
-Iannelli
-Hyson
-Huxford
-Huseth
-Hurns
-Hurney
-Hurles
-Hunnings
-Humbarger
-Hulan
-Huisinga
-Hughett
-Hughen
-Hudler
-Hubiak
-Hricko
-How
-Hoversten
-Hottel
-Hosaka
-Horsch
-Hormann
-Hordge
-Honzell
-Homburg
-Holten
-Holme
-Hollopeter
-Hollinsworth
-Hollibaugh
-Holberg
-Hohmann
-Hoenstine
-Hodell
-Hodde
-Hobert
-Hives
-Hiter
-Hirko
-Hipolito
-Hinzmann
-Hinrichsen
-Hinger
-Hincks
-Hilz
-Hilborn
-Highley
-Higashi
-Hieatt
-Hicken
-Heverly
-Hesch
-Hervert
-Hershkowitz
-Herreras
-Hermanns
-Herget
-Henriguez
-Hennon
-Hengel
-Helmlinger
-Helmig
-Helen
-Heldman
-Heizer
-Heinitz
-Heifner
-Heidorn
-Heglin
-Heffler
-Hebner
-Heathman
-Heaslip
-Hazlip
-Haymes
-Hayase
-Hawver
-Haw
-Havermale
-Havas
-Hauber
-Hashim
-Hasenauer
-Harvel
-Hartney
-Hartel
-Harsha
-Harpine
-Harkrider
-Harkin
-Harer
-Harclerode
-Hanzely
-Hanni
-Hannagan
-Hampel
-Hammerschmidt
-Hamar
-Hallums
-Hallin
-Hainline
-Haid
-Haggart
-Hafen
-Haer
-Hadiaris
-Hadad
-Hackford
-Habeeb
-Guymon
-Guttery
-Gunnett
-Gull
-Guillette
-Guiliano
-Guilbeaux
-Guiher
-Guignard
-Guerry
-Gude
-Gucman
-Guadian
-Grzybowski
-Grzelak
-Grussendorf
-Grumet
-Gruenhagen
-Grudzinski
-Ground
-Grossmann
-Grof
-Grisso
-Grisanti
-Griffitts
-Griesbaum
-Grella
-Gregston
-Graveline
-Grandusky
-Grandinetti
-Gramm
-Goynes
-Gowing
-Goudie
-Gosman
-Gort
-Gorsline
-Goralski
-Goodstein
-Goodroe
-Goodlin
-Goodheart
-Goodhart
-Gonzelez
-Gonthier
-Goldsworthy
-Goldade
-Goettel
-Goerlitz
-Goepfert
-Goehner
-Goben
-Gobeille
-Glock
-Gliem
-Gleich
-Glasson
-Glascoe
-Gladwell
-Giusto
-Girdner
-Gipple
-Giller
-Giesing
-Giammona
-Ghormley
-Germon
-Geringer
-Gergely
-Gerberich
-Gepner
-Gens
-Genier
-Gemme
-Gelsinger
-Geigle
-Gebbia
-Gayner
-Gavitt
-Gatrell
-Gastineau
-Gasiewski
-Gascoigne
-Garro
-Garin
-Ganong
-Ganga
-Galpin
-Gallus
-Galizia
-Gajda
-Gahm
-Gagen
-Gaffigan
-Furno
-Furnia
-Furgason
-Fronczak
-Frishman
-Friess
-Frierdich
-Fresh
-Freestone
-Franta
-Frankovich
-Fors
-Forres
-Forrer
-Floris
-Florido
-Floria
-Flis
-Flicek
-Flens
-Flegal
-Flamenco
-Finkler
-Finkenbinder
-Finefrock
-Filter
-Filpo
-Filion
-Fierman
-Fieldman
-Ferreyra
-Fernendez
-Fergeson
-Fera
-Fencil
-Feith
-Feight
-Federici
-Federer
-Fechtner
-Feagan
-Fausnaugh
-Faubert
-Fata
-Farman
-Farinella
-Fantauzzi
-Fanara
-Falso
-Falardeau
-Fagnani
-Fabro
-Excell
-Ewton
-Evey
-Everetts
-Eve
-Evarts
-Etherington
-Estremera
-Estis
-Estabrooks
-Essig
-Esplin
-Espenschied
-Ernzen
-Erich
-Eppes
-Eppard
-Entwisle
-Emmi
-Emison
-Elison
-Elguezabal
-Eledge
-Elbaz
-Eisler
-Eiden
-Eichorst
-Eichert
-Egle
-Eggler
-Eggimann
-Edey
-Eckerman
-Echelberger
-Ebbs
-Ebanks
-Dziak
-Dyche
-Dyce
-Dusch
-Duross
-Durley
-Durate
-Dunsworth
-Dumke
-Dulek
-Duhl
-Duggin
-Dufford
-Dudziak
-Ducrepin
-Dubree
-Dubre
-Dubie
-Dubas
-Droste
-Drisko
-Drewniak
-Doxtator
-Dowtin
-Downum
-Doubet
-Dottle
-Dosier
-Doshi
-Dorst
-Dorset
-Dornbusch
-Doren
-Donze
-Donica
-Domanski
-Domagala
-Dohse
-Doerner
-Doerfler
-Doble
-Dobkins
-Dilts
-Digiulio
-Digaetano
-Dietzel
-Diddle
-Dickel
-Dezarn
-Devoy
-Devoss
-Devonshire
-Devon
-Devilla
-Devere
-Deters
-Desvergnes
-Deshay
-Desena
-Deross
-Der
-Depedro
-Densley
-Demorest
-Demore
-Demora
-Demirjian
-Demerchant
-Dematteis
-Demateo
-Delgardo
-Delfavero
-Delaurentis
-Delamar
-Delacy
-Deitrich
-Deisher
-Degracia
-Degraaf
-Defries
-Defilippis
-Decoursey
-Debruin
-Debiasi
-Debar
-Dearden
-Dealy
-Dayhoff
-Davino
-Darvin
-Darrisaw
-Darbyshire
-Daquino
-Daprile
-Danial
-Danh
-Danahy
-Dalsanto
-Dallavalle
-Daine
-Dagel
-Dadamo
-Dacy
-Dacunha
-Dabadie
-Czyz
-Cutsinger
-Curney
-Cuppernell
-Cunliffe
-Cumby
-Cullop
-Cullinane
-Cugini
-Cudmore
-Cuda
-Cucuzza
-Cuch
-Crumby
-Crouser
-Crock
-Critton
-Critchley
-Cristy
-Cremona
-Cremar
-Crehan
-Creary
-Crasco
-Crall
-Crabbe
-Cozzolino
-Cozier
-Coyner
-Couvillier
-Counterman
-Coulthard
-Coudriet
-Cottom
-Corzo
-Cornutt
-Corkran
-Cords
-Corda
-Copelin
-Coonan
-Consolo
-Conrow
-Conran
-Connerton
-Conkwright
-Condren
-Comp
-Comly
-Comisky
-Colli
-Collet
-Colello
-Colbeck
-Colarusso
-Coiner
-Cohron
-Codere
-Cocks
-Cobia
-Cly
-Cluster
-Clure
-Clowser
-Clovis
-Clingenpeel
-Clenney
-Clendaniel
-Clemenson
-Cleere
-Cleckler
-Claybaugh
-Clason
-Cirullo
-Ciraulo
-Ciolek
-Ciampi
-Christopherse
-Christophe
-Chovanec
-Chopra
-Chol
-Chiem
-Chestnutt
-Chesterman
-Chernoff
-Chermak
-Chelette
-Checketts
-Charpia
-Charo
-Chargois
-Champman
-Challender
-Chafins
-Cerruto
-Celi
-Cea
-Cazenave
-Cay
-Cavaluzzi
-Cauthon
-Caudy
-Catino
-Caterina
-Catano
-Castell
-Cassaro
-Cassarino
-Carrano
-Carozza
-Carow
-Carmickle
-Carlyon
-Carlew
-Cardena
-Caputi
-Capley
-Capalbo
-Canseco
-Candella
-Canal
-Campton
-Camposano
-Calleros
-Calleja
-Callegari
-Calica
-Calarco
-Calais
-Caillier
-Cahue
-Cadenhead
-Cadenas
-Cabera
-Buzzo
-Busto
-Bussmann
-Busenbark
-Burzynski
-Bursley
-Bursell
-Burle
-Burkleo
-Burkette
-Burczyk
-Bumstead
-Bullett
-Buikema
-Buenaventura
-Buege
-Buechel
-Budreau
-Budhram
-Bucknam
-Brye
-Brushwood
-Brumbalow
-Brulotte
-Bruington
-Bruderer
-Browns
-Brougher
-Bromfield
-Broege
-Brodhead
-Brocklesby
-Broadie
-Brizuela
-Britz
-Brisendine
-Brilla
-Briggeman
-Brierton
-Bridgeford
-Breyfogle
-Brevig
-Breuninger
-Bresse
-Bresette
-Brelsford
-Breitbach
-Bread
-Brayley
-Braund
-Branscom
-Brando
-Brandner
-Brahm
-Braboy
-Brabble
-Bozman
-Boyte
-Boynes
-Boyken
-Bowell
-Bowan
-Boutet
-Bouse
-Boulet
-Boule
-Bottcher
-Bosquez
-Borrell
-Boria
-Bordes
-Borchard
-Bonson
-Bonino
-Bonas
-Bonamico
-Bolstad
-Bolser
-Bollis
-Bolich
-Bolf
-Boker
-Boileau
-Bohac
-Bogucki
-Bogren
-Boeger
-Bodziony
-Bodo
-Bodley
-Boback
-Blyther
-Blight
-Blenker
-Blazina
-Blase
-Blamer
-Blacknall
-Blackmond
-Bitz
-Biser
-Biscardi
-Binz
-Bilton
-Billotte
-Billafuerte
-Bigford
-Biegler
-Bibber
-Bhandari
-Beyersdorf
-Bevelle
-Bettendorf
-Bessard
-Bertsche
-Berne
-Berlinger
-Berish
-Beranek
-Bentson
-Bentsen
-Benskin
-Benoy
-Benoist
-Benitz
-Belongia
-Belmore
-Belka
-Belen
-Beitzel
-Beiter
-Beitel
-Behrns
-Beckworth
-Becka
-Beaudion
-Beary
-Beare
-Beames
-Beabout
-Beaber
-Bazzano
-Bazinet
-Baucum
-Batrez
-Baswell
-Bastos
-Bascomb
-Bartha
-Barstad
-Barrilleaux
-Barretto
-Barresi
-Barona
-Barkhurst
-Barke
-Bardales
-Barczak
-Barca
-Barash
-Banfill
-Bambino
-Balonek
-Balmes
-Ballon
-Balko
-Balestrieri
-Baldino
-Baldelli
-Baken
-Baiza
-Bahner
-Baek
-Badour
-Badman
-Badley
-Badia
-Backmon
-Bacich
-Bacca
-Ayscue
-Ayo
-Aynes
-Austen
-Ausiello
-Auringer
-Auiles
-Aspinwall
-Askwith
-Artiga
-Arroliga
-Arns
-Arman
-Arellanes
-Aracena
-Antwine
-Antuna
-Anselmi
-Ansel
-Annen
-Angelino
-Angeli
-Angarola
-Andrae
-Amparo
-Amodio
-Amie
-Ameen
-Alwine
-Alverio
-Altro
-Altobello
-Altemus
-Alquicira
-Ally
-Allphin
-Allemand
-Allam
-Alessio
-Akpan
-Akerman
-Aiona
-Aikman
-Agyeman
-Agredano
-Adamik
-Adamczak
-Acrey
-Achilles
-Acevado
-Abu
-Abreo
-Abrahamsen
-Abild
-Zwicker
-Zweig
-Zuvich
-Zumpano
-Zuluaga
-Zubek
-Zornes
-Zoglmann
-Ziminski
-Zimbelman
-Zhanel
-Zenor
-Zechman
-Zauner
-Zamarron
-Zaffino
-Yusuf
-Ytuarte
-Yoke
-Yett
-Yerkovich
-Yelder
-Yaw
-Yasuda
-Yapp
-Yankee
-Yaden
-Yackley
-Yaccarino
-Xia
-Wytch
-Wyre
-Wussow
-Worthing
-Wormwood
-Wormack
-Worlds
-Wordsworth
-Wordell
-Woodroof
-Woodington
-Woodhams
-Wooddell
-Wollner
-Wojtkowski
-Wojcicki
-Wogan
-Wlodarczyk
-Wixted
-Withington
-Withem
-Wisler
-Wirick
-Winterhalter
-Winski
-Winne
-Winemiller
-Wimett
-Wiltfong
-Willibrand
-Willes
-Wilkos
-Wilbon
-Wiktor
-Wiggers
-Wigg
-Wiegmann
-Wickliff
-Wiberg
-Whittler
-Whittenton
-Whitling
-Whitledge
-Whitherspoon
-Whiters
-Whitecotton
-Whitebird
-Wheary
-Wetherill
-Westmark
-Westaby
-Wertenberger
-Wentland
-Wenstrom
-Wenker
-Wellen
-Weier
-Wegleitner
-Wedekind
-Wawers
-Wassel
-Warehime
-Wank
-Wandersee
-Waltmon
-Waltersheid
-Walbridge
-Wakely
-Wakeham
-Wajda
-Waithe
-Waidelich
-Wahler
-Wahington
-Wagster
-Wadel
-Vuyovich
-Vuolo
-Vulich
-Vukovich
-Volmer
-Vollrath
-Vollbrecht
-Vogelgesang
-Voeller
-Vlach
-Vivar
-Vitullo
-Vitanza
-Visker
-Visalli
-Viray
-Vinning
-Viniard
-Villapando
-Villaman
-Vier
-Viar
-Viall
-Verstraete
-Vermilya
-Verdon
-Venn
-Velten
-Velis
-Vasey
-Vanoven
-Vanorder
-Vanlue
-Vanheel
-Vanderwoude
-Vanderheide
-Vandenheuvel
-Vandenbos
-Vandeberg
-Vandal
-Vanblarcom
-Vanaken
-Vanacker
-Vallian
-Valine
-Valent
-Vaine
-Vaile
-Vadner
-Uttech
-Urioste
-Urbanik
-Unrath
-Unnasch
-Underkofler
-Uehara
-Udy
-Tyrer
-Tyburski
-Twaddle
-Turntine
-Tunis
-Tullock
-Trunk
-Tropp
-Troilo
-Tritsch
-Triola
-Trigo
-Tribou
-Tribley
-Tri
-Trethewey
-Tress
-Trela
-Treharne
-Trefethen
-Trayler
-Trax
-Traut
-Trang
-Tranel
-Trager
-Traczyk
-Towsley
-Torrecillas
-Tornatore
-Tork
-Torivio
-Toriello
-Tooles
-Toodle
-Tomme
-Tolosa
-Tolen
-Toca
-Titterington
-Tipsword
-Tinklenberg
-Tim
-Tigney
-Tigert
-Thygerson
-Thurn
-Thur
-Threats
-Thorstad
-Thornberg
-Thoresen
-Thomaston
-Tholen
-Thicke
-Theiler
-Thebeau
-Theaux
-Thaker
-Tewani
-Teufel
-Tetley
-Terrebonne
-Terrano
-Terpening
-Telly
-Tela
-Teig
-Teichert
-Tegethoff
-Teele
-Tatar
-Tashjian
-Tarte
-Tanton
-Tanimoto
-Tamimi
-Tamas
-Talman
-Taal
-Szydlowski
-Szostak
-Swoyer
-Swerdlow
-Sweeden
-Sweda
-Swanke
-Swander
-Swackhammer
-Suyama
-Suriano
-Suri
-Surdam
-Suprenant
-Sundet
-Summerton
-Sult
-Suleiman
-Suffridge
-Suby
-Stych
-Studeny
-Stubbins
-Strupp
-Struckman
-Strief
-Strictland
-Stremcha
-Strehl
-Stramel
-Stoy
-Stoutamire
-Storozuk
-Stordahl
-Stopher
-Stolley
-Stolfi
-Stoeger
-Stockhausen
-Stjulian
-Stivanson
-Stinton
-Stinchfield
-Stigler
-Stieglitz
-Stgermaine
-Steuer
-Steuber
-Steuart
-Stepter
-Stepnowski
-Stepanian
-Steimer
-Stefanelli
-Stebner
-Stears
-Steans
-Stayner
-Staubin
-Statz
-Stasik
-Starn
-Starmer
-Stargel
-Stanzione
-Stankovich
-Stan
-Stamour
-Staib
-Stadelman
-Stadel
-Stachura
-Squadrito
-Sprinkles
-Springstead
-Spragg
-Spigelmyer
-Spieler
-Spielberg
-Spaur
-Sovocool
-Sovereign
-Soundara
-Soulia
-Souffrant
-Sos
-Sorce
-Sonkin
-Sodhi
-Soble
-Sniffen
-Smouse
-Smittle
-Smithee
-Smedick
-Smaller
-Slowinski
-Slovacek
-Slominski
-Slice
-Skowronek
-Skokan
-Skanes
-Sivertson
-Sinyard
-Sinka
-Sinard
-Simonin
-Simonian
-Simmions
-Silcott
-Silberg
-Siefken
-Siddon
-Shuttlesworth
-Shubin
-Shubeck
-Shiro
-Shiraki
-Shipper
-Shina
-Shilt
-Shikles
-Shideler
-Shenton
-Shelvey
-Shellito
-Shelhorse
-Shawcroft
-Shatto
-Shanholtzer
-Shamonsky
-Shall
-Shadden
-Seymer
-Seyfarth
-Sewer
-Setlock
-Servant
-Serratos
-Serr
-Sepulueda
-Senay
-Semmel
-Semans
-Selvig
-Selkirk
-Selk
-Seligson
-Seldin
-Seiple
-Seiersen
-Seidling
-Seidensticker
-Secker
-Searson
-Scordo
-Scollard
-Scoggan
-Scobee
-Sciandra
-Scialdone
-Schwimmer
-Schwieger
-Schweer
-Schwanz
-Schutzenhofer
-Schuetze
-Schrodt
-Schriever
-Schriber
-Schremp
-Schrecongost
-Schraeder
-Schonberg
-Scholtz
-Scholle
-Schoettle
-Schoenemann
-Schoene
-Schnitker
-Schmuhl
-Schmith
-Schlotterbeck
-Schleppenbach
-Schlee
-Schickel
-Schibi
-Schein
-Scheide
-Scheibe
-Scheib
-Schaumberg
-Schardein
-Schaalma
-Scantlin
-Scantlebury
-Sayle
-Sausedo
-Saurer
-Sassone
-Sarracino
-Saric
-Sanz
-Santino
-Santarpia
-Santano
-Santaniello
-Sangha
-Sandvik
-Sandoral
-Sandobal
-Sandercock
-Sanantonio
-Salviejo
-Salsberry
-Salois
-Salazer
-Sagon
-Saglibene
-Sagel
-Sagal
-Saetern
-Saefong
-Sadiq
-Sabori
-Saballos
-Rygiel
-Rushlow
-Runco
-Rulli
-Ruller
-Ruffcorn
-Ruess
-Ruebush
-Rudlong
-Rudin
-Rudgers
-Rudesill
-Ruderman
-Rucki
-Rucinski
-Rubner
-Rubinson
-Rubiano
-Ruan
-Roznowski
-Rozanski
-Rowson
-Rower
-Rounsaville
-Roudabush
-Rotundo
-Rothell
-Rotchford
-Rosiles
-Roshak
-Rosetti
-Rosenkranz
-Rorer
-Rollyson
-Rokosz
-Rojek
-Roitman
-Rohrs
-Rogel
-Roewe
-Rodriges
-Rodocker
-Rodgerson
-Rodan
-Rodak
-Rocque
-Rochholz
-Rochel
-Robicheau
-Robbinson
-Roady
-Ritchotte
-Ripplinger
-Rippetoe
-Ringstaff
-Ringenberg
-Rinard
-Rigler
-Rightmire
-Riesen
-Riek
-Ridges
-Richner
-Richberg
-Riback
-Rial
-Rhyner
-Rhees
-Resse
-Renno
-Renee
-Rendleman
-Ren
-Reisz
-Reisenauer
-Reinschmidt
-Reins
-Reinholt
-Reinard
-Reifsnyder
-Rehfeld
-Reha
-Regester
-Reffitt
-Redler
-Rediske
-Reckner
-Reckart
-Rebolloso
-Rebollar
-Reasonover
-Reasner
-Reaser
-Reano
-Reagh
-Raval
-Ratterman
-Ratigan
-Rater
-Rasp
-Raneses
-Randolf
-Ramil
-Ramdas
-Ramberg
-Rajaniemi
-Rail
-Raid
-Raggio
-Ragel
-Ragain
-Rade
-Radaker
-Racioppi
-Rabinovich
-Quickle
-Quertermous
-Queal
-Quartucci
-Quander
-Quain
-Pynes
-Putzel
-Purl
-Pulizzi
-Pugliares
-Prusak
-Prueter
-Protano
-Propps
-Primack
-Prieur
-Presta
-Preister
-Prawl
-Pratley
-Prairie
-Pozzo
-Powless
-Povey
-Pottorf
-Pote
-Postley
-Porzio
-Ports
-Portney
-Ponzi
-Pontoriero
-Ponto
-Pont
-Poncedeleon
-Polimeni
-Polhamus
-Pole
-Polan
-Poetker
-Poellnitz
-Podgurski
-Plotts
-Pliego
-Plaugher
-Plantenberg
-Plair
-Plagmann
-Pizzitola
-Pittinger
-Pitcavage
-Pischke
-Piontek
-Pintar
-Pinnow
-Pinneo
-Pinley
-Pingel
-Pinello
-Pimenta
-Pillard
-Piker
-Pietras
-Piere
-Picasso
-Phillps
-Pfleger
-Pfahl
-Pezzuti
-Petruccelli
-Petrello
-Peteet
-Pescatore
-Peruzzi
-Perusse
-Perotta
-Perona
-Perini
-Peretti
-Perelman
-Perciful
-Peppin
-Pennix
-Pennino
-Penalosa
-Pemble
-Pelz
-Peltzer
-Pelphrey
-Pelote
-Pellum
-Pellecchia
-Pelikan
-Peitz
-Peels
-Pebworth
-Peary
-Pawlicki
-Pavelich
-Paster
-Pasquarella
-Paskey
-Paseur
-Paschel
-Parslow
-Parrow
-Parrot
-Parlow
-Parlett
-Parler
-Pargo
-Parco
-Paprocki
-Panepinto
-Panebianco
-Pandy
-Pandey
-Pamphile
-Pamintuan
-Pamer
-Paluso
-Paleo
-Paker
-Pagett
-Paczkowski
-Ozburn
-Ovington
-Overmeyer
-Ouellet
-Osterlund
-Oslin
-Oseguera
-Osaki
-Orrock
-Ormsbee
-Orlikowski
-Organista
-Oregan
-Orebaugh
-Orabuena
-Openshaw
-Ontiveroz
-Ondo
-Omohundro
-Ollom
-Ollivierre
-Olivencia
-Oley
-Olazabal
-Okino
-Oki
-Offenberger
-Oestmann
-Ocker
-Obar
-Oakeson
-Nuzum
-Nurre
-Nowinski
-Novosel
-Norquist
-Nordlie
-Noorani
-Nonnemacher
-Nolder
-Njoku
-Niznik
-Niwa
-Niss
-Ninneman
-Niner
-Nimtz
-Niemczyk
-Nieder
-Nicolo
-Nichlos
-Niblack
-Newyear
-Newtown
-Newill
-Newcom
-Neverson
-Neuhart
-Neuenschwande
-Nestler
-Nenno
-Nejman
-Neiffer
-Neidlinger
-Neglia
-Needs
-Nearing
-Nazarian
-Navor
-Nary
-Narayan
-Nangle
-Nakama
-Naish
-Naik
-Nadolski
-Muscato
-Murphrey
-Murdick
-Murchie
-Muratalla
-Munnis
-Mundwiller
-Muncey
-Munce
-Mullenbach
-Mulhearn
-Mulcahey
-Muhammed
-Muchow
-Mountford
-Moudry
-Mosko
-Morvay
-Morrical
-Morr
-Moros
-Mormann
-Morgen
-Moredock
-Morden
-Mordarski
-Moravek
-Morandi
-Morale
-Mooradian
-Montejo
-Montegut
-Montan
-Monsanto
-Monford
-Moncus
-Molinas
-Molek
-Mohd
-Moehrle
-Moehring
-Modzeleski
-Model
-Modafferi
-Moala
-Moake
-Miyahira
-Mitani
-Mischel
-Minges
-Minella
-Mimes
-Milles
-Milbrett
-Milanes
-Mikolajczyk
-Mikami
-Meucci
-Metler
-Methven
-Metge
-Messmore
-Messerschmidt
-Mesrobian
-Meservey
-Merseal
-Menor
-Menon
-Menear
-Melott
-Melley
-Melfi
-Meinhart
-Megivern
-Megeath
-Meester
-Meeler
-Meegan
-Medoff
-Medler
-Meckley
-Meath
-Mearns
-Mcquigg
-Mcpadden
-Mclure
-Mckellips
-Mckeithen
-Mcglathery
-Mcginnes
-Mcghan
-Mcdonel
-Mccullom
-Mccraken
-Mccrackin
-Mcconathy
-Mccloe
-Mcclaughry
-Mcclaflin
-Mccarren
-Mccaig
-Mcaulay
-Mcaffee
-Mazzuca
-Maytubby
-Mayner
-Maymi
-Mattiello
-Matthis
-Matthees
-Matthai
-Mathiason
-Mastrogiovann
-Masteller
-Mashack
-Marucci
-Martorana
-Martiniz
-Marter
-Martellaro
-Marsteller
-Marris
-Marrara
-Maroni
-Marolda
-Marocco
-Maritn
-Margo
-Maresh
-Maready
-Marchione
-Marbut
-Maranan
-Maragno
-Mapps
-Manrriquez
-Manny
-Mannis
-Manni
-Mangina
-Manganelli
-Mancera
-Mamon
-Maloch
-Mallozzi
-Maller
-Majchrzak
-Majano
-Mainella
-Mahanna
-Maertens
-Madon
-Macumber
-Macioce
-Machuga
-Machlin
-Machida
-Machala
-Mabra
-Lynne
-Lybbert
-Luvert
-Lutts
-Luttrull
-Lupez
-Lukehart
-Ludewig
-Luchsinger
-Loyal
-Lovecchio
-Louissaint
-Loughney
-Lottie
-Lostroh
-Lose
-Lorton
-Lorette
-Lopeman
-Loparo
-Longs
-Loner
-Londo
-Lombera
-Lokietek
-Loiko
-Lohrenz
-Lohan
-Lofties
-Locklar
-Lockaby
-Lobianco
-Loader
-Loa
-Llano
-Livesey
-Litster
-Liter
-Liske
-Linsky
-Linne
-Lindbeck
-Limes
-Licudine
-Leyua
-Levie
-Letterman
-Leonelli
-Lenzo
-Lenze
-Lents
-Leitao
-Leif
-Leidecker
-Leibold
-Lehne
-Legan
-Legacy
-Lefave
-Leehy
-Ledue
-Lecount
-Lecea
-Leadley
-Lazzara
-Lazcano
-Lazalde
-Layer
-Lavi
-Lavancha
-Lavan
-Lav
-Laude
-Latu
-Latty
-Lato
-Larranaga
-Lapidus
-Lapenta
-Langridge
-Langeveld
-Langel
-Lanes
-Landowski
-Landgren
-Landfried
-Lame
-Lamattina
-Lallier
-Lairmore
-Lahaie
-Lagazo
-Lagan
-Lafoe
-Lafluer
-Laflame
-Lafevers
-Lada
-Lacoss
-Lachney
-Labreck
-Labreche
-Labay
-Laa
-Kwasnik
-Kuzyk
-Kutzner
-Kushnir
-Kusek
-Kurtzman
-Kurian
-Kulhanek
-Kuklinski
-Kuh
-Kueny
-Kuczynski
-Kubitz
-Kuang
-Kruschke
-Krous
-Krompel
-Kritz
-Krimple
-Kriese
-Krenzer
-Kreis
-Kratzke
-Krane
-Krage
-Kraebel
-Kozub
-Kozma
-Kouri
-Koudelka
-Kotcher
-Kotas
-Kostic
-Kosh
-Kosar
-Kopko
-Kopka
-Kooy
-Konigsberg
-Konarski
-Kolmer
-Kohlmeyer
-Kobbe
-Knoop
-Knoedler
-Knocke
-Knipple
-Knippenberg
-Knickrehm
-Kneisel
-Kluss
-Klossner
-Klipfel
-Klawiter
-Klasen
-Kittles
-Kissack
-Kirtland
-Kirschenmann
-Kirckof
-Kiphart
-Kinstler
-Kinion
-Kilton
-Killman
-Kiehl
-Kief
-Kett
-Kesling
-Keske
-Kerstein
-Kepple
-Keneipp
-Kempson
-Kempel
-Kelp
-Kehm
-Kehler
-Keh
-Keeran
-Keedy
-Kebert
-Keast
-Kearbey
-Kawaguchi
-Kaupu
-Kauble
-Katzenbach
-Kate
-Katcher
-Kartes
-Karpowicz
-Karpf
-Karen
-Karban
-Kanzler
-Kanarek
-Kamper
-Kaman
-Kalsow
-Kalafut
-Kaeser
-Kaercher
-Kaeo
-Kaeding
-Jurewicz
-Julson
-Jozwick
-Jollie
-Johnigan
-Johll
-Jochum
-Jewkes
-Jestes
-Jeska
-Jersey
-Jereb
-Jayson
-Jaurez
-Jarecki
-Jansma
-Janosik
-Jandris
-Jamin
-Jahr
-Jacot
-Jabs
-Ivens
-Itson
-Isenhower
-Iovino
-Ionescu
-Ingrum
-Ingels
-Inch
-Imrie
-Imlay
-Ihlenfeld
-Ihde
-Igou
-Ibach
-Huyett
-Hurry
-Huppe
-Hultberg
-Hullihen
-Hugi
-Hueso
-Huesman
-Hsiao
-Hronek
-Hovde
-Housewright
-Houlahan
-Hougham
-Houchen
-Hostler
-Hoster
-Hosang
-Hornik
-Hornes
-Horio
-Honyumptewa
-Honeyman
-Honer
-Hommerding
-Holsworth
-Hollobaugh
-Hollinshead
-Hollands
-Hollan
-Holecek
-Holdorf
-Hokes
-Hogston
-Hoesly
-Hodkinson
-Hodgman
-Hodgens
-Hochstedler
-Hochhauser
-Hobbie
-Hoare
-Hnat
-Hiss
-Hiskey
-Hirschy
-Hinostroza
-Hink
-Hing
-Hillmer
-Hillian
-Hillerman
-Hietala
-Hierro
-Hickling
-Hickingbottom
-Heye
-Heubusch
-Hesselschward
-Herriot
-Hernon
-Hermida
-Hermans
-Hentschel
-Henningson
-Henneke
-Henk
-Heninger
-Heltsley
-Helmle
-Helminiak
-Helmes
-Hellner
-Hellmuth
-Helke
-Heitmeyer
-Heird
-Heinle
-Heinicke
-Heinandez
-Heimsoth
-Heimlich
-Heibel
-Hegyi
-Heggan
-Hefel
-Heeralall
-Hedrington
-Heacox
-Hazlegrove
-Hazelett
-Haymore
-Havenhill
-Hautala
-Hascall
-Harvie
-Hartrick
-Hartling
-Harrer
-Harles
-Hargenrader
-Hanshew
-Hanly
-Hankla
-Hanisch
-Hancox
-Hammann
-Hambelton
-Halseth
-Hallisey
-Halleck
-Hallas
-Haisley
-Hairr
-Hainey
-Hainer
-Hailstock
-Haertel
-Guzek
-Guyett
-Guster
-Gussler
-Gurwitz
-Gurka
-Gunsolus
-Guinane
-Guiden
-Gugliotti
-Guevin
-Guevarra
-Guerard
-Gudaitis
-Guadeloupe
-Gschwind
-Grupe
-Grumbach
-Gruenes
-Gruenberg
-Grosser
-Grom
-Grodski
-Groden
-Grizzel
-Gritten
-Griswald
-Grishaber
-Grinage
-Grimwood
-Grims
-Griffon
-Griffies
-Gribben
-Grew
-Gressley
-Gren
-Greenstreet
-Grealish
-Gravett
-Grantz
-Granfield
-Granade
-Gowell
-Gossom
-Gorsky
-Goring
-Goodnow
-Goodfriend
-Goodemote
-Golob
-Gollnick
-Golladay
-Goldwyn
-Goldsboro
-Golds
-Goldrick
-Gohring
-Gohn
-Goettsch
-Goertzen
-Goelz
-Godinho
-Goans
-Glumac
-Gleisner
-Gleen
-Glassner
-Glanzer
-Gladue
-Gjelaj
-Givhan
-Girty
-Girone
-Girgenti
-Giorgianni
-Gilpatric
-Gillihan
-Gillet
-Gilbar
-Gierut
-Gierhart
-Gibert
-Gianotti
-Giannetto
-Gianelli
-Giambanco
-Gharing
-Geurts
-Gettis
-Gettel
-Gest
-Germani
-Gerdis
-Gerbitz
-Geppert
-Gennings
-Gemmer
-Gelvin
-Gellert
-Gehler
-Geddings
-Gearon
-Geach
-Gazaille
-Gayheart
-Gauld
-Gaukel
-Gaudio
-Gato
-Gathing
-Gasque
-Garstka
-Garsee
-Garringer
-Garofano
-Garo
-Garnsey
-Garigen
-Garcias
-Garbe
-Ganoung
-Ganfield
-Ganaway
-Gamero
-Galuska
-Galster
-Gallacher
-Galinski
-Galimi
-Galik
-Galeazzi
-Galdo
-Galdames
-Galas
-Galanis
-Gaglio
-Gaff
-Gaeddert
-Gadapee
-Fussner
-Furukawa
-Fuhs
-Fuerte
-Fuerstenberg
-Fryrear
-Fruits
-Froese
-Fringer
-Frieson
-Friesenhahn
-Frieler
-Friede
-Freymuth
-Freyman
-Freudenberg
-Freman
-Fredricksen
-Frech
-Frasch
-Frantum
-Frankin
-Franca
-Frago
-Fragnoli
-Fouquet
-Fossen
-Foskett
-Forner
-Formosa
-Formisano
-Forget
-Fooks
-Fons
-Folino
-Flott
-Floor
-Flesch
-Flener
-Flemmons
-Flattery
-Flanagin
-Flamino
-Flamand
-Fitzerald
-Findling
-Filsinger
-Fillyaw
-Fillinger
-Fiechter
-Ferre
-Ferdon
-Feldkamp
-Fazzio
-Favia
-Faulconer
-Faughnan
-Faubel
-Fassler
-Faso
-Farrey
-Farrare
-Farnworth
-Farland
-Fairrow
-Faille
-Faherty
-Fagnant
-Fabula
-Fabbri
-Eylicio
-Esteve
-Estala
-Espericueta
-Escajeda
-Erlich
-Equia
-Epson
-Enrriquez
-Enomoto
-Enmon
-Engemann
-Emmerson
-Emmel
-Emler
-Emilio
-Elstad
-Ellwein
-Ellerson
-Eliott
-Eliassen
-Elchert
-Eisenbeis
-Eisel
-Eikenberry
-Eichholz
-Ehmer
-Edris
-Edgerson
-Echenique
-Eberley
-Eans
-Dziuk
-Dykhouse
-Dworak
-Dutt
-Dupas
-Duntz
-Dunshee
-Dunovant
-Dunnaway
-Dummermuth
-Duerson
-Duddy
-Ducotey
-Duchon
-Duchesneau
-Ducci
-Dubord
-Duberry
-Dubach
-Drummonds
-Droege
-Drish
-Drier
-Drexel
-Dresch
-Dresbach
-Drenner
-Drechsler
-Dowen
-Dotter
-Dosreis
-Doser
-Dorward
-Dorin
-Dorf
-Door
-Domeier
-Doler
-Doleman
-Dolbow
-Dolbin
-Dobrunz
-Dobransky
-Dobberstein
-Dlouhy
-Diosdado
-Dingmann
-Dimmer
-Dimarino
-Dimaria
-Dilly
-Dillenburg
-Dilaura
-Dieken
-Dickhaus
-Dibbles
-Dibben
-Diamante
-Dewilde
-Dewaard
-Devich
-Devenney
-Devaux
-Dettinger
-Desroberts
-Dershem
-Dersch
-Derita
-Derickson
-Depina
-Deorio
-Deoliveira
-Denzler
-Dentremont
-Denoble
-Demshar
-Demond
-Demint
-Demichele
-Demel
-Delzer
-Delval
-Delorbe
-Delli
-Delbridge
-Delanoy
-Delancy
-Delahoya
-Dekle
-Deitrick
-Deis
-Dehnert
-Degrate
-Defrance
-Deetz
-Deeg
-Decoster
-Decena
-Dearment
-Daughety
-Datt
-Darrough
-Danzer
-Dante
-Danielovich
-Dandurand
-Dancause
-Dalo
-Dalgleish
-Daisley
-Daft
-Dadlani
-Daddona
-Daddio
-Dacpano
-Cyprian
-Cutillo
-Cush
-Curz
-Curvin
-Cuna
-Cumber
-Cullom
-Cudworth
-Cubas
-Crysler
-Cryderman
-Crummey
-Crumbly
-Crookshanks
-Croes
-Criscione
-Crimes
-Crespi
-Cresci
-Creaser
-Craton
-Cramp
-Cradle
-Cowin
-Cowdrey
-Coutcher
-Cotterman
-Cosselman
-Cosgriff
-Cortner
-Corsini
-Corporan
-Corniel
-Cornick
-Cordts
-Cordial
-Copening
-Coolman
-Connick
-Conlisk
-Conelli
-Common
-Comito
-Colten
-Colling
-Colletta
-Coldivar
-Colclasure
-Colantuono
-Colaizzi
-Coggeshall
-Cockman
-Cockfield
-Cobourn
-Cobo
-Cobarrubias
-Clyatt
-Cloney
-Clonch
-Climes
-Cleckner
-Clearo
-Claybourne
-Clavin
-Claridge
-Claffey
-Ciufo
-Cisnero
-Cipollone
-Cieslik
-Ciejka
-Cichocki
-Cicchetti
-Cianflone
-Chrusciel
-Christesen
-Chmielowiec
-Chirino
-Chillis
-Chihuahua
-Chhoun
-Chevas
-Chehab
-Chaviano
-Chavaria
-Chasten
-Charbonnet
-Chanley
-Champoux
-Champa
-Chalifoux
-Cerio
-Cedotal
-Cech
-Cavett
-Cavendish
-Catoire
-Castronovo
-Castellucci
-Castellow
-Castaner
-Casso
-Cassels
-Cassatt
-Cassar
-Cashon
-Cartright
-Carros
-Carrisalez
-Carrig
-Carrejo
-Carnicelli
-Carnett
-Carlise
-Carline
-Carhart
-Caren
-Cardova
-Cardell
-Carchi
-Caram
-Caquias
-Capper
-Capizzi
-Capano
-Cannedy
-Campese
-Calvello
-Callon
-Callins
-Callies
-Callicutt
-Calix
-Calin
-Califf
-Calderaro
-Caldeira
-Cadriel
-Cadmus
-Cadman
-Caccamise
-Buys
-Buttermore
-Butay
-Bustamente
-Busa
-Burmester
-Burkard
-Burhans
-Burgert
-Bure
-Burdin
-Bullman
-Bulin
-Buelna
-Buehner
-Budin
-Buco
-Buckhanon
-Bryars
-Brutger
-Brus
-Brumitt
-Brum
-Bruer
-Brucato
-Broyhill
-Broy
-Brownrigg
-Brownie
-Brossart
-Brookings
-Broden
-Brocklehurst
-Brockert
-Bristo
-Briskey
-Brisbane
-Bringle
-Bries
-Briar
-Bressman
-Bren
-Branyan
-Brands
-Bramson
-Brammell
-Brallier
-Bozich
-Boysel
-Bowthorpe
-Bowron
-Bowin
-Boutilier
-Boulos
-Boullion
-Boughter
-Bottiglieri
-Borruso
-Borrow
-Borreggine
-Borns
-Borkoski
-Borghese
-Borenstein
-Boran
-Bora
-Booton
-Bonvillain
-Bonini
-Bong
-Bonello
-Bolls
-Boitnott
-Boike
-Bohnet
-Bohnenkamp
-Bohmer
-Boeson
-Boeneke
-Bodey
-Bocchino
-Bobrowski
-Bobic
-Bluestein
-Bloomingdale
-Blogg
-Blewitt
-Blenman
-Bleck
-Blaszak
-Blankenbeckle
-Blando
-Blanchfield
-Blancato
-Blalack
-Blakenship
-Blackett
-Bisping
-Birkner
-Birckhead
-Bingle
-Bineau
-Billiel
-Bigness
-Bies
-Bierer
-Bhalla
-Beyerlein
-Bew
-Betesh
-Besler
-Berzins
-Bertalan
-Berntsen
-Berna
-Bergo
-Berganza
-Bennis
-Benney
-Benkert
-Benjamen
-Benincasa
-Bengochia
-Bendle
-Bendana
-Benchoff
-Benbrook
-Belsito
-Belshaw
-Belinsky
-Belak
-Bela
-Beigert
-Beidleman
-Behen
-Befus
-Beel
-Beebee
-Bedonie
-Beckstrand
-Beckerle
-Beato
-Bears
-Bauguess
-Baughan
-Bauerle
-Battis
-Batis
-Bastone
-Bastille
-Bassetti
-Bashor
-Bary
-Bartunek
-Bartoletti
-Barro
-Barno
-Barnicle
-Barlage
-Barkus
-Barkdull
-Bari
-Barcellos
-Barbarino
-Baranski
-Baranick
-Bankert
-Banchero
-Ban
-Bambrick
-Bamberg
-Bambenek
-Balthrop
-Balmaceda
-Ballman
-Balistrieri
-Balcomb
-Balboni
-Balbi
-Bakshi
-Bagner
-Bagent
-Badasci
-Bacot
-Bache
-Babu
-Babione
-Babic
-Babers
-Babbs
-Awkward
-Avitabile
-Avers
-Avena
-Avance
-Ausley
-Auker
-Audas
-Aud
-Aubut
-Athearn
-Atcheson
-Astorino
-Asplund
-Aslanian
-Askari
-Ashmead
-Asby
-Asai
-Arterbury
-Artalejo
-Arqueta
-Arquero
-Arostegui
-Arnell
-Armeli
-Arista
-Arender
-Arca
-Arballo
-Aprea
-Applen
-Applegarth
-Apfel
-Antonello
-Antolin
-Antkowiak
-Angis
-Angione
-Angerman
-Angelilli
-Andujo
-Andrick
-Anderberg
-Amigon
-Ambers
-Amalfitano
-Alviso
-Alvez
-Altice
-Altes
-Almarez
-Allton
-Allston
-Allgeyer
-Allegretti
-Aliaga
-Algood
-Alberg
-Albarez
-Albaladejo
-Akre
-Aitkin
-Ahles
-Ahlberg
-Agnello
-Adrien
-Adinolfi
-Adamis
-Abramek
-Abolt
-Abitong
-Zurich
-Zurawski
-Zufall
-Zubke
-Zizzo
-Zipperer
-Zinner
-Zinda
-Ziller
-Zill
-Zevallos
-Zesati
-Zenzen
-Zentner
-Zellmann
-Zelinsky
-Zboral
-Zarcone
-Zapalac
-Zaldana
-Zakes
-Zaker
-Zahniser
-Zacherl
-Zabawa
-Zabaneh
-Yum
-Youse
-Youree
-Younis
-Yorty
-Yonce
-Yero
-Yerkey
-Yeck
-Yeargan
-Yauch
-Yashinski
-Yambo
-Xiang
-Wrinn
-Wrightsman
-Worton
-Wortley
-Worland
-Woolworth
-Woolfrey
-Woodhead
-Woltjer
-Wolfenden
-Wolden
-Wolchesky
-Wojick
-Woessner
-Witwer
-Witters
-Witchard
-Wissler
-Wisnieski
-Wisinski
-Winnike
-Winkowski
-Winkels
-Wingenter
-Wineman
-Winegardner
-Wimpy
-Wilridge
-Wilmont
-Willy
-Willians
-Williamsen
-Wilhide
-Wilhelmsen
-Wilhelmi
-Wildrick
-Wilden
-Wiland
-Wiker
-Wigglesworth
-Wiebusch
-Widdowson
-Wiant
-Wiacek
-Whittet
-Whitter
-Whitelock
-Whiteis
-Whiley
-Westrope
-Westpfahl
-Westin
-Wessman
-Wessinger
-Wesemann
-Wesby
-Wertheimer
-Weppler
-Wenke
-Wengler
-Wender
-Welp
-Weitzner
-Weissberg
-Weisenborn
-Weipert
-Weiman
-Weidmann
-Wehrsig
-Wehrenberg
-Weemes
-Weeman
-Wayner
-Waston
-Wasicek
-Wascom
-Wasco
-Warmath
-Warbritton
-Waltner
-Wallenstein
-Waldoch
-Waldal
-Wala
-Waide
-Wadlinger
-Wadhams
-Vullo
-Voorheis
-Vonbargen
-Volner
-Vollstedt
-Vollman
-Vold
-Voge
-Vittorio
-Virtue
-Virginia
-Violett
-Viney
-Vinciguerra
-Vinal
-Villata
-Villarrvel
-Vilanova
-Vigor
-Vigneault
-View
-Vielma
-Veyna
-Vessella
-Versteegh
-Verderber
-Venier
-Venice
-Venditti
-Velotta
-Vejarano
-Veil
-Vecchia
-Vecchi
-Vastine
-Vasguez
-Varella
-Vanry
-Vannah
-Vanhyning
-Vanhuss
-Vanhoff
-Vanhoesen
-Vandivort
-Vandevender
-Vanderlip
-Vanderkooi
-Vandebrink
-Vancott
-Vallien
-Vallas
-Vallandingham
-Valiquette
-Valasek
-Vahey
-Vagott
-Uyematsu
-Urbani
-Uran
-Upp
-Uno
-Union
-Umbach
-Udo
-Tyon
-Tyma
-Twyford
-Twombley
-Twohig
-Tutterrow
-Turnes
-Turkington
-Turchi
-Tunks
-Tumey
-Tumbaga
-Tuinstra
-Tsukamoto
-Tschetter
-Trussel
-Trubey
-Trovillion
-Troth
-Trostel
-Tron
-Trinka
-Trine
-Tribbey
-Triarsi
-Trevor
-Treto
-Trautz
-Tragesser
-Tooman
-Toolson
-Tonozzi
-Tomkiewicz
-Tomb
-Tomasso
-Tolin
-Tolfree
-Toelle
-Tisor
-Tiry
-Tinstman
-Timmermann
-Tillie
-Tickner
-Tiburcio
-Thunberg
-Thronton
-Thompsom
-Theil
-Thayne
-Thaggard
-Teschner
-Tensley
-Tenery
-Tempest
-Tellman
-Tellado
-Telep
-Teigen
-Teator
-Teall
-Tayag
-Tavis
-Tattersall
-Tassoni
-Tarshis
-Tappin
-Tappe
-Tansley
-Talone
-Talford
-Tainter
-Taha
-Taguchi
-Tacheny
-Tabak
-Szymczyk
-Szwaja
-Szopinski
-Sze
-Syvertsen
-Swogger
-Switcher
-Swist
-Swilling
-Swierczek
-Swiech
-Swickard
-Swiatek
-Swezey
-Swepson
-Sweezy
-Swaringen
-Swanagan
-Swailes
-Swade
-Sveum
-Svenningsen
-Svec
-Suttie
-Supry
-Sunga
-Summerhill
-Summars
-Sulit
-Stys
-Stutesman
-Stupak
-Stumpo
-Stuller
-Stuekerjuerge
-Stuckett
-Stuckel
-Stuchlik
-Stuard
-Strutton
-Strop
-Stromski
-Stroebel
-Strehlow
-Strause
-Strano
-Straney
-Stradling
-Stoyle
-Stormo
-Stopyra
-Stoots
-Stoop
-Stonis
-Stoltenburg
-Stoiber
-Stoessel
-Stitzer
-Stien
-Stichter
-Stezzi
-Stewert
-Stepler
-Steinkraus
-Stegemann
-Steeples
-Steenburg
-Steeley
-Staszak
-Stasko
-Starkson
-Stanwick
-Stanke
-Stanifer
-Stangel
-Stain
-Stai
-Squiers
-Sprout
-Springsteen
-Spraglin
-Spragins
-Spraberry
-Spoelstra
-Spisak
-Spirko
-Spille
-Spidel
-Speyer
-Speroni
-Spenst
-Speak
-Spartz
-Sparlin
-Sparacio
-Spaman
-Spainhower
-Sow
-Souers
-Souchet
-Sosbee
-Sorn
-Sorice
-Sorbo
-Soqui
-Somer
-Solon
-Soehl
-Sodergren
-Socorro
-Sobie
-Smucker
-Smsith
-Smoley
-Smolensky
-Smolenski
-Smolder
-Smethers
-Slusar
-Slowey
-Slonski
-Slemmons
-Slatkin
-Slates
-Slappy
-Slaney
-Slagter
-Slacum
-Skutnik
-Skrzypek
-Skibbe
-Sjostrom
-Sjoquist
-Sivret
-Sitko
-Sisca
-Sinnett
-Sineath
-Simoni
-Simar
-Simao
-Silvestro
-Silleman
-Silkwood
-Silha
-Silfies
-Silberhorn
-Silacci
-Sigrist
-Sieczkowski
-Sieczka
-Shure
-Shulz
-Shugrue
-Shrode
-Shown
-Shovlin
-Shortell
-Shonka
-Shiyou
-Shiraishi
-Shiplett
-Sheu
-Shermer
-Sherick
-Sheng
-Sheeks
-Shed
-Sharron
-Shantz
-Shakir
-Shaheed
-Shadoan
-Shadid
-Shackford
-Shabot
-Seung
-Seufert
-Setty
-Setters
-Servis
-Server
-Serres
-Serrell
-Serpico
-Serpas
-Serafine
-Sensenig
-Senft
-Semenec
-Semen
-Semas
-Semaan
-Selvera
-Sellmeyer
-Sek
-Segar
-Seever
-Seeney
-Seeliger
-Seehafer
-Seebach
-Sebben
-Seaward
-Seary
-Searl
-Searby
-Scotland
-Scordino
-Scolieri
-Scolaro
-Schwiebert
-Schwartze
-Schwaner
-Schuur
-Schupbach
-Schumacker
-Schum
-Schudel
-Schubbe
-Schroader
-Schramel
-Schollmeyer
-Schoenherr
-Schoeffler
-Schoeder
-Schnurr
-Schnorr
-Schneeman
-Schnake
-Schnaible
-Schmaus
-Schlotter
-Schinke
-Schimming
-Schimek
-Schikora
-Scheulen
-Scherping
-Schermer
-Scherb
-Schember
-Schellhase
-Schedler
-Schanck
-Schaffhauser
-Schaffert
-Schadler
-Scarola
-Scarfo
-Scarff
-Scantling
-Scaff
-Sayward
-Sayas
-Saxbury
-Savin
-Savel
-Savastano
-Savannah
-Sault
-Satre
-Sarkar
-Santellan
-Sandmeier
-Sampica
-Salvesen
-Saltis
-Salloum
-Salling
-Salce
-Salatino
-Salata
-Salamy
-Safe
-Sadowsky
-Sadlier
-Sabbatini
-Sabatelli
-Sabal
-Sabados
-Rydzewski
-Rybka
-Rybczyk
-Ruz
-Rusconi
-Rupright
-Rufino
-Ruffalo
-Rudiger
-Rudig
-Ruda
-Rubyor
-Royea
-Roxberry
-Rover
-Rouzer
-Roumeliotis
-Roston
-Rossmann
-Rosko
-Rosetta
-Rosene
-Rosenbluth
-Roseland
-Rosasco
-Rosano
-Rosal
-Rorabaugh
-Romie
-Romaro
-Rolstad
-Rollow
-Rohrich
-Roghair
-Rogala
-Roets
-Roen
-Roemmich
-Roelfs
-Roeker
-Roedl
-Roedel
-Rodeheaver
-Roddenberry
-Rockstad
-Rocchi
-Robirds
-Robben
-Robasciotti
-Robaina
-Rizzotto
-Rizzio
-Rittle
-Ritcher
-Rissman
-Riseden
-Ripa
-Rion
-Rintharamy
-Rinehimer
-Rinck
-Riling
-Rike
-Rietschlin
-Riesenberg
-Riemenschneid
-Rieland
-Rickenbaugh
-Rickenbach
-Riches
-Rhody
-Revells
-Reutter
-Respress
-Resnik
-Renton
-Remmel
-Reitmeyer
-Reitan
-Reister
-Reinstein
-Reino
-Reinkemeyer
-Reifschneider
-Reierson
-Reichle
-Rehmeier
-Rehl
-Regine
-Reeds
-Rede
-Records
-Recar
-Rebeiro
-Raybourn
-Rawl
-Rautio
-Raugust
-Raudenbush
-Raudales
-Rattan
-Rashad
-Rapuano
-Rapoport
-Rantanen
-Ransbottom
-Raner
-Ramkissoon
-Rambousek
-Raio
-Rainford
-Radakovich
-Rad
-Rabenhorst
-Quivers
-Quispe
-Quintin
-Quinoes
-Quince
-Quilici
-Quattrone
-Quates
-Quance
-Quale
-Purswell
-Purpora
-Pulera
-Pulcher
-Puckhaber
-Pryer
-Pruyne
-Pruit
-Prudencio
-Prows
-Protzman
-Prothero
-Prospero
-Prosperi
-Prospal
-Privott
-Pritchet
-Priem
-Prest
-Prell
-Preer
-Pree
-Preddy
-Preda
-Pravata
-Pradhan
-Potocki
-Postier
-Postema
-Posse
-Posadas
-Poremba
-Popper
-Popichak
-Ponti
-Pomrenke
-Pomponi
-Pomarico
-Pollok
-Polkinghorn
-Polino
-Pock
-Plough
-Plenty
-Plater
-Plagman
-Pipher
-Pinzone
-Pinkleton
-Pillette
-Pillers
-Pill
-Pilapil
-Pignone
-Pignatelli
-Piersol
-Piepho
-Picton
-Pickrel
-Picket
-Pichard
-Picchi
-Piatek
-Pharo
-Phanthanouvon
-Pettingill
-Pettinato
-Petrovits
-Pethtel
-Petersheim
-Pershing
-Perrez
-Perra
-Pergram
-Peretz
-Perego
-Perches
-Pennello
-Pennella
-Pennant
-Pendry
-Penaz
-Pellish
-Peeks
-Pecanty
-Peare
-Paysour
-Pavlovich
-Pavick
-Pavelko
-Paustian
-Patzer
-Patsy
-Patete
-Patadia
-Paszkiewicz
-Pase
-Pasculli
-Pascascio
-Parrotte
-Parlor
-Parajon
-Paparo
-Papandrea
-Paone
-Pantaleon
-Panning
-Paniccia
-Pancho
-Panarello
-Palmeter
-Pallan
-Palardy
-Pahmeier
-Padget
-Padel
-Oyster
-Oya
-Oxborrow
-Oveson
-Outwater
-Ottaway
-Otake
-Ostermeyer
-Osmer
-Osinski
-Osiecki
-Oroak
-Orndoff
-Orms
-Orkin
-Oregon
-Ordiway
-Opatz
-Onsurez
-Onishi
-Oliger
-Okubo
-Okoye
-Ohlmann
-Offord
-Offner
-Offerdahl
-Oesterle
-Oesch
-Odonnel
-Odeh
-Odebralski
-Obie
-Obermeier
-Oberhausen
-Obenshain
-Obenchain
-Oats
-Nute
-Nulty
-Norrington
-Norlin
-Nore
-Nordling
-Nordhoff
-Norder
-Nordan
-Norals
-Nogales
-Noboa
-Nitsche
-Niermann
-Nienhaus
-Niedringhaus
-Niedbalski
-Nicolella
-Nicolais
-Nickleberry
-Nicewander
-Newfield
-Neurohr
-Neumeier
-Netterville
-Nersesian
-Nern
-Nerio
-Nerby
-Nerbonne
-Neitz
-Neighbours
-Neighbor
-Neidecker
-Neat
-Neason
-Nead
-Navratil
-Naves
-Nastase
-Nasir
-Nasca
-Narine
-Narimatsu
-Nard
-Narayanan
-Nappo
-Namm
-Nalbone
-Nakonechny
-Nabarro
-Myott
-Muthler
-Muscatello
-Murriel
-Murin
-Murders
-Muoio
-Mundel
-Munafo
-Mulch
-Mukherjee
-Muffoletto
-Muessig
-Muckey
-Mucher
-Mruk
-Moyd
-Mowell
-Mowatt
-Moutray
-Mourning
-Mou
-Motzer
-Moster
-Mortis
-Morgenroth
-Morga
-Morataya
-Montross
-Montezuma
-Monterroza
-Montemarano
-Montello
-Montbriand
-Montavon
-Montaque
-Monigold
-Monforte
-Molgard
-Moleski
-Mohsin
-Mohead
-Mofield
-Moerbe
-Moeder
-Mochizuki
-Miyazaki
-Miyasaki
-Mital
-Miskin
-Mischler
-Minus
-Minniear
-Minero
-Milosevic
-Mildenhall
-Mila
-Mikhail
-Mielsch
-Midden
-Michonski
-Michniak
-Michitsch
-Michelotti
-Micheli
-Michelfelder
-Michand
-Miao
-Metelus
-Merkt
-Merando
-Meranda
-Mentz
-Meneley
-Menaker
-Memory
-Melino
-Meir
-Mehaffy
-Meehl
-Meech
-Meczywor
-Mcweeney
-Mcumber
-Mcredmond
-Mcneer
-Mcnay
-Mcmikle
-Mcmaken
-Mclaurine
-Mclauglin
-Mclaney
-Mckune
-Mckinnies
-Mckague
-Mchattie
-Mcgrapth
-Mcglothen
-Mcgath
-Mcfolley
-Mcdannell
-Mccurty
-Mccort
-Mcclymonds
-Mcclimon
-Mcclamy
-Mccaughan
-Mccartan
-Mccan
-Mccadden
-Mcburnie
-Mcburnett
-Mcbryar
-Mcannally
-Mcalevy
-Mcaleese
-Maytorena
-Mayrant
-Mayol
-Mayland
-Mayeaux
-Mauter
-Matthewson
-Mathiew
-Matern
-Matera
-Maslow
-Mashore
-Masaki
-Maruco
-Martorell
-Martenez
-Marry
-Marrujo
-Marrison
-Maroun
-Markway
-Markos
-Markoff
-Markman
-Marian
-Marello
-Marbry
-Marban
-Maranda
-Maphis
-Manuele
-Mansel
-Manganello
-Mandrell
-Mandoza
-Manard
-Manago
-Maltba
-Mallick
-Mallak
-Maline
-Malikowski
-Majure
-Majcher
-Maise
-Mahl
-Maffit
-Maffeo
-Madueno
-Madlem
-Madariaga
-Macvane
-Mackler
-Macconnell
-Macchi
-Maccarone
-Lyng
-Lynchard
-Lura
-Lunning
-Luneau
-Lunden
-Lumbra
-Lumbert
-Lueth
-Ludington
-Luckado
-Lucchini
-Lucatero
-Luallen
-Lozeau
-Lowen
-Lovera
-Lovelock
-Louck
-Lothian
-Lorio
-Lorimer
-Lorge
-Loretto
-Longhenry
-Lonas
-Loiseau
-Lohrman
-Logel
-Loft
-Locks
-Lockie
-Llerena
-Livington
-Liuzzi
-Liscomb
-Lippeatt
-Liou
-Linhardt
-Lindelof
-Lindbo
-Limehouse
-Limage
-Lillo
-Lillian
-Lilburn
-Liggons
-Lidster
-Liddy
-Liddick
-Lich
-Liberato
-Lian
-Lia
-Leysath
-Lewelling
-Lesney
-Leser
-Lescano
-Leonette
-Lentsch
-Lenius
-Lemmo
-Lemming
-Lemcke
-Lein
-Leggette
-Legerski
-Legard
-Leever
-Leete
-Ledin
-Lecomte
-Lecocq
-Leakes
-Leab
-Lazarz
-Layous
-Lawrey
-Lawery
-Lauze
-Lautz
-Laughinghouse
-Latulippe
-Lattus
-Lattanzio
-Later
-Lascano
-Larmer
-Laris
-Larcher
-Laprise
-Lapin
-Lapage
-Lano
-Langseth
-Langman
-Langland
-Landstrom
-Landsberg
-Landsaw
-Landram
-Lamphier
-Lamendola
-Lamberty
-Lakhani
-Laker
-Lajara
-Lagrow
-Lagman
-Ladewig
-Laderman
-Ladden
-Lacrue
-Laclaire
-Lachut
-Lachner
-Kwit
-Kvamme
-Kvam
-Kutscher
-Kushi
-Kurgan
-Kunsch
-Kundert
-Kun
-Kulju
-Kukene
-Kudo
-Kubin
-Kubes
-Kuberski
-Krystofiak
-Kruppa
-Krul
-Krukowski
-Kruegel
-Kronemeyer
-Krock
-Kriston
-Kretzer
-Krenn
-Kralik
-Krafft
-Krabill
-Kozisek
-Kovich
-Koverman
-Kovatch
-Kovarik
-Kotlowski
-Kosmala
-Kosky
-Kosir
-Kosa
-Korpi
-Kornbluth
-Koppen
-Kooistra
-Kohlhepp
-Kofahl
-Koeneman
-Koebel
-Koczur
-Kobrin
-Kobashigawa
-Koba
-Knuteson
-Knoff
-Knoble
-Knipper
-Knierim
-Kneisley
-Klusman
-Kloc
-Klitzing
-Klinko
-Klinefelter
-Klemetson
-Kleinpeter
-Klauser
-Klatte
-Klaren
-Klare
-Kissam
-Kirkhart
-Kirchmeier
-Kinzinger
-Kindt
-Kincy
-Kincey
-Kimoto
-Killingworth
-Kilcullen
-Kilbury
-Kietzman
-Kienle
-Kiedrowski
-Kidane
-Khamo
-Khalili
-Ketterling
-Ketchem
-Kessenich
-Kessell
-Kepp
-Kenon
-Kenning
-Kennady
-Kendzior
-Kemppainen
-Kellermann
-Keirns
-Keilen
-Keiffer
-Kehew
-Keelan
-Keawe
-Keator
-Kealy
-Keady
-Kathman
-Kastler
-Kastanes
-Kassab
-Karren
-Karpin
-Karau
-Karathanasis
-Kara
-Kaps
-Kaplun
-Kapaun
-Kannenberg
-Kanipe
-Kander
-Kandel
-Kanas
-Kanan
-Kamke
-Kaltenbach
-Kallenberger
-Kallam
-Kali
-Kaley
-Kafton
-Kafer
-Kabler
-Kaaihue
-Jupiter
-Jundt
-Jubilee
-Jovanovich
-Jojola
-Johnstad
-Jodon
-Joachin
-Jinright
-Jew
-Jessick
-Jeronimo
-Jerald
-Jenne
-Jelsma
-Jeannotte
-Jeangilles
-Jaworsky
-Jaubert
-Jarry
-Jarrette
-Jarreau
-Jarett
-Janos
-Janecka
-Janczak
-Jalomo
-Jagoda
-Jagla
-Jacquier
-Jaber
-Iwata
-Ivanoff
-Isola
-Iserman
-Isais
-Isaacks
-Iron
-Inverso
-Infinger
-Ibsen
-Hyser
-Hylan
-Hybarger
-Hwee
-Hutchenson
-Hutchcroft
-Husar
-Hurlebaus
-Hunsley
-Hunker
-Hummingbird
-Humberson
-Hulst
-Hulon
-Huhtala
-Hugill
-Hugghins
-Huffmaster
-Huckeba
-Hrabovsky
-Howden
-Hoverson
-Houts
-Houskeeper
-Housh
-Hosten
-Horras
-Horchler
-Hor
-Hopke
-Hooke
-Honie
-Holtsoi
-Holsomback
-Holoway
-Holmstead
-Hoistion
-Hohnstein
-Hoheisel
-Hoguet
-Hoggle
-Hogenson
-Hoffstetter
-Hoffler
-Hoffa
-Hofe
-Hoefling
-Hoague
-Hizer
-Hirschfield
-Hironaka
-Hiraldo
-Hinote
-Hingston
-Hind
-Hinaman
-Hillie
-Hillesheim
-Hilderman
-Hiestand
-Heyser
-Heys
-Hews
-Hew
-Hertler
-Herrero
-Herrandez
-Heppe
-Henle
-Henkensiefken
-Henigan
-Henandez
-Henagan
-Hemberger
-Heman
-Helser
-Helmich
-Hellinger
-Helfrick
-Heldenbrand
-Heinonen
-Heineck
-Heikes
-Heidkamp
-Heglar
-Heffren
-Heelan
-Hedgebeth
-Heckmann
-Heckaman
-Hechmer
-Hazelhurst
-Hawken
-Haverkamp
-Havatone
-Hausauer
-Hasch
-Harwick
-Hartse
-Harts
-Harrower
-Harle
-Hargroder
-Hardway
-Hardinger
-Hardemon
-Harbeck
-Hant
-Hamre
-Hamberg
-Hallback
-Haisten
-Hailstone
-Hahl
-Hagner
-Hagman
-Hagemeyer
-Haeussler
-Hackwell
-Haby
-Haataja
-Gverrero
-Gustovich
-Gustave
-Guske
-Gushee
-Gurski
-Gurnett
-Gura
-Gunto
-Gunselman
-Gugler
-Gudmundson
-Gudinas
-Guarneri
-Grumbine
-Gruis
-Grotz
-Grosskopf
-Grosman
-Grosbier
-Grinter
-Grilley
-Grieger
-Grewal
-Gressler
-Greaser
-Graus
-Grasman
-Graser
-Grannan
-Granath
-Gramer
-Graboski
-Goyne
-Gowler
-Gottwald
-Gottesman
-Goshay
-Gorr
-Gorovitz
-Gores
-Goossens
-Goodier
-Goodhue
-Gonzeles
-Gonzalos
-Gonnella
-Golomb
-Golick
-Golembiewski
-Goeke
-Godzik
-Goar
-Glosser
-Glendenning
-Glendening
-Glatter
-Glas
-Gittings
-Gitter
-Gisin
-Giscombe
-Gimlin
-Gillitzer
-Gillick
-Gilliand
-Gilb
-Gigler
-Gidden
-Gibeau
-Gibble
-Gianunzio
-Giannattasio
-Gertelman
-Gerosa
-Gerold
-Gerland
-Gerig
-Gerecke
-Gerbino
-Genz
-Genovesi
-Genet
-Gelrud
-Geitgey
-Geiszler
-Gehrlein
-Gazzo
-Gawrys
-Gavilanes
-Gaulden
-Gate
-Garthwaite
-Garmoe
-Gargis
-Gara
-Gannett
-Galligher
-Galler
-Galleher
-Gallahan
-Galford
-Gal
-Gahn
-Gacek
-Gabert
-Fuster
-Furuya
-Furse
-Fujihara
-Fuhriman
-Fruit
-Frueh
-Fromme
-From
-Froemming
-Friskney
-Frietas
-Freiler
-Freelove
-Freber
-Frear
-Frankl
-Frankenfield
-Franey
-Francke
-Foxworthy
-Formella
-Foringer
-Forgue
-Forge
-Fonnesbeck
-Fonceca
-Folland
-Fodera
-Fode
-Floresca
-Fleurent
-Fleshner
-Flentge
-Fleischhacker
-Fleeger
-Flecher
-Flam
-Flair
-Flaim
-Fivecoat
-Firebaugh
-Fioretti
-Finucane
-Filley
-Figuroa
-Figuerda
-Fiddelke
-Feurtado
-Fetterly
-Fessel
-Femia
-Feild
-Fehling
-Fegett
-Fedde
-Fechter
-Fawver
-Faustino
-Faulhaber
-Fatchett
-Fassnacht
-Fashaw
-Fasel
-Farrugia
-Farran
-Farness
-Farhart
-Farbman
-Fama
-Falwell
-Falvo
-Falling
-Falkenstein
-Falin
-Failor
-Faigin
-Fagundo
-Fague
-Fagnan
-Fagerstrom
-Faden
-Eytchison
-Eyles
-Ewy
-Evon
-Everage
-Evangelist
-Estrin
-Estorga
-Esponda
-Espindola
-Escher
-Esche
-Escarsega
-Escandon
-Erven
-Erding
-Eplin
-Enix
-Englade
-Engdahl
-Enck
-Emmette
-Embery
-Emberson
-Eltzroth
-Else
-Elsayed
-Ellerby
-Ellens
-Elhard
-Elfers
-Elazegui
-Eisermann
-Eilertson
-Eiben
-Ehrhard
-Ehresman
-Egolf
-Egnew
-Eggins
-Efron
-Effland
-Eduardo
-Edminster
-Edgeston
-Ede
-Eckstrom
-Eckhard
-Eckford
-Echoles
-Ebsen
-Eatherly
-Eastlick
-Earnheart
-Ear
-Dykhuizen
-Dyas
-Duttweiler
-Dutka
-Dutch
-Dusenbury
-Dusenbery
-Durre
-Durnil
-Durnell
-Durie
-Durhan
-Durando
-Dupriest
-Dunsmoor
-Dunseith
-Dunnum
-Dunman
-Dunlevy
-Duma
-Dulude
-Dulong
-Duignan
-Dugar
-Dufek
-Ducos
-Duchaine
-Duch
-Dubow
-Drowne
-Dross
-Drollinger
-Droke
-Driggars
-Dredge
-Drawhorn
-Drach
-Drabek
-Doyne
-Doukas
-Dorvil
-Dorow
-Doroski
-Dornak
-Dormer
-Dorian
-Donnelson
-Donna
-Donn
-Donivan
-Dondero
-Dompe
-Dolle
-Doakes
-Diza
-Dixie
-Divirgilio
-Ditore
-Distel
-Disimone
-Disbro
-Dipiero
-Dingson
-Diluzio
-Dillehay
-Dilbert
-Digiorgio
-Diflorio
-Dietzler
-Dietsch
-Dieterle
-Dierolf
-Dierker
-Dicostanzo
-Dicesare
-Dexheimer
-Dewitte
-Dewing
-Devoti
-Devincentis
-Devary
-Deutschman
-Dettloff
-Detienne
-Destasio
-Dest
-Despard
-Desmet
-Deslatte
-Desfosses
-Derise
-Derenzo
-Deppner
-Depolo
-Denoyer
-Denoon
-Denno
-Denne
-Deniston
-Denike
-Denes
-Demoya
-Demick
-Demicco
-Demetriou
-Demange
-Delva
-Delorge
-Delley
-Delisio
-Delhoyo
-Delgrande
-Delgatto
-Delcour
-Delair
-Deinert
-Degruy
-Degrave
-Degeyter
-Defino
-Deffenbaugh
-Deener
-Decook
-Decant
-Deboe
-Deblanc
-Deatley
-Dearmitt
-Deale
-Deaguiar
-Dayan
-Daus
-Dauberman
-Datz
-Dase
-Dary
-Dartt
-Darocha
-Dario
-Dari
-Dardis
-Dapper
-Danowski
-Dancel
-Dami
-Dallmann
-Dalere
-Dalba
-Dakan
-Daise
-Dailing
-Dahan
-Dagnan
-Daggs
-Dagan
-Czarkowski
-Czaplinski
-Cutten
-Curtice
-Curenton
-Cure
-Curboy
-Cura
-Culliton
-Culberth
-Cucchiara
-Cubbison
-Csaszar
-Crytser
-Crotzer
-Crossgrove
-Crosser
-Croshaw
-Croissant
-Crocco
-Critzer
-Creveling
-Cressy
-Creps
-Creese
-Cratic
-Crate
-Craigo
-Craigen
-Craib
-Cracchiolo
-Crable
-Coykendall
-Cowick
-Coville
-Couzens
-Coutch
-Cousens
-Cousain
-Counselman
-Coult
-Cotterell
-Cott
-Cotham
-Corsaut
-Corriere
-Corredor
-Cornet
-Cornelia
-Corkum
-Coreas
-Cordoza
-Corbet
-Corathers
-Conwill
-Contreas
-Consuegra
-Constanza
-Conolly
-Conedy
-Companion
-Comins
-Combee
-Colosi
-Colom
-Colmenares
-Collymore
-Colleran
-Colina
-Colaw
-Colatruglio
-Colantro
-Colantonio
-Cohea
-Cogill
-Codner
-Code
-Codding
-Cockram
-Cocanougher
-Cobine
-Cluckey
-Clucas
-Cloward
-Cloke
-Clisham
-Clipper
-Clinebell
-Cliffe
-Clendenen
-Cisowski
-Cirelli
-Ciraolo
-Ciocca
-Cintora
-Ciesco
-Cibrian
-Chupka
-Chugg
-Christmann
-Choma
-Chiverton
-Chirinos
-Chinen
-Chimenti
-Chima
-Cheuvront
-Chesla
-Chesher
-Chesebro
-Chern
-Chehebar
-Cheatum
-Chastine
-Chapnick
-Chapelle
-Chambley
-Cercy
-Celius
-Celano
-Cayea
-Cavicchi
-Cattell
-Catanach
-Catacutan
-Castelluccio
-Castellani
-Cassmeyer
-Cassetta
-Cassada
-Caspi
-Cashmore
-Casebier
-Casanas
-Carrothers
-Carrizal
-Carriveau
-Carretero
-Carradine
-Carosella
-Carnine
-Carmel
-Carloni
-Carkhuff
-Cardosi
-Cardo
-Carchidi
-Caravello
-Caranza
-Carandang
-Capes
-Cantrall
-Canpos
-Canoy
-Cannizzaro
-Canion
-Canida
-Canham
-Cangemi
-Cange
-Candle
-Cancelliere
-Canard
-Camarda
-Calverley
-Calogero
-Callendar
-Calame
-Cadrette
-Cachero
-Caccavale
-Cabreros
-Cabrero
-Cabrara
-Cabler
-Butzer
-Butte
-Butrick
-Butala
-Bustios
-Busser
-Busic
-Bushorn
-Busher
-Burmaster
-Burl
-Burkland
-Burkins
-Burkert
-Burgueno
-Burgraff
-Buren
-Burel
-Burdon
-Burck
-Burby
-Buoy
-Bunk
-Bumford
-Bulock
-Bujnowski
-Buggie
-Buffy
-Budine
-Bucciero
-Bubier
-Brzoska
-Brydges
-Brumlow
-Brosseau
-Brooksher
-Brokke
-Broeker
-Brittin
-Bristle
-Briano
-Briand
-Brettschneide
-Bresnan
-Brentson
-Brenneis
-Brender
-Brazle
-Brassil
-Brasington
-Branstrom
-Branon
-Branker
-Brandwein
-Brandau
-Brana
-Bralley
-Brailey
-Brague
-Brade
-Bozzi
-Bownds
-Bowmer
-Bournes
-Bour
-Bouchey
-Botto
-Boteler
-Borroel
-Borra
-Boroski
-Boothroyd
-Boord
-Bonny
-Bonga
-Bonato
-Bonadonna
-Bolejack
-Boldman
-Boiser
-Boggio
-Bogacki
-Boerboom
-Boehnlein
-Boehle
-Bodah
-Bobst
-Boak
-Bluemel
-Blockmon
-Blitch
-Blincoe
-Bleier
-Blaydes
-Blasius
-Bittel
-Bir
-Binsfeld
-Bindel
-Bilotti
-Billiott
-Bilbrew
-Bihm
-Biersner
-Bielat
-Bidrowski
-Bickler
-Biasi
-Bianca
-Bhola
-Bhat
-Bewick
-Betzen
-Bettridge
-Betti
-Betsch
-Besley
-Beshero
-Besa
-Bertoli
-Berstein
-Berrien
-Berrie
-Berrell
-Bermel
-Berenguer
-Benzer
-Bensing
-Bennie
-Benedix
-Bemo
-Belile
-Beilman
-Behunin
-Behrmann
-Bedient
-Becht
-Beaule
-Beaudreault
-Bealle
-Beagley
-Bayuk
-Bayot
-Bayliff
-Baugess
-Battistoni
-Batrum
-Basinski
-Basgall
-Bartolomei
-Bartnik
-Bartl
-Bartko
-Bartholomay
-Barthlow
-Bartgis
-Barsness
-Barski
-Barlette
-Barickman
-Bargen
-Bardon
-Barcliff
-Barbu
-Barbar
-Barakat
-Baracani
-Baraban
-Banos
-Banko
-Bania
-Bambach
-Balok
-Balogun
-Bally
-Baldini
-Balck
-Balcer
-Balash
-Baim
-Bailor
-Bahm
-Bahar
-Bagshaw
-Baggerly
-Badie
-Badal
-Backues
-Babino
-Ba
-Aydelott
-Awbrey
-Aversano
-Avansino
-Auyon
-Aukamp
-Aujla
-Augenstein
-Astacio
-Ast
-Asplin
-Asato
-Asano
-Aruizu
-Artale
-Arrick
-Arneecher
-Armelin
-Armbrester
-Armacost
-Arkell
-Argue
-Argrave
-Areizaga
-Areas
-Apolo
-Anzures
-Anzualda
-Antwi
-Antillon
-Antenor
-Annand
-Anhalt
-Angove
-Anglemyer
-Anglada
-Angiano
-Angeloni
-Andaya
-Ancrum
-Anagnos
-Ammirati
-Amescua
-America
-Ambrosius
-Amacker
-Amacher
-Amabile
-Alvizo
-Alvernaz
-Alvara
-Altobelli
-Altobell
-Althauser
-Alterman
-Altavilla
-Alsip
-Alphonso
-Almeyda
-Almeter
-Alman
-Allscheid
-Allaman
-Aliotta
-Alicia
-Aliberti
-Alghamdi
-Alfonzo
-Albiston
-Alberta
-Alberding
-Alarie
-Alano
-Aja
-Ailes
-Ahsan
-Ahrenstorff
-Ahler
-Aerni
-Ackland
-Achor
-Acero
-Acebo
-Ace
-Abshier
-Abruzzo
-Abrom
-Abood
-Abnet
-Abend
-Abegg
-Abbruzzese
-Aaberg
-Zysk
-Zutell
-Zumstein
-Zummo
-Zuhlke
-Zuehlsdorff
-Zuch
-Zucconi
-Zortman
-Zohn
-Ziv
-Zingone
-Zingg
-Zingale
-Zima
-Zientek
-Zieg
-Zervas
-Zerger
-Zenk
-Zeldin
-Zeiss
-Zeiders
-Zediker
-Zea
-Zavodny
-Zarazua
-Zappone
-Zappala
-Zapanta
-Zaniboni
-Zanchi
-Zampedri
-Zaller
-Zakrajsek
-Zagar
-Zadrozny
-Zablocki
-Zable
-Yust
-Yunk
-Youngkin
-Yosten
-Yockers
-Yochim
-Yerke
-Yerena
-Yeast
-Yanos
-Yam
-Wysinger
-Wyner
-Wrisley
-Woznicki
-Wortz
-Worsell
-Wooters
-Woon
-Woolcock
-Woodke
-Wonnacott
-Wolnik
-Wittstock
-Witting
-Witry
-Witfield
-Witcraft
-Wissmann
-Wissink
-Wisehart
-Wiscount
-Wironen
-Wipf
-Winterrowd
-Wingett
-Windon
-Windish
-Windisch
-Windes
-Wiltbank
-Willmarth
-Willick
-Wiler
-Wieseler
-Wiedmaier
-Wiederstein
-Wiedenheft
-Wieberg
-Wickware
-Wickkiser
-Wickell
-Whittmore
-Whitker
-Whitegoat
-Whitcraft
-Whisonant
-Whisby
-Whetsell
-Whedon
-Westry
-Westcoat
-Wernimont
-Wentling
-Wendlandt
-Wencl
-Weisgarber
-Weininger
-Weikle
-Weigold
-Weigl
-Weichbrodt
-Wehrli
-Wehe
-Weege
-Weare
-Watland
-Wassmann
-Warzecha
-Warrix
-Warrell
-Warnack
-Waples
-Wantland
-Wanger
-Wandrei
-Wander
-Wanat
-Wampole
-Waltjen
-Walterscheid
-Waligora
-Walding
-Waldie
-Walczyk
-Wakins
-Waitman
-Wair
-Wainio
-Wahpekeche
-Wahlman
-Wagley
-Wagenknecht
-Wadle
-Waddoups
-Wadding
-Wack
-Vuono
-Vuillemot
-Vugteveen
-Vosmus
-Vorkink
-Vories
-Vondra
-Voelz
-Vlashi
-Vivo
-Vitelli
-Vitali
-Viscarra
-Virgo
-Vinet
-Vimont
-Villega
-Villard
-Vignola
-Viereck
-Videtto
-Vicoy
-Vessell
-Vescovi
-Verros
-Vernier
-Vernaglia
-Vergin
-Verdone
-Verdier
-Verastequi
-Vejar
-Vasile
-Vasi
-Varnadore
-Vardaro
-Vanzanten
-Vansumeren
-Vanschuyver
-Vanleeuwen
-Vanhowe
-Vanhoozer
-Vaness
-Vandewalker
-Vandevoorde
-Vandeveer
-Vanderzwaag
-Vanderweide
-Vanderhyde
-Vandellen
-Vanamburg
-Vanalst
-Vallin
-Valk
-Valerie
-Valentini
-Valcarcel
-Valasco
-Valadao
-Vacher
-Urquijo
-Unterreiner
-Unsicker
-Unser
-Unrau
-Undercoffler
-Uhm
-Uffelman
-Uemura
-Ueda
-Tyszko
-Tyska
-Tymon
-Tyce
-Tyacke
-Twinam
-Tutas
-Tussing
-Turmel
-Turkowski
-Turkel
-Turchetta
-Tupick
-Tumblin
-Tukes
-Tufte
-Tufo
-Tuey
-Tuell
-Tuckerman
-Tsutsumi
-Tsuchiya
-Try
-Trossbach
-Trivitt
-Trippi
-Trippensee
-Trimbach
-Trillo
-Triller
-Trible
-Tribe
-Tribby
-Trevisan
-Tresch
-Tramonte
-Traff
-Trad
-Tousey
-Totaro
-Torregrosa
-Torralba
-Torn
-Tolly
-Tofil
-Tofani
-Tobiassen
-Tippy
-Tiogangco
-Tino
-Tinnes
-Tingstrom
-Tingen
-Tine
-Tindol
-Tifft
-Tiffee
-Tiet
-Thuesen
-Thruston
-Throndson
-Thornsbury
-Thornes
-Thiery
-Thielman
-Thie
-Theilen
-Thede
-Thate
-Thane
-Thalacker
-Thaden
-Teuscher
-Terracina
-Terell
-Terada
-Tepfer
-Tennessee
-Tenneson
-Tenant
-Temores
-Temkin
-Tellers
-Telleria
-Teaque
-Tealer
-Teachey
-Tavakoli
-Tauras
-Taucher
-Tator
-Tartaglino
-Tarpy
-Tape
-Tannery
-Tani
-Tams
-Tamlin
-Tambe
-Tallis
-Talamante
-Takayama
-Takaki
-Takagi
-Taibl
-Taffe
-Tadesse
-Tade
-Tabeling
-Tabag
-Szoke
-Szoc
-Szala
-Szady
-Sysak
-Sylver
-Syler
-Swonger
-Swiggett
-Swensson
-Sweis
-Sweers
-Sweene
-Sweany
-Sweaney
-Swartwout
-Swamy
-Swales
-Swab
-Susman
-Surman
-Surgeon
-Sundblad
-Summerset
-Summerhays
-Sumerall
-Sule
-Sugimoto
-Subramanian
-Sturch
-Stupp
-Stunkard
-Stumpp
-Struiksma
-Stropes
-Stromyer
-Stromquist
-Strede
-Strazza
-Strauf
-Storniolo
-Storjohann
-Stonum
-Stonier
-Stonecypher
-Stoneberger
-Stollar
-Stokke
-Stokan
-Stoetzel
-Stoeckel
-Stockner
-Stockinger
-Stockholm
-Stockert
-Stockdill
-Stobbe
-Stitzel
-Stitely
-Stirgus
-Stigers
-Stettner
-Stettler
-Sterlin
-Sterbenz
-Stemp
-Stelluti
-Steinmeyer
-Steininger
-Steinauer
-Steigerwalt
-Steider
-Steady
-Stavrou
-Staufenberger
-Stassi
-Starin
-Stankus
-Stanaway
-Stammer
-Stakem
-Staino
-Stahlnecker
-Stagnitta
-Staelens
-Staal
-Srsen
-Sprott
-Sprigg
-Sprenkle
-Sprenkel
-Spreitzer
-Spraque
-Sprandel
-Spotted
-Sporn
-Spivak
-Spira
-Spiewak
-Spieth
-Spiering
-Sperow
-Speh
-Specking
-Spease
-Spead
-Sparger
-Spanier
-Spall
-Sower
-Southcott
-Sosna
-Soran
-Sookram
-Sonders
-Solak
-Sohr
-Sohl
-Sofranko
-Soderling
-Sochor
-Sobon
-Smutz
-Smudrick
-Smithj
-Smid
-Slosser
-Sliker
-Slenker
-Sleight
-Sleger
-Sleet
-Slaby
-Skousen
-Skilling
-Skibinski
-Skeeters
-Skeet
-Skees
-Skane
-Skafidas
-Sivic
-Sivertsen
-Sivers
-Sitra
-Sito
-Siracusa
-Sinicki
-Simpers
-Simley
-Simbeck
-Silberberg
-Siever
-Siegwarth
-Sidman
-Siddons
-Siddle
-Sibbett
-Si
-Shumard
-Shubrooks
-Shough
-Shorb
-Shoptaw
-Sholty
-Shoffstall
-Shiverdecker
-Shininger
-Shimasaki
-Shifrin
-Shiffler
-Sheston
-Sherr
-Sherill
-Shere
-Shepeard
-Shelquist
-Shells
-Sheler
-Shave
-Shauf
-Sharrar
-Sharpnack
-Shanon
-Shamsiddeen
-Shambley
-Shallenberger
-Shadler
-Shaban
-Sha
-Sferra
-Seys
-Sexauer
-Sevey
-Severo
-Setlak
-Seta
-Sesko
-Sersen
-Serratore
-Serdula
-Senechal
-Seldomridge
-Seilhamer
-Seifer
-Seidlitz
-Sehnert
-Sedam
-Sebron
-Seber
-Sebek
-Seavers
-Sear
-Scullark
-Scroger
-Scovill
-Sciascia
-Sciarra
-Schweers
-Schwarze
-Schummer
-Schultes
-Schuchardt
-Schuchard
-Schrieber
-Schrenk
-Schreifels
-Schowalter
-Schoultz
-Scholer
-Schofill
-Schoff
-Schnuerer
-Schnettler
-Schmitke
-Schmiege
-Schloop
-Schlinger
-Schlessman
-Schlesser
-Schlageter
-Schiess
-Schiefer
-Schiavoni
-Scherzer
-Scherich
-Schechtman
-Schebel
-Scharpman
-Schaich
-Schaap
-Scappaticci
-Scadlock
-Savocchia
-Savini
-Savers
-Save
-Savageau
-Sauvage
-Sause
-Sauerwein
-Sary
-Sarwary
-Sarnicola
-Santone
-Santoli
-Santalucia
-Santacruce
-Sansoucie
-Sankoff
-Sanes
-Sandri
-Sanderman
-Sammartano
-Salmonson
-Salmela
-Salmans
-Sallaz
-Salis
-Sakuma
-Sakowski
-Sajdak
-Sahm
-Sagredo
-Safrit
-Sade
-Sackey
-Sabio
-Sabino
-Sabina
-Rybolt
-Ruzzo
-Ruthstrom
-Ruta
-Russin
-Russian
-Russak
-Rusko
-Ruskin
-Rusiecki
-Ruscher
-Rupar
-Rumberger
-Rullan
-Ruliffson
-Ruhlman
-Ruger
-Rufenacht
-Ruelle
-Rudisell
-Rudi
-Rucci
-Rublee
-Ruberto
-Rubeck
-Rowett
-Rouge
-Rottinghaus
-Roton
-Rothgeb
-Rothgaber
-Rothermich
-Rostek
-Rossini
-Roskelley
-Rosing
-Rosi
-Rosewell
-Rosebush
-Rosberg
-Roon
-Ronin
-Romesburg
-Romelus
-Rolley
-Rollerson
-Rollefson
-Rolins
-Rolens
-Rois
-Rohrig
-Rohrbacher
-Rohland
-Rohen
-Roh
-Rogness
-Roes
-Roering
-Roehrick
-Roebke
-Rodregez
-Rodabaugh
-Rocks
-Rockingham
-Roblee
-Robel
-Roadcap
-Rizzolo
-Riviezzo
-Rivest
-Riveron
-Risto
-Rissler
-Risen
-Rippentrop
-Ripka
-Rinn
-Ringuette
-Ringering
-Rindone
-Rindels
-Rim
-Rieffer
-Riedman
-Riede
-Riecke
-Riebow
-Riddlebarger
-Rhome
-Rhodd
-Rhatigan
-Rhame
-Reyers
-Rewitzer
-Revalee
-Retzer
-Rettinger
-Reschke
-Requa
-Reper
-Reopell
-Renzelman
-Renne
-Renker
-Renk
-Renicker
-Rendina
-Rendel
-Remund
-Remmele
-Remiasz
-Remaklus
-Remak
-Reitsma
-Reitmeier
-Reiswig
-Reishus
-Reining
-Reim
-Reidinger
-Reick
-Reiche
-Regans
-Reffett
-Reesor
-Reekie
-Redpath
-Redditt
-Rechtzigel
-Recht
-Rebel
-Rearden
-Raynoso
-Raxter
-Ratkowski
-Rasulo
-Rassmussen
-Rassel
-Raspberry
-Raser
-Rappleye
-Rappe
-Randy
-Randrup
-Randleman
-Ramson
-Rampey
-Ramming
-Rama
-Rainier
-Raider
-Radziewicz
-Quirarte
-Quintyne
-Quickel
-Query
-Quattrini
-Quarry
-Quakenbush
-Quaile
-Pytel
-Putty
-Pushaw
-Pusch
-Purslow
-Punzo
-Pullam
-Pugmire
-Puello
-Pu
-Przekop
-Pruss
-Pruiett
-Provow
-Prophete
-Procaccini
-Pritz
-Prillaman
-Priess
-Pretlow
-Prestia
-Presha
-Prescod
-Preast
-Praytor
-Prashad
-Praino
-Pozzi
-Pounder
-Pottenger
-Potash
-Porada
-Popplewell
-Ponzo
-Ponter
-Pommier
-Polland
-Polidori
-Polasky
-Pola
-Pok
-Poitier
-Poisso
-Poire
-Point
-Pofahl
-Podolsky
-Podell
-Plueger
-Plowe
-Plotz
-Plotnik
-Ploch
-Pliska
-Plessner
-Plaut
-Platzer
-Plake
-Pizzino
-Pizza
-Pirog
-Piquette
-Pipho
-Pioche
-Pintos
-Pinkert
-Pinet
-Pilkerton
-Pilch
-Pilarz
-Pignataro
-Piermatteo
-Picozzi
-Pickler
-Pickette
-Pichler
-Philogene
-Pheasant
-Phare
-Phang
-Pfrogner
-Pfisterer
-Pettinelli
-Petruzzi
-Petrovic
-Petretti
-Petermeier
-Pestone
-Pesterfield
-Pessin
-Pesch
-Persky
-Perruzza
-Perrott
-Perritt
-Perretti
-Perrera
-Peroutka
-Peroni
-Peron
-Peret
-Perdew
-Perazzo
-Peppe
-Peno
-Penberthy
-Penagos
-Peles
-Pelech
-Peiper
-Peight
-Pefferman
-Peddie
-Peckenpaugh
-Pean
-Payen
-Pavloski
-Pavlica
-Paullin
-Pattie
-Patteson
-Passon
-Passey
-Passe
-Passalacqua
-Pasquini
-Paskel
-Parter
-Partch
-Parriott
-Parrella
-Parraz
-Parmely
-Parizo
-Parisian
-Papelian
-Papasergi
-Pantojz
-Panto
-Panich
-Panchal
-Palys
-Palms
-Pallone
-Palinski
-Pali
-Palevic
-Pale
-Pagels
-Paciorek
-Pacho
-Pacella
-Paar
-Ozbun
-Overweg
-Overholser
-Ovalles
-Outhouse
-Outcalt
-Otterbein
-Otta
-Ostergren
-Osher
-Osbon
-Orzech
-Orwick
-Orrico
-Oropesa
-Orn
-Ormes
-Orillion
-Opal
-Onorati
-Onnen
-Omary
-Olk
-Olding
-Okonski
-Okimoto
-Ohlrich
-Ohayon
-Oguin
-Ogley
-Oftedahl
-Offen
-Ofallon
-Oeltjen
-Odam
-Ockmond
-Ockimey
-Ocean
-Obermeyer
-Oberdorf
-Obanner
-Oballe
-Oard
-Oakden
-Nyhan
-Nydam
-Numan
-Noyer
-Notte
-Nothstein
-Notestine
-Noser
-Nork
-Nolde
-Noa
-Nishihara
-Nishi
-Nikolic
-Nihart
-Nietupski
-Niesen
-Niehus
-Niece
-Nidiffer
-Nicoulin
-Nicolaysen
-Nicklow
-Nickl
-Nickeson
-Nichter
-Nicholl
-Ngyun
-Newsham
-Newmann
-Neveux
-Neuzil
-Neumayer
-Netland
-Nessen
-Nesheim
-Nelli
-Nelke
-Necochea
-Nazari
-Navy
-Navorro
-Navarez
-Navan
-Natter
-Natt
-Nater
-Nasta
-Narvaiz
-Nardelli
-Napp
-Nakahara
-Nairn
-Nagg
-Nager
-Nagano
-Nafziger
-Naffziger
-Nadelson
-Muzzillo
-Murri
-Murrey
-Murgia
-Murcia
-Muno
-Munier
-Mulqueen
-Mulliniks
-Mulkins
-Mulik
-Muhs
-Muffley
-Mozell
-Moynahan
-Mounger
-Mottley
-Motil
-Moseman
-Moseby
-Mosakowski
-Morten
-Mortell
-Morrisroe
-Morrero
-Mormino
-Morland
-Morger
-Morgenthaler
-Moren
-Morelle
-Morawski
-Morasca
-Morang
-Morand
-Moog
-Montney
-Montera
-Montee
-Montane
-Montagne
-Mons
-Monohan
-Monnett
-Monkhouse
-Moncure
-Momphard
-Molyneaux
-Molles
-Mollenkopf
-Molette
-Moland
-Mohs
-Mohmand
-Mohlke
-Moessner
-Moers
-Mockus
-Moccio
-Mlinar
-Mizzelle
-Mittler
-Mitri
-Mitchusson
-Mitchen
-Mistrot
-Mistler
-Misch
-Miriello
-Minkin
-Mininger
-Minerich
-Minehart
-Minderman
-Minden
-Minahan
-Milonas
-Millon
-Millholland
-Milleson
-Millerbernd
-Millage
-Militante
-Milionis
-Milhoan
-Mildenberger
-Milbury
-Mikolajczak
-Miklos
-Mikkola
-Mikes
-Migneault
-Mifsud
-Mietus
-Mieszala
-Mielnicki
-Midy
-Michon
-Michioka
-Micheau
-Michaeli
-Micali
-Methe
-Metallo
-Messler
-Mesch
-Merow
-Meroney
-Mergenthaler
-Meres
-Mercy
-Menuey
-Menousek
-Menning
-Menn
-Menghini
-Mendia
-Memmer
-Melot
-Mellow
-Mellenthin
-Melland
-Meland
-Meixner
-Meisenheimer
-Meineke
-Meinders
-Mehrens
-Mehlig
-Meglio
-Medsker
-Medicine
-Medero
-Mederios
-Meabon
-Mcwright
-Mcright
-Mcreath
-Mcrary
-Mcquirter
-Mcquerry
-Mcquary
-Mcphie
-Mcnurlen
-Mcnelley
-Mcnee
-Mcnairy
-Mcmanamy
-Mcmahen
-Mckowen
-Mckiver
-Mckinlay
-Mckearin
-Mcirvin
-Mcintrye
-Mchorse
-Mchaffie
-Mcgroarty
-Mcgoff
-Mcgivern
-Mceniry
-Mcelhiney
-Mcdiarmid
-Mccullars
-Mccubbins
-Mccrimon
-Mccovery
-Mccommons
-Mcclour
-Mccarrick
-Mccarey
-Mccallen
-Mcbrien
-Mcarthy
-Mayone
-Maybin
-Maximo
-Maxam
-Maurais
-Maughn
-Matzek
-Matts
-Matin
-Mathre
-Mathia
-Mateen
-Matava
-Masso
-Massar
-Massanet
-Masingale
-Mascaro
-Marthaler
-Martes
-Marso
-Marshman
-Marsalis
-Marrano
-Marolt
-Marold
-Markins
-Margulis
-Mardirosian
-Marchiano
-Marchak
-Marandola
-Marana
-Manues
-Mantis
-Mante
-Mansukhani
-Mansi
-Mannan
-Maniccia
-Mangine
-Manery
-Mandigo
-Manda
-Mancell
-Mamo
-Malstrom
-Malouf
-Malenfant
-Malena
-Maldenado
-Malandruccolo
-Malak
-Malabanan
-Makino
-Maj
-Maisonave
-Mainord
-Maino
-Mainard
-Maillard
-Maia
-Mahmud
-Mahdi
-Mahapatra
-Mahaley
-Mahaffy
-Magouirk
-Maglaras
-Magat
-Magan
-Maga
-Maffia
-Madrazo
-Madrano
-Maditz
-Mackert
-Mackellar
-Mackell
-Macht
-Macchia
-Maccarthy
-Maahs
-Lytal
-Lye
-Luzar
-Luzader
-Lutjen
-Lunger
-Lunan
-Luma
-Lukins
-Luhmann
-Luers
-Ludvigsen
-Ludlam
-Ludemann
-Luchini
-Lucente
-Lubrano
-Lubow
-Luber
-Lubeck
-Lowing
-Loven
-Loup
-Louise
-Louge
-Losco
-Lorts
-Lormand
-Lorenzetti
-Longford
-Longden
-Longbrake
-Lokhmatov
-Loge
-Loeven
-Loeser
-Locket
-Locey
-Locatelli
-Litka
-Lista
-Lisonbee
-Lisenbee
-Liscano
-Liranzo
-Liquori
-Liptrot
-Lionetti
-Lio
-Linscomb
-Linkovich
-Linington
-Lingefelt
-Lindler
-Lindig
-Lindall
-Lincks
-Linander
-Linan
-Limburg
-Limbrick
-Limbach
-Likos
-Lighthall
-Liford
-Lietzke
-Liebe
-Liddicoat
-Lickley
-Lichter
-Libel
-Lias
-Liapis
-Lezo
-Lewan
-Levitz
-Levesgue
-Leverson
-Levander
-Leuthauser
-Letbetter
-Lesuer
-Lesmeister
-Lesly
-Lerer
-Leppanen
-Lepinski
-Leota
-Lenherr
-Lembrick
-Lelonek
-Leisten
-Leiss
-Leins
-Leingang
-Leinberger
-Leinbach
-Leikam
-Leidig
-Lehtonen
-Lehnert
-Lehew
-Legier
-Lefchik
-Lecy
-Leconte
-Lecher
-Lebrecht
-Leather
-Leaper
-Lawter
-Lawrenz
-Lavy
-Laur
-Lauderbaugh
-Lauden
-Laudato
-Latting
-Latsko
-Latini
-Lassere
-Lasseigne
-Laspina
-Laso
-Laslie
-Laskowitz
-Laske
-Laser
-Lasenby
-Lascola
-Lariosa
-Larcade
-Lapete
-Laperouse
-Lanuza
-Lanting
-Lantagne
-Lansdale
-Lanphier
-Langmaid
-Langella
-Lanese
-Landrus
-Lampros
-Lamens
-Laizure
-Laitinen
-Laigle
-Lahm
-Lagueux
-Lagorio
-Lagomarsino
-Lagasca
-Lagana
-Lafont
-Laflen
-Lafavor
-Lafarge
-Laducer
-Ladnier
-Ladesma
-Lacognata
-Lackland
-Lacerte
-Labuff
-Laborin
-Labine
-Labauve
-Kuzio
-Kusterer
-Kussman
-Kusel
-Kusch
-Kurutz
-Kurdyla
-Kupka
-Kunzler
-Kunsman
-Kuni
-Kuney
-Kunc
-Kulish
-Kuliga
-Kulaga
-Kuilan
-Kuhre
-Kuhnke
-Kuemmerle
-Kueker
-Kudla
-Kudelka
-Kubinski
-Kubicki
-Kubal
-Krzyzanowski
-Krupicka
-Krumwiede
-Krumme
-Kross
-Kropidlowski
-Krokos
-Kroell
-Kritzer
-Kribs
-Kreitlow
-Kreisher
-Kraynak
-Krass
-Kranzler
-Kramb
-Kozyra
-Kozicki
-Kovalik
-Kovalchik
-Kovacevic
-Kotula
-Kotrba
-Koteles
-Kosowski
-Koskela
-Kosiba
-Koscinski
-Kosch
-Kory
-Korab
-Kopple
-Kopper
-Koppelman
-Koppel
-Konwinski
-Kon
-Kolosky
-Koloski
-Kolinsky
-Kolinski
-Kolbeck
-Kolasa
-Koepf
-Koda
-Kochevar
-Kochert
-Kobs
-Knust
-Knueppel
-Knoy
-Knieriem
-Knier
-Kneller
-Knappert
-Klitz
-Klintworth
-Klinkenberg
-Klinck
-Kleindienst
-Kleeb
-Klecker
-Kjellberg
-Kitten
-Kitsmiller
-Kisor
-Kisiel
-Kise
-Kirbo
-Kio
-Kinzle
-Kinkaid
-Kingsford
-Kingry
-Kimpton
-Kimel
-Kimberley
-Killmon
-Killick
-Kilgallon
-Kilcher
-Kihn
-Kiggins
-Kiecker
-Kher
-Khaleel
-Keziah
-Kettell
-Ketchen
-Keshishian
-Kersting
-Kersch
-Kerins
-Kercher
-Keno
-Kenefick
-Kemph
-Kempa
-Kelsheimer
-Kelln
-Kellenberger
-Kekahuna
-Keisling
-Keirnan
-Keimig
-Kehn
-Keal
-Ke
-Kaupp
-Kaufhold
-Kauffmann
-Katzenberg
-Katona
-Kaszynski
-Kaszuba
-Kassebaum
-Kasa
-Kartye
-Kartchner
-Karstens
-Karpinsky
-Karmely
-Karel
-Karasek
-Kapral
-Kaper
-Kanelos
-Kanahele
-Kampmann
-Kampe
-Kalp
-Kallus
-Kallevig
-Kallen
-Kaliszewski
-Kaleohano
-Kalchthaler
-Kalama
-Kalahiki
-Kaili
-Kahawai
-Kagey
-Justiss
-Jurkowski
-Jurgensmeyer
-Juilfs
-Josue
-Jopling
-Jondahl
-Jomes
-Joice
-Johannessen
-Joeckel
-Jezewski
-Jezek
-Jeswald
-Jervey
-Jeppsen
-Jenniges
-Jennifer
-Jennett
-Jemmott
-Jeffs
-Jeffry
-Jaurequi
-Janisch
-Janick
-Janice
-Jacek
-Jacaruso
-Iwanicki
-Ishihara
-Isenberger
-Isbister
-Iruegas
-Inzer
-Inyart
-Inscore
-Innocenti
-Inglish
-Infantolino
-Indovina
-Inaba
-Imondi
-Imdieke
-Imbert
-Illes
-Ida
-Iarocci
-Iannucci
-Huver
-Hutley
-Husser
-Husmann
-Hupf
-Huntsberger
-Hunnewell
-Hullum
-Huit
-Huish
-Huh
-Hughson
-Huft
-Hufstetler
-Hueser
-Hudnell
-Hovden
-Housen
-Houghtling
-Hoth
-Hossack
-Hoshaw
-Horsford
-Horry
-Hornbacher
-Horde
-Hoppenstedt
-Hopkinson
-Honza
-Honor
-Homann
-Holzmeister
-Holycross
-Holverson
-Holtzlander
-Holroyd
-Holmlund
-Hollywood
-Holderness
-Holderfield
-Holck
-Hojnacki
-Hohlfeld
-Hohenberger
-Hoganson
-Hogancamp
-Hoffses
-Hoerauf
-Hoell
-Hoefert
-Hodum
-Hoder
-Hockenbury
-Hoage
-Hisserich
-Hislip
-Hirons
-Hippensteel
-Hippen
-Hinkston
-Hindes
-Hinchcliff
-Hin
-Himmel
-Hillberry
-Hildring
-Hiester
-Hiefnar
-Hides
-Hibberd
-Hibben
-Heyliger
-Heyl
-Heyes
-Hevia
-Heu
-Hettrick
-Hert
-Hersha
-Hernandz
-Herkel
-Herber
-Henscheid
-Hennesy
-Henly
-Henegan
-Henebry
-Hench
-Hemsath
-Hemm
-Hemken
-Hemann
-Heltzel
-Hellriegel
-Hejny
-Heinl
-Heinke
-Heidinger
-Hegeman
-Hefferan
-Hedglin
-Hebdon
-Hearnen
-Hearing
-Heape
-Heagy
-Headings
-Headd
-Hazelbaker
-Havlick
-Hauschildt
-Haury
-Hassenfritz
-Hasenbeck
-Haseltine
-Hartstein
-Hartry
-Hartnell
-Harston
-Harpool
-Harmen
-Hardister
-Hardey
-Harders
-Harbolt
-Harbinson
-Haraway
-Haque
-Hansmann
-Hanser
-Hansch
-Hansberry
-Hankel
-Hanigan
-Haneline
-Hampe
-Hamons
-Hammerstone
-Hammerle
-Hamme
-Hammargren
-Hamelton
-Hamberger
-Hamasaki
-Halprin
-Halman
-Hallihan
-Halen
-Haldane
-Hails
-Haifley
-Hai
-Hages
-Hagadorn
-Hadwin
-Habicht
-Habermehl
-Gyles
-Gutzman
-Gutekunst
-Gustason
-Gusewelle
-Gurnsey
-Gurnee
-Gunterman
-Gumina
-Gulliver
-Gulbrandson
-Guiterez
-Guerino
-Guedry
-Gucwa
-Guardarrama
-Guagliano
-Guadagno
-Grulke
-Groote
-Groody
-Groft
-Groeneweg
-Grochow
-Grippe
-Grimstead
-Griepentrog
-Greenfeld
-Greenaway
-Grebe
-Graziosi
-Graw
-Gravina
-Grassie
-Grapes
-Granzow
-Grandjean
-Granby
-Gramacy
-Graces
-Gozalez
-Goyer
-Gotch
-Gosden
-Gorny
-Gormont
-Goodness
-Goodgion
-Gonya
-Gonnerman
-Gompert
-Golish
-Goligoski
-Goldmann
-Goike
-Goetze
-Godeaux
-Glenna
-Glaza
-Glassel
-Glaspy
-Glander
-Glady
-Giumarro
-Gitelman
-Gisondi
-Gismondi
-Girvan
-Girten
-Gironda
-Giovinco
-Ginkel
-Gilster
-Giesy
-Gierman
-Giddins
-Giardini
-Gianino
-Ghea
-Geurin
-Gett
-Getson
-Gerrero
-Germond
-Gere
-Gentsy
-Genta
-Gennette
-Genito
-Genis
-Gene
-Gendler
-Geltz
-Geiss
-Gehret
-Gegenheimer
-Geffert
-Geeting
-Gebel
-Gavette
-Gavenda
-Gaumond
-Gaudioso
-Gatzke
-Gatza
-Gattshall
-Gaton
-Gatchel
-Gasperi
-Gaska
-Gasiorowski
-Garritson
-Garrigus
-Garnier
-Garnick
-Gardinier
-Gardenas
-Garcy
-Garate
-Gandolfi
-Gamm
-Gamel
-Gambel
-Gallmon
-Gallemore
-Gallati
-Gainous
-Gainforth
-Gahring
-Gaffey
-Gaebler
-Gadzinski
-Gadbury
-Gabri
-Gabe
-Gaba
-Fyke
-Furtaw
-Furnas
-Furcron
-Funn
-Funck
-Fulwood
-Fulvio
-Fullmore
-Fukumoto
-Fuest
-Fuery
-Fuente
-Fuel
-Frymire
-Frush
-Frohlich
-Froedge
-Frodge
-Fritzinger
-Fricker
-Frericks
-Frein
-Freid
-Freggiaro
-Fratto
-Franzi
-Franciscus
-Fralix
-Fowble
-Fotheringham
-Foslien
-Foshie
-Fortmann
-Forsey
-Forkner
-Foppiano
-Fontanetta
-Fonohema
-Fogler
-Fockler
-Fluty
-Flusche
-Flud
-Florin
-Flori
-Flenory
-Fleharty
-Fleeks
-Flaxman
-Flash
-Flaming
-Fiumara
-Fitzmorris
-Finnicum
-Finkley
-Fineran
-Fillhart
-Filipi
-Fijal
-Fieldson
-Ficken
-Ficarra
-Fetch
-Festerman
-Fess
-Ferryman
-Ferner
-Fergason
-Ferell
-Fennern
-Femmer
-Feldmeier
-Feeser
-Feenan
-Federick
-Fedak
-Febbo
-Feazell
-Fearing
-Fazzone
-Fauth
-Fauset
-Faurote
-Faulker
-Faubion
-Fatzinger
-Fasick
-Fanguy
-Fambrough
-Falks
-Fahl
-Fabio
-Faaita
-Exler
-Ewens
-Estrado
-Esten
-Esteen
-Esquivez
-Espejo
-Esmiol
-Esguerra
-Esco
-Ertz
-Erspamer
-Ernstes
-Erisman
-Erhard
-Ereaux
-Ercanbrack
-Erbes
-Epple
-Entsminger
-Entriken
-Enslow
-Ennett
-Engquist
-Englebert
-Englander
-Engesser
-Engert
-Engeman
-Enge
-Enerson
-End
-Emhoff
-Emge
-Emerald
-Elting
-Ellner
-Ellenberg
-Ellenbecker
-Elio
-Elfert
-Elden
-Elawar
-Ekstrand
-Eison
-Eismont
-Eisenbrandt
-Eiseman
-Eischens
-Ehrgott
-Egley
-Egert
-Eddlemon
-Economy
-Eckerson
-Eckersley
-Eckberg
-Echeverry
-Eberts
-Earthman
-Earnhart
-Eapen
-Eachus
-Dykas
-Dust
-Dusi
-Durning
-During
-Durdan
-Dunomes
-Duncombe
-Dume
-Dullen
-Dullea
-Dulay
-Dul
-Duffett
-Dubs
-Dubard
-Drook
-Drenth
-Drahos
-Dragone
-Downin
-Downham
-Dowis
-Dowhower
-Doward
-Dovalina
-Dost
-Dopazo
-Doose
-Donson
-Donnan
-Dominski
-Dollarhide
-Dolinar
-Dolecki
-Dolbee
-Doege
-Dockus
-Dobler
-Dobkin
-Dobias
-Divoll
-Diviney
-Ditter
-Ditman
-Dissinger
-Dismang
-Dirlam
-Dinneen
-Dini
-Dingwall
-Dine
-Din
-Diloreto
-Dilmore
-Dillaman
-Dikeman
-Diiorio
-Dighton
-Diffley
-Dieudonne
-Dietel
-Dieringer
-Diercks
-Dienhart
-Diekrager
-Diefendorf
-Dicke
-Dicamillo
-Dibrito
-Dibona
-Dezeeuw
-Dewhurst
-Devins
-Deviney
-Deupree
-Detherage
-Despino
-Desmith
-Desjarlais
-Deshner
-Desha
-Desanctis
-Derring
-Derousse
-Derobertis
-Deridder
-Derego
-Derden
-Deprospero
-Deprofio
-Depping
-Deperro
-Denty
-Denoncourt
-Dencklau
-Demler
-Demirchyan
-Demichiel
-Demesa
-Demere
-Demaggio
-Delung
-Deluise
-Delmoral
-Delmastro
-Delmas
-Delligatti
-Delle
-Delena
-Delasbour
-Delarme
-Delargy
-Delagrange
-Delafontaine
-Deist
-Deiss
-Deighan
-Dehoff
-Degrazia
-Degman
-Defosses
-Deforrest
-Deeks
-Decoux
-Decarolis
-Debuhr
-Deberg
-Debarr
-Debari
-Dearmon
-Deare
-Deardurff
-Daywalt
-Dayer
-Davoren
-Davignon
-Daviau
-Dauteuil
-Dauterive
-Daul
-Darnley
-Darlin
-Darakjy
-Dapice
-Dannunzio
-Danison
-Daniello
-Damario
-Dalonzo
-Dallis
-Daleske
-Dalenberg
-Daiz
-Dains
-Daines
-Dagnese
-Dady
-Dadey
-Czyzewski
-Czapor
-Czaplewski
-Czajka
-Cyganiewicz
-Cuttino
-Cutrona
-Cussins
-Cusanelli
-Cuperus
-Cundy
-Cumiskey
-Cumins
-Cuizon
-Cuffia
-Cuffe
-Cuffari
-Cuccaro
-Cubie
-Cryder
-Cruson
-Crounse
-Cromedy
-Cring
-Creer
-Credeur
-Crea
-Cozort
-Cozine
-Cowee
-Cowdery
-Coventry
-Couser
-Courtway
-Courington
-Cotman
-Costlow
-Costell
-Corton
-Corsaro
-Corrieri
-Corrick
-Corradini
-Coron
-Coren
-Cord
-Corbi
-Corado
-Copus
-Coppenger
-Cooperwood
-Coontz
-Coonce
-Contrera
-Connealy
-Conell
-Comtois
-Compere
-Commins
-Commings
-Comegys
-Coma
-Colyar
-Colo
-Collister
-Collick
-Collella
-Coler
-Colborn
-Cohran
-Cogbill
-Coffen
-Cocuzzo
-Clynes
-Closter
-Clock
-Clipp
-Clingingsmith
-Clemence
-Clayman
-Classon
-Clas
-Clarey
-Clarence
-Clague
-Ciubal
-Citrino
-Citarella
-Cirone
-Cipponeri
-Cindrich
-Cimo
-Ciliberto
-Cichowski
-Ciccarello
-Cicala
-Chura
-Chubbuck
-Chronis
-Christlieb
-Chriss
-Chizek
-Chittester
-Chiquito
-Chimento
-Childree
-Chianese
-Chevrette
-Cheese
-Checo
-Chastang
-Chargualaf
-Chapmon
-Chantry
-Chahal
-Chafetz
-Cezar
-Ceruantes
-Cerrillo
-Cerrano
-Cerecedes
-Cerami
-Cegielski
-Cavallero
-Catinella
-Cassata
-Caslin
-Casano
-Casacchia
-Caruth
-Cartrette
-Carten
-Carodine
-Carnrike
-Carnall
-Carmicle
-Carlan
-Carlacci
-Caris
-Cariaga
-Cardine
-Cardimino
-Cardani
-Carbonara
-Carano
-Capua
-Capponi
-Cappellano
-Caporale
-Capelli
-Canupp
-Cantrel
-Cantone
-Canterberry
-Cannizzo
-Cannan
-Canelo
-Caneer
-Candill
-Candee
-Campbel
-Caminero
-Camble
-Caluya
-Callicott
-Calk
-Caito
-Caffie
-Caden
-Cadavid
-Cacy
-Cachu
-Cachola
-Cabreja
-Cabiles
-Cabada
-Caamano
-Byran
-Byon
-Buyck
-Bussman
-Bussie
-Bushner
-Burston
-Burnison
-Burkman
-Burkhammer
-Bures
-Burdeshaw
-Bumpass
-Bullinger
-Bullers
-Bulgrin
-Bugay
-Buffalo
-Budak
-Buczynski
-Buckendorf
-Buccieri
-Bubrig
-Brynteson
-Brunz
-Brunmeier
-Brunkow
-Brunetto
-Brunelli
-Brumwell
-Bruggman
-Brucki
-Brucculeri
-Brozovich
-Browing
-Brotman
-Broda
-Brocker
-Broadstreet
-Brix
-Britson
-Brinck
-Brimmage
-Brightly
-Brierre
-Bridenstine
-Brezenski
-Brezee
-Brevik
-Brest
-Brentlinger
-Brentley
-Breidenbach
-Breckel
-Brech
-Breaker
-Brazzle
-Braughton
-Brauch
-Brattin
-Brattain
-Branhan
-Branford
-Braner
-Brander
-Braly
-Braegelmann
-Brabec
-Boyt
-Boyack
-Bowren
-Bowl
-Bovian
-Boughan
-Botton
-Botner
-Bosques
-Borzea
-Borre
-Boron
-Bornhorst
-Borgstrom
-Borella
-Boop
-Bontempo
-Bonniwell
-Bonnes
-Bonjour
-Bonillo
-Bonano
-Bolek
-Bohol
-Bohaty
-Boffa
-Boetcher
-Boesen
-Boepple
-Boehler
-Boedecker
-Boeckx
-Bodi
-Boal
-Bloodsworth
-Bloodgood
-Blome
-Blockett
-Blixt
-Blanchett
-Blackhurst
-Blackaby
-Bjornberg
-Bitzer
-Bittenbender
-Bitler
-Birchall
-Binnicker
-Binggeli
-Billett
-Bilberry
-Bijou
-Biglow
-Bierly
-Bielby
-Biegel
-Beu
-Berzas
-Berte
-Bertagnolli
-Berreth
-Bernhart
-Bergum
-Berentson
-Berenson
-Berdy
-Bercegeay
-Bentle
-Bentivegna
-Bentham
-Benscoter
-Benns
-Bennick
-Benjamine
-Beneze
-Benett
-Beneke
-Bendure
-Bendix
-Bendick
-Benauides
-Belman
-Bellus
-Bellott
-Bellefleur
-Bellas
-Beljan
-Belgard
-Beith
-Beinlich
-Beierle
-Behme
-Beevers
-Beermann
-Beeching
-Bedward
-Bedrosian
-Bedner
-Bedeker
-Bechel
-Becera
-Beaubrun
-Beardmore
-Bealmear
-Bazin
-Bazer
-Baumhoer
-Baumgarner
-Bauknecht
-Battson
-Battiest
-Basulto
-Baster
-Basques
-Basista
-Basiliere
-Bashi
-Barzey
-Barz
-Bartus
-Bartucca
-Bartek
-Barrero
-Barreca
-Barnoski
-Barndt
-Barklow
-Baribeau
-Barette
-Bares
-Barentine
-Bareilles
-Barch
-Barbre
-Barberi
-Barbagelata
-Baraw
-Baratto
-Baranoski
-Bar
-Baptise
-Bankson
-Bankey
-Bankard
-Banik
-Baltzley
-Ballen
-Balkey
-Balius
-Balderston
-Bakula
-Bakalar
-Baffuto
-Baerga
-Badoni
-Backous
-Bachtel
-Bachrach
-Baccari
-Babine
-Babilonia
-Baar
-Azbill
-Azad
-Aycox
-Ayalla
-Avolio
-Austerberry
-Aughtry
-Aufderheide
-Auch
-Attanasio
-Athayde
-Atcher
-Astor
-Asselta
-Aslin
-Aslam
-Ashwood
-Ashraf
-Ashbacher
-Asbridge
-Asakura
-Arzaga
-Arriaza
-Arrez
-Arrequin
-Arrants
-Armiger
-Armenteros
-Armbrister
-Arko
-Argumedo
-Arguijo
-Ardolino
-Arcia
-Arbizo
-Aravjo
-Aper
-Anzaldo
-Antu
-Antrikin
-Antony
-Antonia
-Antonetty
-Antinoro
-Anthon
-Antenucci
-Anstead
-Annese
-Ankrum
-Andreason
-Andrado
-Andaverde
-Anastos
-Anable
-Amsterdam
-Amspoker
-Amrine
-Amrein
-Amorin
-Amel
-Ambrosini
-Amber
-Alsbrook
-Alnutt
-Almasi
-Allessio
-Allateef
-Alison
-Aldous
-Alderink
-Aldaz
-Akmal
-Akard
-Aiton
-Aites
-Ainscough
-Aikey
-Ahrends
-Ahlm
-Aguada
-Agans
-Adelmann
-Adebisi
-Addesso
-Adaway
-Adamaitis
-Ackison
-Abud
-Abendroth
-Abdur
-Abdool
-Aamodt
-Zywiec
-Zwiefelhofer
-Zwahlen
-Zunino
-Zuehl
-Zmuda
-Zmolek
-Zizza
-Ziska
-Zinser
-Zinkievich
-Zinger
-Zingarelli
-Ziesmer
-Ziegenfuss
-Ziebol
-Zettlemoyer
-Zettel
-Zervos
-Zenke
-Zembower
-Zelechowski
-Zelasko
-Zeise
-Zeek
-Zeeb
-Zarlenga
-Zarek
-Zaidi
-Zahnow
-Zahnke
-Zaharis
-Zach
-Zacate
-Zabrocki
-Zaborac
-Yurchak
-Yuengling
-Younie
-Youngers
-Youell
-Yott
-Yoshino
-Yorks
-Yordy
-Yochem
-Yerico
-Yerdon
-Yeiser
-Yearous
-Yearick
-Yeaney
-Ybarro
-Yasutake
-Yasin
-Yanke
-Yanish
-Yanik
-Yamazaki
-Yamat
-Yaggi
-Ximenez
-Wyzard
-Wynder
-Wyly
-Wykle
-Wutzke
-Wuori
-Wuertz
-Wuebker
-Wrightsel
-Worobel
-Worlie
-Worford
-Worek
-Woolson
-Woodrome
-Woodly
-Woodling
-Wontor
-Wondra
-Woltemath
-Wollmer
-Wolinski
-Wolfert
-Wojtanik
-Wojtak
-Wohlfarth
-Woeste
-Wobbleton
-Witz
-Wittmeyer
-Witchey
-Wisotzkey
-Wisnewski
-Wisman
-Wirch
-Wippert
-Wineberg
-Wimpee
-Wilusz
-Wiltsey
-Willig
-Williar
-Willers
-Willadsen
-Wilfred
-Wildhaber
-Wilday
-Wigham
-Wiggen
-Wiewel
-Wieting
-Wietbrock
-Wiesel
-Wiesehan
-Wiersema
-Wiegert
-Widney
-Widmark
-Wickson
-Wickings
-Wichern
-Whtie
-Whittie
-Whitlinger
-Whitfill
-Whitebread
-Whispell
-Whetten
-Wheeley
-Wheeles
-Wheelen
-Whatcott
-Weyland
-Weter
-Westrup
-Westphalen
-Westly
-Westland
-Wessler
-Wesolick
-Wesler
-Wesche
-Werry
-Wero
-Wernecke
-Werkhoven
-Wellspeak
-Wellings
-Welford
-Welander
-Weissgerber
-Weisheit
-Weins
-Weill
-Weigner
-Wehrmann
-Wehrley
-Wehmeier
-Wege
-Weers
-Weavers
-Watring
-Wassum
-Wassman
-Wassil
-Washabaugh
-Wascher
-Wary
-Warth
-Warbington
-Wanca
-Wammack
-Wamboldt
-Walterman
-Walkington
-Walkenhorst
-Walinski
-Wakley
-Wagg
-Wadell
-Vuckovich
-Voogd
-Voller
-Vokes
-Vogle
-Vogelsberg
-Vodicka
-Vissering
-Visage
-Vipond
-Vincik
-Villalona
-Vil
-Vickerman
-Vettel
-Veteto
-Vessel
-Vesperman
-Vesco
-Vertucci
-Versaw
-Verba
-Ventris
-Venecia
-Vendela
-Venanzi
-Veldhuizen
-Vehrs
-Veer
-Vee
-Vay
-Vaughen
-Vasilopoulos
-Vascocu
-Varvel
-Varno
-Varlas
-Varland
-Vario
-Vareschi
-Vanwyhe
-Vanweelden
-Vansciver
-Vannaman
-Vanluven
-Vanloo
-Vanlaningham
-Vankomen
-Vanhout
-Vanhampler
-Vangorp
-Vangorden
-Vanella
-Vandresar
-Vandis
-Vandeyacht
-Vandewerker
-Vandevsen
-Vanderwall
-Vandercook
-Vanderberg
-Vanbergen
-Valko
-Valesquez
-Valeriano
-Valen
-Vachula
-Vacha
-Uzee
-Uva
-Uselman
-Urizar
-Urion
-Urben
-Upthegrove
-Unzicker
-Unsell
-Unick
-Umscheid
-Umin
-Umanzor
-Ullo
-Ulicki
-Uhlir
-Uddin
-Tytler
-Tymeson
-Tyger
-Twisdale
-Twedell
-Tweddle
-Turrey
-Tures
-Turell
-Tur
-Tupa
-Tuitt
-Tuberville
-Tubby
-Tryner
-Trumpower
-Trumbore
-Truly
-Troglen
-Troff
-Troesch
-Trivisonno
-Tritto
-Tritten
-Tritle
-Trippany
-Tringali
-Tretheway
-Treon
-Trench
-Trejos
-Tregoning
-Treffert
-Traycheff
-Travali
-Trauth
-Trauernicht
-Transou
-Trane
-Trana
-Toves
-Tosta
-Torp
-Tornquist
-Tornes
-Torchio
-Toppings
-Toor
-Tooks
-Tonks
-Tomblinson
-Tomala
-Tollinchi
-Tolles
-Tokich
-Toh
-Tofte
-Todman
-Toddy
-Titze
-Timpone
-Tillema
-Tier
-Tienken
-Tiblier
-Thyberg
-Thursby
-Thurrell
-Thurm
-Thruman
-Thorsted
-Thorley
-Thomer
-Thoen
-Thissen
-Theimer
-Thee
-Thayn
-Thanpaeng
-Thammavongsa
-Thalman
-Texiera
-Texidor
-Teverbaugh
-Teska
-Ternullo
-Teplica
-Tepe
-Teno
-Tenholder
-Tenbusch
-Tenbrink
-Temby
-Tejedor
-Teitsworth
-Teichmann
-Tehan
-Tegtmeyer
-Tees
-Teem
-Tays
-Taubert
-Tauares
-Taschler
-Tartamella
-Tarquinio
-Tarbutton
-Tappendorf
-Tapija
-Tansil
-Tannahill
-Tamondong
-Talahytewa
-Takashima
-Taecker
-Tabora
-Tabin
-Tabbert
-Szymkowski
-Szymanowski
-Syversen
-Syrett
-Syracuse
-Synnott
-Sydnes
-Swimm
-Sweney
-Swearegene
-Swartzel
-Swanstrom
-Svedin
-Suss
-Suryan
-Surrey
-Supplice
-Supnet
-Suoboda
-Sundby
-Sumaya
-Sumabat
-Sulzen
-Sukovaty
-Sukhu
-Sugerman
-Sugalski
-Sugai
-Sudweeks
-Sudbeck
-Sucharski
-Stutheit
-Stumfoll
-Stuffle
-Struyk
-Strutz
-Strumpf
-Strowbridge
-Strothman
-Strojny
-Strohschein
-Stroffolino
-Stribble
-Strevel
-Strenke
-Stremming
-Strehle
-Strattman
-Stranak
-Stram
-Stracke
-Stoudamire
-Storks
-Stopp
-Stonebreaker
-Stolt
-Stoica
-Stofer
-Stockham
-Stockfisch
-Stjuste
-Stiteler
-Stiman
-Stillions
-Stillabower
-Stierle
-Sterlace
-Sterk
-Stepps
-Stenquist
-Stenner
-Stellman
-Steines
-Steinbaugh
-Steinbacher
-Steiling
-Steidel
-Steffee
-Stavinoha
-Staver
-Stastny
-Stasiuk
-Starrick
-Starliper
-Starlin
-Staniford
-Staner
-Standre
-Standefer
-Standafer
-Stanczyk
-Stallsmith
-Stagliano
-Staehle
-Staebler
-Stady
-Stadtmiller
-Squyres
-Spurbeck
-Sprunk
-Spranger
-Spoonamore
-Spoden
-Spilde
-Spezio
-Speros
-Sperandio
-Specchio
-Spearin
-Spayer
-Spallina
-Spadafino
-Sovie
-Sotello
-Sortor
-Sortino
-Sorrow
-Soros
-Sorola
-Sorbello
-Sonner
-Sonday
-Somes
-Soloway
-Soledad
-Soens
-Soellner
-Soderblom
-Sobin
-Sniezek
-Sneary
-Smyly
-Smutnick
-Smoots
-Smoldt
-Smitz
-Smitreski
-Smallen
-Smades
-Slunaker
-Sluka
-Slown
-Slovick
-Slocomb
-Slinger
-Slife
-Slicker
-Sleeter
-Slanker
-Skufca
-Skubis
-Skrocki
-Skov
-Skjei
-Skilton
-Skill
-Skarke
-Skalka
-Skalak
-Skaff
-Sixkiller
-Sitze
-Siter
-Sisko
-Sirman
-Sirls
-Sinotte
-Sinon
-Sincock
-Sincebaugh
-Simmoms
-Similien
-Silvius
-Silton
-Silloway
-Sikkema
-Sieracki
-Sienko
-Siemon
-Siemer
-Siefker
-Sieberg
-Siebens
-Siebe
-Sicurella
-Sicola
-Sickle
-Shumock
-Shumiloff
-Shuffstall
-Shuemaker
-Shuart
-Shu
-Shroff
-Shreeve
-Shostak
-Shortes
-Shorr
-Shivley
-Shintaku
-Shindo
-Shimomura
-Shiigi
-Sherow
-Sherburn
-Shepps
-Shenefield
-Shelvin
-Shelstad
-Shelp
-Sheild
-Sheaman
-Shaulis
-Sharrer
-Sharps
-Sharpes
-Shareef
-Shappy
-Shapero
-Shanor
-Shandy
-Shad
-Seyller
-Severn
-Sessom
-Sesley
-Servidio
-Serrin
-Sero
-Serge
-Septon
-Septer
-Sennott
-Sengstock
-Senff
-Senese
-Semprini
-Semone
-Sembrat
-Selva
-Sella
-Selbig
-Seiner
-Seif
-Seidt
-Sehrt
-Seemann
-Seelbinder
-Sedlay
-Sebert
-Searing
-Seaholm
-Seacord
-Seaburg
-Se
-Scungio
-Scroggie
-Scritchfield
-Scripture
-Scrimpsher
-Scrabeck
-Score
-Scorca
-Scobey
-Scivally
-Schwulst
-Schwinn
-Schwieson
-Schwery
-Schweppe
-Schwartzenbur
-Schurz
-Schumm
-Schulenburg
-Schuff
-Schuerholz
-Schryer
-Schrager
-Schorsch
-Schonhardt
-Schoenfelder
-Schoeck
-Schoeb
-Schnitzler
-Schnick
-Schnautz
-Schmig
-Schmelter
-Schmeichel
-Schluneger
-Schlosberg
-Schlobohm
-Schlenz
-Schlembach
-Schleisman
-Schleining
-Schleiff
-Schleider
-Schink
-Schilz
-Schiffler
-Schiavi
-Scheuer
-Schemonia
-Scheman
-Schelb
-Schaul
-Schaufelberge
-Scharer
-Schardt
-Scharbach
-Schabacker
-Scee
-Scavone
-Scarth
-Scarfone
-Scalese
-Sayne
-Sayed
-Savitz
-Satterlund
-Sattazahn
-Satow
-Sastre
-Sarr
-Sarjeant
-Sarff
-Sardella
-Santoya
-Santoni
-Santai
-Sankowski
-Sanft
-Sandow
-Sandoe
-Sandhaus
-Sandefer
-Sampey
-Samperi
-Sammarco
-Samia
-Samek
-Samay
-Samaan
-Salvadore
-Saltness
-Salsgiver
-Saller
-Salaz
-Salano
-Sakal
-Saka
-Saintlouis
-Saile
-Sahota
-Saggese
-Sagastume
-Sagan
-Sadri
-Sadak
-Sachez
-Saalfrank
-Saal
-Saadeh
-Ryu
-Rynn
-Ryley
-Ryle
-Rygg
-Rybarczyk
-Ruzich
-Ruyter
-Ruvo
-Rupel
-Ruopp
-Rundlett
-Runde
-Rundall
-Runck
-Rukavina
-Ruggiano
-Rufi
-Ruef
-Rubright
-Rubbo
-Rowbottom
-Route
-Rotner
-Rotman
-Rothweiler
-Rothlisberger
-Rosseau
-Rossean
-Rossa
-Roso
-Rosiek
-Roshia
-Rosenkrans
-Rosener
-Rosencrantz
-Rosencrans
-Rosello
-Roques
-Rookstool
-Rondo
-Romasanta
-Romack
-Rokus
-Rohweder
-Rog
-Roethler
-Roediger
-Rodwell
-Rodrigus
-Rodenbeck
-Rodefer
-Rodarmel
-Rockman
-Rockholt
-Rockford
-Rochow
-Roches
-Roblin
-Roblez
-Roble
-Robers
-Roat
-Rizza
-Rizvi
-Rizk
-Rixie
-Riveiro
-Rius
-Ritschard
-Ritrovato
-Risi
-Rishe
-Rippon
-Rinks
-Rings
-Ringley
-Ringgenberg
-Ringeisen
-Rimando
-Rilley
-Rijos
-Rieks
-Rieken
-Riechman
-Riddley
-Ricord
-Rickabaugh
-Richmeier
-Richesin
-Reyolds
-Rexach
-Revere
-Requena
-Reppucci
-Reposa
-Renzulli
-Renter
-Renault
-Remondini
-Relic
-Reither
-Reisig
-Reifsnider
-Reifer
-Reibsome
-Reibert
-Rehor
-Rehmann
-Reedus
-Redshaw
-Redfox
-Reczek
-Recupero
-Recor
-Reckard
-Recher
-Rear
-Realbuto
-Razer
-Rayman
-Raycraft
-Rayas
-Rawle
-Raviscioni
-Ravetto
-Ravenelle
-Rauth
-Raup
-Rattliff
-Rattley
-Rathfon
-Rataj
-Rasnic
-Rappleyea
-Rapaport
-Ransford
-Rann
-Rampersad
-Ramis
-Ramcharan
-Rainha
-Rainforth
-Ragans
-Ragains
-Rafidi
-Raffety
-Raducha
-Radsky
-Radler
-Radatz
-Raczkowski
-Rack
-Rabenold
-Quraishi
-Quinerly
-Quiet
-Quercia
-Quarnstrom
-Qian
-Pusser
-Puppo
-Pullan
-Pulis
-Pugel
-Puccini
-Puca
-Pruna
-Prowant
-Provines
-Pronk
-Prinkleton
-Prindall
-Primas
-Priesmeyer
-Pridgett
-Prevento
-Preti
-Presser
-Presnall
-Preseren
-Presas
-Presa
-Prchal
-Prattis
-Pratillo
-Praska
-Prak
-Powis
-Powderly
-Postlewait
-Postle
-Posch
-Porteus
-Portal
-Porraz
-Popwell
-Popoff
-Poplaski
-Poniatoski
-Pollina
-Polle
-Polhill
-Poletti
-Polaski
-Pokorney
-Poke
-Pointdexter
-Poinsette
-Po
-Ploszaj
-Plitt
-Pletz
-Pletsch
-Plemel
-Pleitez
-Playford
-Plaxco
-Platek
-Plambeck
-Plagens
-Placido
-Pisarski
-Pinuelas
-Pinnette
-Pinick
-Pinell
-Pinciaro
-Pinal
-Pilz
-Piltz
-Pillion
-Pilkinton
-Pilar
-Pikul
-Piepenburg
-Piening
-Piehler
-Piedrahita
-Piechocki
-Picknell
-Picker
-Pickelsimer
-Pich
-Picariello
-Phoeuk
-Phillipson
-Philbert
-Pherigo
-Phelka
-Peverini
-Petronis
-Petrina
-Petrash
-Petramale
-Petraglia
-Pery
-Personius
-Perrington
-Perrill
-Perpall
-Perot
-Perman
-Peragine
-Pentland
-Pennycuff
-Penninger
-Pennie
-Pennachio
-Penhall
-Pendexter
-Pencil
-Penalver
-Pelzel
-Pelter
-Pelow
-Pelo
-Peli
-Peinado
-Pedley
-Pecue
-Pecore
-Pechar
-Peairs
-Paynes
-Payano
-Pawelk
-Pavlock
-Pavlich
-Pavich
-Pavek
-Pautler
-Paulik
-Patmore
-Patella
-Patee
-Patalano
-Passini
-Passeri
-Paskell
-Parrigan
-Parmar
-Parayno
-Paparelli
-Pantuso
-Pante
-Panico
-Panduro
-Panagos
-Pama
-Palmo
-Pallotta
-Paling
-Palamino
-Pake
-Pajtas
-Pailthorpe
-Pahler
-Pagon
-Paglinawan
-Pagley
-Paget
-Paetz
-Paet
-Padley
-Pacleb
-Pacific
-Pachelo
-Pacer
-Paccione
-Pabey
-Ozley
-Ozimek
-Ozawa
-Owney
-Outram
-Oun
-Ouillette
-Oudekerk
-Ouch
-Ostrosky
-Ostermiller
-Ostermann
-Osterloh
-Osterfeld
-Ossenfort
-Osoria
-Oshell
-Orsino
-Orscheln
-Orrison
-Ororke
-Orf
-Orellano
-Orejuela
-Ordoyne
-Opsahl
-Opland
-Onofre
-Onaga
-Omahony
-Olszowka
-Olshan
-Ollig
-Oliff
-Olien
-Olexy
-Oldridge
-Oldfather
-Older
-Olalde
-Okun
-Okumoto
-Oktavec
-Okin
-Oka
-Ohme
-Ohlemacher
-Ohanesian
-Odneal
-Odgers
-Oderkirk
-Odden
-Ocain
-Obradovich
-Oakey
-Nussey
-Nunziato
-Nunoz
-Nunnenkamp
-Nuncio
-Noviello
-Novacek
-Nothstine
-Nostrand
-Northum
-Norsen
-Norlander
-Norkus
-Norgaard
-Norena
-Nored
-Nobrega
-Niziolek
-Ninnemann
-Nievas
-Nieratko
-Nieng
-Niedermeyer
-Niedermaier
-Nicolls
-Niang
-Newham
-Newcome
-Newberger
-Nevills
-Nevens
-Nevel
-Neumiller
-Netti
-Net
-Nessler
-Neria
-Nemet
-Nelon
-Nellon
-Neller
-Neisen
-Neilly
-Neifer
-Neid
-Negro
-Neering
-Neehouse
-Neef
-Needler
-Nebergall
-Nealis
-Naumoff
-Naufzinger
-Narum
-Narro
-Narramore
-Naraine
-Napps
-Nansteel
-Namisnak
-Namanny
-Nallie
-Nakhle
-Naito
-Naccari
-Nabb
-Myracle
-Myra
-Myhand
-Mwakitwile
-Muzzy
-Muscolino
-Musco
-Muscente
-Muscat
-Muscara
-Musacchia
-Musa
-Murrish
-Murfin
-Muray
-Munnelly
-Munley
-Munivez
-Mundine
-Mundahl
-Munari
-Mulling
-Mullennex
-Mullendore
-Mulkhey
-Mulinix
-Mulders
-Muhl
-Muenchow
-Muellner
-Mudget
-Mudger
-Muckenfuss
-Muchler
-Mozena
-Movius
-Mouldin
-Motola
-Mosseri
-Mossa
-Moselle
-Mory
-Morsell
-Morrish
-Morles
-Morie
-Morguson
-Moresco
-Morck
-Moppin
-Moosman
-Moons
-Montuori
-Montono
-Montogomery
-Montis
-Monterio
-Monter
-Monsalve
-Mongomery
-Mongar
-Mondello
-Moncivais
-Monard
-Monagan
-Molt
-Mollenhauer
-Moldrem
-Moldonado
-Molano
-Mokler
-Moisant
-Moilanen
-Mohrman
-Mohamad
-Moger
-Mogel
-Modine
-Modin
-Modic
-Modha
-Modena
-Mlynek
-Miya
-Mittiga
-Mittan
-Mitcheltree
-Miss
-Misfeldt
-Misener
-Mirchandani
-Miralles
-Miotke
-Miosky
-Minty
-Mintey
-Mins
-Minnie
-Mince
-Minassian
-Minar
-Mimis
-Milon
-Milloy
-Millison
-Milito
-Milfort
-Milbradt
-Mikulich
-Mikos
-Miklas
-Mihelcic
-Migliorisi
-Migliori
-Miesch
-Midura
-Miclette
-Michele
-Michela
-Micale
-Mezey
-Mews
-Mewes
-Mettert
-Mesker
-Mesich
-Mesecher
-Merthie
-Mersman
-Mersereau
-Merrithew
-Merriott
-Merring
-Merenda
-Merchen
-Mercardo
-Merati
-Mentzel
-Mentis
-Mentel
-Menotti
-Meno
-Mengle
-Mendolia
-Mellick
-Mellett
-Melichar
-Melhorn
-Melendres
-Melchiorre
-Meitzler
-Mehtani
-Mehrtens
-Megan
-Meditz
-Medeiras
-Meckes
-Me
-Mcteer
-Mctee
-Mcparland
-Mcniell
-Mcnealey
-Mcmanaway
-Mcleon
-Mclay
-Mclavrin
-Mcklveen
-Mckinzey
-Mcken
-Mckeand
-Mckale
-Mcilwraith
-Mcilroy
-Mcgreal
-Mcgougan
-Mcgettigan
-Mcgarey
-Mcfeeters
-Mcelhany
-Mcdaris
-Mccomis
-Mccomber
-Mccolm
-Mccollins
-Mccollin
-Mccollam
-Mccoach
-Mcclory
-Mcclennon
-Mccathern
-Mccarthey
-Mccarson
-Mccarrel
-Mccargar
-Mccandles
-Mccamish
-Mccally
-Mccage
-Mcbrearty
-Mcaneny
-Mcanallen
-Mcalarney
-Mcaferty
-Mazzo
-Mazy
-Mazurowski
-Mazique
-Mayoras
-Mayden
-Maxberry
-Mauller
-Matusiak
-Mattsen
-Matthey
-Matters
-Matkins
-Mathiasen
-Mathe
-Mateus
-Mate
-Matalka
-Masullo
-Massay
-Mashak
-Mascroft
-Martinex
-Martenson
-Marsiglia
-Marsella
-Marseille
-Maroudas
-Marotte
-Marner
-Marlo
-Markes
-Marina
-Maret
-Mareno
-Marean
-Marcinkiewicz
-Marchel
-Marasigan
-Manzueta
-Manzanilla
-Manternach
-Manring
-Manquero
-Manoni
-Manne
-Mankowski
-Manjarres
-Mangen
-Mangat
-Mandonado
-Mandia
-Mancias
-Manbeck
-Mamros
-Mam
-Maltez
-Mallia
-Mallar
-Malla
-Mall
-Malen
-Malaspina
-Malahan
-Malagisi
-Malachowski
-Makowsky
-Makinen
-Makepeace
-Majkowski
-Majid
-Majestic
-Majercin
-Maisey
-Mainguy
-Mailliard
-Maignan
-Mahlman
-Maha
-Magsamen
-Magpusao
-Magnano
-Magley
-Magedanz
-Magarelli
-Magaddino
-Maenner
-Madnick
-Maddrey
-Madaffari
-Macnaughton
-Macmullen
-Macksey
-Macknight
-Macki
-Macisaac
-Maciejczyk
-Maciag
-Macho
-Machenry
-Machamer
-Macguire
-Macdougal
-Macdaniel
-Maccormack
-Maccabe
-Mabbott
-Mabb
-Lynott
-Lyndon
-Lym
-Lydia
-Lycan
-Luy
-Lutwin
-Luscombe
-Lusco
-Lusardi
-Luria
-Lunetta
-Lundsford
-Lumas
-Luisi
-Luevanos
-Lueckenhoff
-Ludgate
-Ludd
-Lucherini
-Lubbs
-Lozado
-Lovie
-Lourens
-Lounsberry
-Loughrey
-Loughary
-Lotton
-Losser
-Loshbaugh
-Loser
-Loseke
-Loscalzo
-Los
-Lortz
-Loperena
-Loots
-Loosle
-Looman
-Longstaff
-Longobardi
-Longbottom
-Lomay
-Lomasney
-Lohrmann
-Lohmiller
-Logalbo
-Loetz
-Loeffel
-Lodwick
-Lodrigue
-Lockrem
-Llera
-Llarena
-Liv
-Littrel
-Littmann
-Lisser
-Lippa
-Lipner
-Linnemann
-Lingg
-Lindemuth
-Lindeen
-Limbo
-Lillig
-Likins
-Lights
-Lieurance
-Liesmann
-Liesman
-Liendo
-Lickert
-Lichliter
-Leyvas
-Leyrer
-Lewy
-Leubner
-Letters
-Lesslie
-Lesnick
-Lesmerises
-Lerno
-Lequire
-Lepera
-Lepard
-Lenske
-Leneau
-Lempka
-Lemmen
-Lemm
-Lemere
-Leinhart
-Leichner
-Leicher
-Leibman
-Lehmberg
-Leggins
-Lebeda
-Leavengood
-Leanard
-Lazaroff
-Laventure
-Lavant
-Lauster
-Laumea
-Latigo
-Lasota
-Lashure
-Lasecki
-Lascurain
-Lartigue
-Larouche
-Lappe
-Laplaunt
-Laplace
-Lanum
-Lansdell
-Lanpher
-Lanoie
-Lankard
-Laniado
-Langowski
-Langhorn
-Langfield
-Langfeldt
-Landt
-Landingham
-Landerman
-Landavazo
-Lampo
-Lampke
-Lamper
-Lamery
-Lambey
-Lamadrid
-Lallemand
-Laisure
-Laigo
-Laguer
-Lagerman
-Lageman
-Lagares
-Lacosse
-Lachappelle
-Labs
-Laborn
-Labonne
-Kyung
-Kuzia
-Kutt
-Kutil
-Kus
-Kurylo
-Kurowski
-Kuriger
-Kupcho
-Kulzer
-Kulesa
-Kules
-Kuhs
-Kuhne
-Krutz
-Krus
-Krupka
-Kronberg
-Kromka
-Kroese
-Krizek
-Krivanek
-Krishna
-Kringel
-Kreiss
-Kratofil
-Krapp
-Krakowsky
-Kracke
-Kozlow
-Koy
-Kowald
-Kover
-Kovaleski
-Kothakota
-Kosten
-Koskinen
-Kositzke
-Korff
-Korey
-Korbar
-Kor
-Kopplin
-Koplin
-Koos
-Konyn
-Konczak
-Komp
-Komo
-Kolber
-Kolash
-Kolakowski
-Kohm
-Kogen
-Koestner
-Koegler
-Kodama
-Kocik
-Kochheiser
-Kobler
-Kobara
-Knezevich
-Kneifl
-Knapchuck
-Knabb
-Klutz
-Klugman
-Klosner
-Klingel
-Klimesh
-Klice
-Kley
-Kleppe
-Klemke
-Kleinmann
-Kleinhans
-Kleinberg
-Kleffner
-Kleckley
-Klase
-Kisto
-Kissick
-Kisselburg
-Kirsten
-Kirschman
-Kirks
-Kirkner
-Kirkey
-Kirchman
-Kipling
-Kinville
-Kinnunen
-Kingdom
-Kimmey
-Kimmerle
-Kimbley
-Kilty
-Kilts
-Killmeyer
-Killilea
-Killay
-Kiest
-Kierce
-Kiepert
-Kielman
-Khalid
-Kewal
-Keszler
-Kesson
-Kesich
-Kerwood
-Kerksiek
-Kerkhoff
-Kerbo
-Keranen
-Keomuangtai
-Kenter
-Kennelley
-Keniry
-Kendzierski
-Kempner
-Kemmis
-Kemerling
-Kelsay
-Kelchner
-Kela
-Keithly
-Keipe
-Kegg
-Keer
-Keahey
-Kaywood
-Kayes
-Kawahara
-Kasuboski
-Kastendieck
-Kassin
-Kasprzyk
-Karraker
-Karnofski
-Karman
-Karger
-Karge
-Karella
-Karbowski
-Kapphahn
-Kap
-Kannel
-Kamrath
-Kaminer
-Kamansky
-Kalua
-Kaltz
-Kalpakoff
-Kalkbrenner
-Kaku
-Kaib
-Kaehler
-Kackley
-Kaber
-Justo
-Juris
-Jurich
-Jurgenson
-Jurez
-Junor
-Juniel
-Juncker
-Jugo
-Jubert
-Jowell
-Jovanovic
-Josiah
-Joosten
-Joncas
-Joma
-Johnso
-Johanns
-Jodoin
-Jockers
-Joans
-Jinwright
-Jinenez
-Jimeson
-Jerrett
-Jergens
-Jerden
-Jerdee
-Jepperson
-Jendras
-Jeanfrancois
-Jazwa
-Jaussi
-Jaster
-Jarzombek
-Jarencio
-Janocha
-Jakab
-Jadlowiec
-Jacobsma
-Jach
-Izaquirre
-Iwaoka
-Ivaska
-Iturbe
-Israelson
-Ismael
-Isles
-Isachsen
-Isaak
-Irland
-Inzerillo
-Insogna
-Ingegneri
-Ingalsbe
-Inciong
-Inagaki
-Idol
-Icenogle
-Hyon
-Hyett
-Hyers
-Huyck
-Hutti
-Hutten
-Hutnak
-Hussar
-Husky
-Hurrle
-Hurford
-Hurde
-Hupper
-Hunkin
-Hunkele
-Hunke
-Hun
-Humann
-Huhtasaari
-Hugger
-Hugel
-Huge
-Hufft
-Huegel
-Hrobsky
-Hren
-Hoyles
-Howlin
-Hovsepian
-Hovenga
-Hovatter
-Houdek
-Hotze
-Hossler
-Hossfeld
-Hosseini
-Horten
-Hort
-Horr
-Horgen
-Horen
-Hoopii
-Hoon
-Hoogland
-Hontz
-Honnold
-Homewood
-Holway
-Holtgrewe
-Holtan
-Holstrom
-Holstege
-Hollway
-Hollingshed
-Holling
-Hollenback
-Hollard
-Holberton
-Hoines
-Hogeland
-Hofstad
-Hoetger
-Hoen
-Hoaglund
-Hirota
-Hintermeister
-Hinnen
-Hinders
-Hinderer
-Hinchee
-Himelfarb
-Himber
-Hilzer
-Hilling
-Hillers
-Hillegas
-Hildinger
-Hignight
-Highman
-Hierholzer
-Heyde
-Hettich
-Hesketh
-Herzfeld
-Herzer
-Hershenson
-Hershberg
-Hernando
-Hermenegildo
-Hereth
-Hererra
-Hereda
-Herbin
-Heraty
-Herard
-Hepa
-Henschel
-Henrichsen
-Hennes
-Henneberger
-Heningburg
-Henig
-Hendron
-Hendericks
-Hemple
-Hempe
-Hemmingsen
-Hemler
-Helvie
-Helmly
-Helmbrecht
-Heling
-Helin
-Helfrey
-Helble
-Helaire
-Heizman
-Heisser
-Heiny
-Heinbaugh
-Heigh
-Heidemann
-Heidema
-Heiberger
-Hegel
-Heerdt
-Heeg
-Heefner
-Heckerman
-Heckendorf
-Heavin
-Headman
-Haynesworth
-Haylock
-Hayakawa
-Hawksley
-Hawking
-Haverstick
-Haut
-Hausen
-Hauke
-Haubold
-Hattan
-Hattabaugh
-Hasten
-Hasstedt
-Hashem
-Haselhorst
-Harrist
-Harpst
-Haroldsen
-Harmison
-Harkema
-Hark
-Harison
-Hariri
-Harcus
-Harcum
-Harcourt
-Harcharik
-Hanzel
-Hanvey
-Hantz
-Hansche
-Hansberger
-Hannig
-Hanken
-Hanhardt
-Hanf
-Hanauer
-Hamberlin
-Halward
-Halsall
-Hals
-Hallquist
-Hallmon
-Halk
-Halbach
-Halat
-Hajdas
-Hainsworth
-Haik
-Hahm
-Hagger
-Haggar
-Hader
-Hadel
-Haddick
-Hackmann
-Haasch
-Haaf
-Guzzetta
-Guzy
-Gutterman
-Gutmann
-Gutkowski
-Gustine
-Gursky
-Gurner
-Gunsolley
-Gumpert
-Gumbel
-Gulla
-Guilmain
-Guiliani
-Guier
-Guers
-Guerero
-Guerena
-Guebara
-Guadiana
-Grunder
-Grothoff
-Grosland
-Grosh
-Groos
-Grohs
-Grohmann
-Groepper
-Grodi
-Grizzaffi
-Grissinger
-Grippi
-Grinde
-Griffee
-Grether
-Greninger
-Greigo
-Gregorski
-Greger
-Grega
-Greenberger
-Graza
-Grattan
-Grasse
-Gras
-Grano
-Gramby
-Gradilla
-Govin
-Goutremout
-Goulas
-Gotay
-Gosling
-Gorey
-Goren
-Gordner
-Goossen
-Goon
-Goodwater
-Gonzaga
-Gonyo
-Gonska
-Gongalves
-Gomillion
-Gombos
-Golonka
-Gollman
-Goldtrap
-Goldammer
-Golas
-Golab
-Gola
-Gogan
-Goffman
-Goeppinger
-Godkin
-Godette
-Glore
-Glomb
-Glauner
-Glassey
-Glasner
-Gividen
-Giuffrida
-Gishal
-Giovanelli
-Ginoza
-Ginns
-Gindlesperger
-Gindhart
-Gillem
-Gilger
-Giggey
-Giebner
-Gibbson
-Giacomo
-Giacolone
-Giaccone
-Giacchino
-Ghere
-Gherardini
-Gherardi
-Gfeller
-Getts
-Gerwitz
-Gervin
-Gerstle
-Gerfin
-Geremia
-Gercak
-General
-Gener
-Gencarelli
-Gehron
-Gehrmann
-Geffers
-Geery
-Geater
-Gawlik
-Gaudino
-Garsia
-Garrahan
-Garrabrant
-Garofolo
-Garigliano
-Garfinkle
-Garelick
-Gardocki
-Garafola
-Gappa
-Gantner
-Ganther
-Gangelhoff
-Gamarra
-Galstad
-Gally
-Gallik
-Gallier
-Galimba
-Gali
-Galassi
-Gaige
-Gadsby
-Gabby
-Gabbin
-Gabak
-Fyall
-Furney
-Funez
-Fulwider
-Fulson
-Fukunaga
-Fujikawa
-Fugere
-Fuertes
-Fuda
-Fryson
-Frump
-Frothingham
-Froning
-Froncillo
-Frohling
-Froberg
-Froats
-Fritchman
-Frische
-Friedrichsen
-Friedmann
-Fridge
-Friddell
-Frid
-Fresch
-Frentzel
-Freno
-Frelow
-Freimuth
-Freidel
-Freehan
-Freeby
-Freeburn
-Fredieu
-Frederiksen
-Fredeen
-Frazell
-Frayser
-Fratzke
-Frattini
-Franze
-Franich
-Francescon
-Francesco
-Frames
-Framer
-Fraiser
-Fragman
-Frack
-Foxe
-Fowlston
-Fosberg
-Fortna
-Fornataro
-Forden
-Foots
-Foody
-Fogt
-Foglia
-Fogerty
-Fogelson
-Flygare
-Flowe
-Florentine
-Flinner
-Flem
-Flatten
-Flath
-Flater
-Flahaven
-Flad
-Fjeld
-Fitanides
-Fistler
-Fishbaugh
-Firsching
-Fireman
-Finzel
-Finical
-Fingar
-Filosa
-Filicetti
-Filby
-Fierst
-Fierra
-Ficklen
-Ficher
-Fersner
-Ferrufino
-Ferrucci
-Fero
-Ferns
-Ferlenda
-Ferko
-Fergerstrom
-Ferge
-Fenty
-Fent
-Fennimore
-Fendt
-Femat
-Felux
-Felman
-Feldhaus
-Feisthamel
-Feijoo
-Feiertag
-Fehrman
-Fehl
-Feezell
-Feeny
-Feeback
-Fedigan
-Fedder
-Fechner
-Feary
-Fayson
-Faylor
-Fauteux
-Faustini
-Faure
-Fauci
-Fauber
-Fattig
-Farruggio
-Farrens
-Fare
-Faraci
-Fantini
-Fantin
-Fanno
-Fannings
-Faniel
-Fallaw
-Falker
-Falkenhagen
-Fajen
-Fahrner
-Fabel
-Fabacher
-Eytcheson
-Eyster
-Exford
-Exel
-Exe
-Evetts
-Evenstad
-Evanko
-Euresti
-Euber
-Etcitty
-Estler
-Esther
-Essner
-Essinger
-Esplain
-Espenshade
-Espanol
-Espaillat
-Escribano
-Escorcia
-Errington
-Errett
-Errera
-Erlanger
-Erenrich
-Erekson
-Erber
-Entinger
-Ensworth
-Ensell
-Enno
-Ennen
-Englin
-Engblom
-Engberson
-Encinias
-Enama
-Emel
-Elzie
-Elsbree
-Elmo
-Elman
-Elm
-Ellebracht
-Elkan
-Elfstrom
-Elerson
-Eleazer
-Eleam
-Eldrige
-Elcock
-Einspahr
-Eike
-Eidschun
-Eid
-Eickman
-Eichele
-Eiche
-Ehlke
-Eguchi
-Eggink
-Edouard
-Edgehill
-Eckes
-Eblin
-Ebberts
-Eavenson
-Earvin
-Eardley
-Eagon
-Eader
-Dzubak
-Dylla
-Dyckman
-Dwire
-Dutrow
-Dutile
-Dusza
-Dustman
-Dusing
-Duryee
-Durupan
-Durtschi
-Durtsche
-Durell
-Dunny
-Dunnegan
-Dunken
-Dun
-Dumm
-Dulak
-Duker
-Dukelow
-Dufort
-Dufilho
-Duffee
-Duett
-Dueck
-Dudzinski
-Dudasik
-Duckwall
-Duchemin
-Dubrow
-Dubis
-Dubicki
-Duba
-Drust
-Druckman
-Drinnen
-Drewett
-Drewel
-Dreitzler
-Dreckman
-Drappo
-Draffen
-Drabant
-Doyen
-Dowding
-Doub
-Dorson
-Dorschner
-Dorrington
-Dorney
-Dormaier
-Dorff
-Dorcy
-Donges
-Donelly
-Donel
-Domangue
-Dols
-Dollahite
-Dolese
-Doldo
-Doiley
-Dohrman
-Dohn
-Doheny
-Doceti
-Dobry
-Dobrinski
-Dobey
-Divincenzo
-Dischinger
-Dirusso
-Dirocco
-Dipiano
-Diop
-Dinitto
-Dinehart
-Dimsdale
-Diminich
-Dimalanta
-Dillavou
-Dilello
-Difusco
-Diffey
-Diffenderfer
-Diffee
-Difelice
-Difabio
-Dietzman
-Dieteman
-Diepenbrock
-Dieckmann
-Dicey
-Dicampli
-Dibari
-Diazdeleon
-Diallo
-Dewitz
-Dewiel
-Devoll
-Devol
-Devincent
-Devier
-Devendorf
-Devalk
-Detten
-Detraglia
-Dethomas
-Deter
-Detemple
-Desler
-Desharnais
-Desanty
-Derocco
-Dermer
-Derks
-Derito
-Derick
-Derhammer
-Deraney
-Dequattro
-Depass
-Depadua
-Deon
-Denzel
-Denyes
-Denyer
-Dentino
-Denlinger
-Deneal
-Demory
-Demopoulos
-Demontigny
-Demonte
-Demeza
-Delsol
-Delrosso
-Delpit
-Delpapa
-Delouise
-Delone
-Delo
-Delmundo
-Delmore
-Delmar
-Dellapaolera
-Delfin
-Delfierro
-Deleonardis
-Delenick
-Delcarlo
-Delcampo
-Delcamp
-Delawyer
-Delaware
-Delaroca
-Delaluz
-Delahunt
-Delaguardia
-Dekeyser
-Dekay
-Dejaeger
-Dejackome
-Dehay
-Dehass
-Degraffenried
-Degenhart
-Degan
-Deever
-Deedrick
-Deckelbaum
-Dechico
-Decent
-Dececco
-Decasas
-Debrock
-Debona
-Debeaumont
-Debarros
-Debaca
-Dearmore
-Deangelus
-Dealmeida
-Dawood
-Davney
-Daudt
-Datri
-Dasgupta
-Darring
-Darracott
-Darius
-Darcus
-Daoud
-Dansbury
-Dannels
-Danish
-Danielski
-Danehy
-Dancey
-Damour
-Dambra
-Daman
-Dalcour
-Daisey
-Dahlheimer
-Dagon
-Dadisman
-Dacunto
-Dacamara
-Dabe
-Cyrulik
-Cyphert
-Cwik
-Cussen
-Curles
-Curit
-Curby
-Curbo
-Cunas
-Cunard
-Cunanan
-Cumpton
-Culcasi
-Cui
-Cucinotta
-Cucco
-Csubak
-Cruthird
-Crumwell
-Crummitt
-Crumedy
-Crouthamel
-Cronce
-Cromack
-Cristina
-Crisafi
-Crimin
-Cresto
-Crescenzo
-Cremonese
-Creedon
-Credit
-Crankshaw
-Cozzens
-Cove
-Coval
-Courtwright
-Courcelle
-Coupland
-Counihan
-Coullard
-Cotrell
-Cosgrave
-Cornfield
-Cornelio
-Corish
-Cordoua
-Corbit
-Coppersmith
-Coonfield
-Cools
-Conville
-Contrell
-Contento
-Conser
-Conrod
-Connole
-Congrove
-Conery
-Condray
-Colver
-Coltman
-Colflesh
-Colcord
-Colavito
-Colar
-Coile
-Coggan
-Coenen
-Codling
-Coda
-Cockroft
-Cockrel
-Cockerill
-Cocca
-Coberley
-Coaster
-Clouden
-Clos
-Clive
-Clish
-Clint
-Clinkscale
-Clester
-Clammer
-City
-Cittadino
-Citrano
-Ciresi
-Cillis
-Ciccarelli
-Ciborowski
-Ciarlo
-Ciardullo
-Chritton
-Chopp
-Choo
-Chirco
-Chilcoat
-Chevarie
-Cheslak
-Chernak
-Chay
-Chatterjee
-Chatten
-Chatagnier
-Chastin
-Chappuis
-Channing
-Channey
-Champlain
-Chalupsky
-Chalfin
-Chaffer
-Chadek
-Chadderton
-Cestone
-Cestero
-Cestari
-Cerros
-Cermeno
-Centola
-Cedrone
-Cayouette
-Cavan
-Cavaliero
-Casuse
-Castricone
-Castoreno
-Casten
-Castanada
-Castagnola
-Casstevens
-Cassio
-Cassi
-Cassanova
-Caspari
-Casher
-Cashatt
-Casco
-Casassa
-Casad
-Carville
-Carvel
-Cartland
-Cartegena
-Carsey
-Carsen
-Carrino
-Carrilo
-Carpinteyro
-Carmley
-Carlston
-Carlsson
-Carie
-Cariddi
-Caricofe
-Carel
-Cardy
-Carducci
-Carby
-Carangelo
-Capriotti
-Capria
-Caprario
-Capelo
-Canul
-Cantua
-Cantlow
-Canny
-Cangialosi
-Canepa
-Candland
-Campolo
-Campi
-Camors
-Camino
-Camfield
-Camelo
-Camarero
-Camaeho
-Calvano
-Callum
-Calliste
-Caldarella
-Calcutt
-Calcano
-Caissie
-Cager
-Caccamo
-Cabotage
-Cabble
-Byman
-Buzby
-Butkowski
-Bussler
-Busico
-Bushy
-Bushovisky
-Busbin
-Busard
-Busalacchi
-Burtman
-Burrous
-Burridge
-Burrer
-Burno
-Burin
-Burgette
-Burdock
-Burdier
-Burckhard
-Bunten
-Bungay
-Bundage
-Bumby
-Bultema
-Bulinski
-Bulan
-Bukhari
-Buganski
-Buerkle
-Buen
-Buehl
-Bue
-Budzynski
-Buckham
-Bub
-Bryk
-Brydon
-Bruyere
-Brunsvold
-Brunnett
-Brunker
-Brunfield
-Brumble
-Brue
-Brozina
-Brossman
-Brosey
-Brookens
-Broersma
-Brodrick
-Brockmeier
-Brockhouse
-Brisky
-Brinkly
-Brine
-Brincefield
-Brighenti
-Brigante
-Brieno
-Briede
-Bridenbaugh
-Bridegroom
-Brickett
-Bria
-Breske
-Brener
-Brenchley
-Breitkreutz
-Breitbart
-Breister
-Breining
-Breighner
-Breidel
-Brehon
-Breheny
-Breard
-Brean
-Breakell
-Breach
-Brazill
-Braymiller
-Braum
-Brau
-Brashaw
-Bransom
-Brandolino
-Brancato
-Branagan
-Braff
-Brading
-Bracker
-Brackenbury
-Bracher
-Braasch
-Boylen
-Boyda
-Boyanton
-Bowlus
-Bowditch
-Boutot
-Bouthillette
-Boursiquot
-Bourjolly
-Bouret
-Bouquet
-Boulerice
-Bouer
-Bouchillon
-Bouchie
-Bottin
-Boteilho
-Bosko
-Bosack
-Borys
-Bors
-Borla
-Borjon
-Borghi
-Borah
-Booty
-Booten
-Boore
-Bonuz
-Bonne
-Bongers
-Boneta
-Bonawitz
-Bonanni
-Bomer
-Bollen
-Bollard
-Bolla
-Bolio
-Boisseau
-Boies
-Boiani
-Bohorquez
-Boghossian
-Boespflug
-Boeser
-Boehl
-Boegel
-Bodrick
-Bodkins
-Bodenstein
-Bodell
-Bockover
-Bocci
-Bobbs
-Boals
-Boahn
-Boadway
-Bluma
-Bluett
-Bloor
-Blomker
-Blevens
-Blethen
-Bleecker
-Blayney
-Blaske
-Blasetti
-Blancas
-Blackner
-Blackie
-Bjorkquist
-Bjerk
-Bizub
-Bisono
-Bisges
-Bisaillon
-Birr
-Birnie
-Bires
-Birdtail
-Birdine
-Bina
-Billock
-Billinger
-Billig
-Billet
-Bigwood
-Bigalk
-Bielicki
-Biddick
-Biccum
-Biafore
-Bhagat
-Beza
-Beyah
-Bex
-Bevier
-Bevell
-Beute
-Betzer
-Betthauser
-Bethay
-Bethard
-Beshaw
-Bertholf
-Bertels
-Berridge
-Bernot
-Bernath
-Bernabei
-Berkson
-Berkovitz
-Berkich
-Bergsten
-Berget
-Berezny
-Berdin
-Beougher
-Benthin
-Benhaim
-Benenati
-Benejan
-Bemiss
-Beloate
-Bellucci
-Bells
-Bellotti
-Belling
-Bellido
-Bellaire
-Bellafiore
-Bekins
-Bekele
-Beish
-Behnken
-Beerly
-Beddo
-Becket
-Becke
-Bebeau
-Beauchaine
-Beaucage
-Beadling
-Beacher
-Bazar
-Baysmore
-Bayers
-Baun
-Baulch
-Baucher
-Batto
-Baton
-Bathe
-Basora
-Baruffi
-Bartimus
-Bartholemew
-Barrickman
-Barribeau
-Barreda
-Barrack
-Baroody
-Barness
-Barn
-Barmer
-Barillari
-Barias
-Barginear
-Barg
-Barde
-Barbone
-Barbato
-Barbarin
-Baoloy
-Bansal
-Bangle
-Banducci
-Bandel
-Bambeck
-Balter
-Ballif
-Baller
-Balladares
-Balkus
-Baldy
-Baldivia
-Balcerzak
-Balazs
-Baksh
-Bakr
-Bakemeier
-Baisey
-Bainer
-Bailly
-Bagge
-Badua
-Badini
-Bachtell
-Bachrodt
-Bachorski
-Bacak
-Babula
-Bable
-Babjeck
-Babecki
-Azbell
-Ayudan
-Awai
-Avita
-Avino
-Avellar
-Auzat
-Autman
-Autio
-Autery
-Ausman
-Ausland
-Aulabaugh
-Augle
-Aughenbaugh
-Augeri
-Audi
-Attleson
-Attig
-Attal
-Ator
-Asselmeier
-Askland
-Asiello
-Asch
-Arya
-Artola
-Arslanian
-Arron
-Arrezola
-Arnesen
-Arnau
-Armster
-Armintrout
-Armento
-Armato
-Arkenberg
-Ariaza
-Arguin
-Arenson
-Areias
-Archut
-Archibold
-Arave
-Arand
-Appelman
-Appello
-Antonson
-Antoniewicz
-Antill
-Antigua
-Annino
-Anness
-Anneler
-Angustia
-Angry
-Angiolillo
-Angelico
-Andreula
-Andreen
-Andreassi
-Andeson
-Ander
-Anda
-Anania
-Anadio
-Amicone
-Amenta
-Alzaga
-Alwardt
-Aluarado
-Altreche
-Altic
-Alsobrooks
-Alpern
-Almodova
-Almas
-Alltop
-Alliston
-Allio
-Alipio
-Alicandro
-Alibozek
-Alguire
-Alff
-Alcalde
-Alborn
-Albery
-Alberry
-Albany
-Albani
-Albanez
-Alavi
-Akkerman
-Ahlheim
-Agresti
-Agnelli
-Agilar
-Agib
-Aggas
-Afton
-Afonso
-Adil
-Adi
-Adank
-Adamsky
-Acri
-Accurso
-Abruzzese
-Abrew
-Abeln
-Abdullai
-Abdulkarim
-Abdelrahman
-Abbenante
-Abatiell
-Abaloz
-Zyskowski
-Zwiefel
-Zurmiller
-Zupancic
-Zuno
-Zumsteg
-Zumbrennen
-Zumaya
-Zullinger
-Zuleger
-Zozaya
-Zourkos
-Zorrilla
-Zorko
-Zolocsik
-Zittel
-Ziobro
-Zimmerly
-Zimmerli
-Zillmer
-Zigmond
-Zierer
-Zieber
-Zide
-Zevenbergen
-Zephier
-Zemel
-Zelazo
-Zeitlin
-Zeiser
-Zehring
-Zeger
-Zedian
-Zearfoss
-Zbranek
-Zaya
-Zatarain
-Zasso
-Zarn
-Zarilla
-Zari
-Zapp
-Zapf
-Zanghi
-Zange
-Zamacona
-Zalesky
-Zalazar
-Zaki
-Zafar
-Zade
-Yusko
-Yurman
-Yurkovich
-Yuhasz
-Younge
-Yiu
-Yeasted
-Yarrito
-Yark
-Yarboro
-Yannuzzi
-Yankovich
-Yanagawa
-Yago
-Yaffe
-Wyndham
-Wyms
-Wyand
-Wuensch
-Wryals
-Wrubel
-Worosz
-Woolstenhulme
-Wolpe
-Wolner
-Wolgamot
-Wolfman
-Wojtaszek
-Woeppel
-Woehr
-Wodarski
-Wizwer
-Wittkop
-Wisseman
-Wisor
-Wishum
-Wischmann
-Wisch
-Wirkkala
-Wion
-Wintjen
-Wintermute
-Wintermantel
-Winks
-Winkey
-Winham
-Windschitl
-Willow
-Willitzer
-Willier
-Willets
-Willenbrink
-Willen
-Willaimson
-Wilfahrt
-Wilenkin
-Wilen
-Wildeboer
-Wilchek
-Wigren
-Wignall
-Wiggington
-Wierson
-Wiegman
-Wiegel
-Widmayer
-Wider
-Widder
-Wickey
-Wickers
-Wical
-Whiton
-Whitenton
-Whiteleather
-Whiston
-Whirley
-Whetham
-Wheatly
-Wetenkamp
-Westenberger
-Westenbarger
-Westall
-Werblow
-Wengel
-Welson
-Welschmeyer
-Wellmann
-Wellbrock
-Wela
-Wekenborg
-Weiter
-Weisenstein
-Wehmann
-Weeda
-Wede
-Webley
-Waver
-Wauford
-Waterworth
-Watchorn
-Wassinger
-Wassell
-Wasp
-Wasiuta
-Warnix
-Warning
-Warnes
-Warmoth
-Warling
-Warila
-Warga
-Warburg
-Wanzer
-Want
-Waner
-Wanek
-Walwyn
-Walle
-Walkner
-Walin
-Waletzko
-Waler
-Walenta
-Wainer
-Wailes
-Wahr
-Waddel
-Wactor
-Wachtler
-Wachsman
-Wachowski
-Vulgamore
-Vukelich
-Vote
-Vost
-Voskamp
-Vorwerk
-Vongphakdy
-Volpi
-Volle
-Volino
-Voeks
-Vodopich
-Vittone
-Virdin
-Virag
-Vinroe
-Vinegar
-Vindiola
-Vilmont
-Villerreal
-Villaneva
-Villalobas
-Villada
-Vilhauer
-Vilchis
-Vilches
-Viggiani
-Vig
-Vieux
-Viets
-Vient
-Vielle
-Viejo
-Vidovich
-Vichi
-Veys
-Veverka
-Verser
-Veronesi
-Vernoy
-Vermont
-Verhines
-Verheyen
-Veren
-Vereb
-Verano
-Venuto
-Ventry
-Ventrone
-Veltz
-Velo
-Velazguez
-Veeser
-Vassey
-Vasque
-Varin
-Varaza
-Varady
-Vaquez
-Vaquerano
-Vansteenwyk
-Vanschoick
-Vanroekel
-Vannorden
-Vanlent
-Vangrouw
-Vangelder
-Vanes
-Vanelli
-Vanderkar
-Vanderbeek
-Vandenburgh
-Vandekieft
-Vandekamp
-Vancura
-Vancooten
-Vanconey
-Vancampen
-Vanaria
-Valvano
-Vallette
-Vallero
-Valiton
-Valin
-Valeri
-Valek
-Valdovino
-Valdivieso
-Vakas
-Vagas
-Vadala
-Vaccarella
-Vacanti
-Urrabazo
-Urguhart
-Urda
-Urbino
-Urbas
-Upmeyer
-Umphlett
-Ulerio
-Uitz
-Uchimura
-Uccello
-Tysdal
-Ty
-Tweedle
-Turrubiates
-Turrubiartes
-Turri
-Turnham
-Turko
-Turben
-Tupin
-Tumulty
-Tuffey
-Tuckey
-Tuckett
-Tucholski
-Tubolino
-Tubergen
-Tsuboi
-Tschumperlin
-Tschoepe
-Trynowski
-Tryba
-Truslow
-Truog
-Trumball
-Trudelle
-Trojillo
-Trnka
-Trizarry
-Trigueiro
-Trigleth
-Tricomi
-Tresselt
-Trentacoste
-Trendell
-Trenary
-Treml
-Treleven
-Treherne
-Treasure
-Trayer
-Travino
-Traugott
-Trappey
-Tranbarger
-Tramontano
-Tramell
-Trainum
-Traino
-Traill
-Trabucco
-Townsell
-Tourtillott
-Touar
-Toscani
-Torrella
-Torguson
-Torda
-Top
-Toomes
-Tonner
-Tommasino
-Tomaro
-Tolve
-Tolefree
-Toguchi
-Tofflemire
-Tofanelli
-Tody
-Toce
-Tobacco
-Toan
-Toalson
-Tkacik
-Tirone
-Tipple
-Tippery
-Tinson
-Tinnell
-Timper
-Timmers
-Times
-Timblin
-Tilotta
-Tillberg
-Tijernia
-Tigges
-Tigar
-Tielking
-Thyng
-Thonen
-Thomley
-Thombs
-Thimmesch
-Thier
-Thevenin
-Theodorov
-Theodoropoulo
-Tharnish
-Tharaldson
-Thackaberry
-Tewari
-Tetu
-Tetter
-Tersigni
-Tepezano
-Tennon
-Tennent
-Teichman
-Teehan
-Tayloe
-Taus
-Tatis
-Tata
-Tat
-Tashima
-Tarufelli
-Tarlow
-Tarkowski
-Tarka
-Targett
-Taran
-Tarabokija
-Tappen
-Tanzer
-Tanous
-Tanigawa
-Taneja
-Tammo
-Tallerico
-Tallada
-Talk
-Talhelm
-Takehara
-Takata
-Tagliavia
-Taffer
-Tadman
-Tacdol
-Tacconi
-Tables
-Szewczak
-Szeredy
-Szanto
-Sympson
-Symmes
-Syers
-Sydney
-Syas
-Swinny
-Swierk
-Swendsen
-Sweigard
-Sweezey
-Sweesy
-Sween
-Sweely
-Sweed
-Sweazy
-Swauger
-Swansbrough
-Swango
-Swanda
-Swamp
-Swallows
-Swaggerty
-Svatek
-Survant
-Surowka
-Surina
-Suozzi
-Sunstrom
-Sunford
-Sundseth
-Sundahl
-Summerill
-Sumida
-Sumbler
-Suma
-Sulyma
-Sulla
-Sulieman
-Suit
-Sugiyama
-Suell
-Sudo
-Suddreth
-Sucher
-Sturn
-Sturkey
-Studzinski
-Studler
-Stuckmeyer
-Stryjewski
-Stroy
-Strotman
-Strollo
-Stroik
-Stroede
-Streeby
-Stredny
-Strazi
-Stray
-Strawderman
-Straiton
-Stower
-Stoudmire
-Stormont
-Stopka
-Stoneback
-Stoldt
-Stolarz
-Stolarski
-Stockmaster
-Stobb
-Stivason
-Stirk
-Stipp
-Stipes
-Stingel
-Stike
-Stiebel
-Stidd
-Steurer
-Sterley
-Sterle
-Stepro
-Stepovich
-Stephson
-Stenseth
-Stenerson
-Stello
-Steinbrook
-Steidley
-Stehlin
-Stegmaier
-Stefanow
-Steese
-Steenhuis
-Stavely
-Stave
-Stautz
-Staunton
-Stater
-Stas
-Startup
-Startt
-Startin
-Starratt
-Stargell
-Starcevich
-Stank
-Stanis
-Standing
-Stancliff
-Stanchfield
-Stanbrough
-Stakes
-Stahmer
-Staheli
-Staebell
-Stadtlander
-Stadheim
-Sroufe
-Sroczynski
-Srnsky
-Sreaves
-Srader
-Squeo
-Spuler
-Sproat
-Springmeyer
-Sprengeler
-Sport
-Spolar
-Spivack
-Spinale
-Spiegler
-Spickerman
-Spessard
-Spenner
-Speich
-Spaziano
-Sparaco
-Spalter
-Sowells
-Sovich
-Southmayd
-Southgate
-Sotto
-Sotomayer
-Sosaya
-Sorvillo
-Sorrel
-Soos
-Songco
-Somerset
-Somero
-Soll
-Soldan
-Solarzano
-Solana
-Sokal
-Soibelman
-Soesbe
-Sobotta
-Sobina
-Sobeck
-Soard
-Snorton
-Snopek
-Snoozy
-Snethen
-Smithhisler
-Smee
-Smaniotto
-Slusarski
-Slowe
-Slotnick
-Sleva
-Sleighter
-Slappey
-Skyers
-Skutt
-Skorcz
-Skoczylas
-Skillicorn
-Skiffington
-Skibicki
-Skerl
-Skehan
-Skalla
-Siwinski
-Sivley
-Sittloh
-Sitterly
-Sith
-Sit
-Sise
-Siroky
-Sirles
-Sirin
-Sirignano
-Siren
-Sinsabaugh
-Sinks
-Sinisi
-Sinibaldi
-Singson
-Sindlinger
-Simpkin
-Siminski
-Simcoe
-Siford
-Siegert
-Sidor
-Sidhom
-Siddique
-Siddell
-Sicotte
-Sichting
-Sicari
-Sic
-Siano
-Shufflebarger
-Shramek
-Shortnacy
-Sholler
-Sholette
-Sholders
-Shogren
-Shoenberger
-Shoemate
-Shoat
-Shinoda
-Shines
-Shimshak
-Shigley
-Sheward
-Shetrone
-Shetlar
-Sherretts
-Sherod
-Shenkle
-Shely
-Sheltra
-Shelpman
-Shellabarger
-Shelite
-Sheldrick
-Shelburn
-Sheinbein
-Shebby
-Shawley
-Shatrau
-Shartle
-Sharifi
-Shanker
-Shami
-Shamel
-Shamburg
-Shamas
-Shallow
-Shaffstall
-Shadowens
-Shackleton
-Shaak
-Seykora
-Seyfert
-Sevillano
-Sevcik
-Seubert
-Seu
-Setter
-Sesler
-Servatius
-Serrant
-Serramo
-Serl
-Serini
-Serenil
-Serapion
-Sept
-Sensibaugh
-Sens
-Senich
-Sengbusch
-Sendra
-Senate
-Semrau
-Semrad
-Sempertegui
-Semons
-Semke
-Selma
-Sellinger
-Seliga
-Sekel
-Seilheimer
-Seigfried
-Seesholtz
-Seefeld
-Seecharran
-Sedrakyan
-Seavy
-Search
-Seamster
-Seabold
-Scyoc
-Sculley
-Scullawl
-Scrogham
-Scow
-Scopa
-Scontras
-Sciulli
-Sciola
-Scifres
-Schweyen
-Schwering
-Schwerdtfeger
-Schweim
-Schweikert
-Schweder
-Schwebel
-Schwartzwalde
-Schusterman
-Schuhmann
-Schuerman
-Schuchman
-Schrotenboer
-Schreurs
-Schoppert
-Schopper
-Schools
-Schoneman
-Scholfield
-Schoeppner
-Schoenleber
-Schoeman
-Schoel
-Schnurbusch
-Schnepel
-Schnader
-Schlarb
-Schlappi
-Schlangen
-Schlaht
-Schiraldi
-Schinkel
-Schimizzi
-Schifo
-Schiesher
-Scheyer
-Schettler
-Scheppke
-Schepper
-Scheinost
-Scheidel
-Scheets
-Schatzman
-Scharwath
-Scharp
-Schaarschmidt
-Schaack
-Scarnato
-Scarnati
-Scaringi
-Scarcia
-Scarano
-Sberna
-Sawina
-Sawer
-Sawaya
-Sawatzky
-Savcedo
-Sauser
-Saumier
-Sauchez
-Sauceman
-Sathre
-Satawa
-Sasala
-Sartoris
-Sare
-Sarchet
-Saracco
-Santulli
-Santory
-Santorelli
-Santopietro
-Sansing
-Sanseverino
-Saniatan
-Sangiacomo
-Sanges
-Sanfratello
-Sanflippo
-Sandona
-Sandelin
-Sandate
-Samona
-Sammis
-Sambor
-Samano
-Salvitti
-Salvietti
-Salvi
-Salum
-Salsa
-Salonek
-Salm
-Salles
-Sall
-Salera
-Salemo
-Salee
-Salak
-Sakihara
-Sakasegawa
-Sakaguchi
-Sagastegui
-Saeturn
-Sadan
-Sacayanan
-Saborio
-Sabeiha
-Sabedra
-Sabagh
-Rzepecki
-Rzasa
-Ryser
-Ryner
-Rydman
-Rycroft
-Rybij
-Ruyes
-Ruttan
-Russon
-Rushe
-Rusert
-Rusell
-Runnells
-Rundstrom
-Rumschlag
-Rullman
-Ruka
-Ruiloba
-Ruh
-Ruggs
-Ruffer
-Ruest
-Rueluas
-Rueger
-Ruediger
-Rubinoff
-Rubendall
-Rozmus
-Roxburgh
-Rowls
-Rousch
-Rothove
-Rotelli
-Roszel
-Roske
-Roskam
-Rosensteel
-Rosendo
-Roome
-Rombough
-Romash
-Romanson
-Romanello
-Romance
-Rolison
-Rogol
-Rogas
-Roese
-Roehrs
-Roegner
-Roeger
-Rodrguez
-Rodeman
-Rodebaugh
-Rockenbaugh
-Rocconi
-Robleto
-Robateau
-Roarty
-Roaf
-Rivenberg
-Rivara
-Rivali
-Risse
-Risby
-Ripperger
-Riopelle
-Ringrose
-Rinebarger
-Rile
-Riggen
-Rigano
-Riff
-Rifenbark
-Rieper
-Rieffenberger
-Riedmayer
-Ridolfi
-Ridderhoff
-Rickon
-Rickers
-Rickels
-Richoux
-Richens
-Ribao
-Rhodarmer
-Rheingans
-Reznik
-Reveron
-Reus
-Reph
-Renko
-Remme
-Remlinger
-Remke
-Remily
-Reitano
-Reissig
-Reisher
-Reinitz
-Reinholtz
-Reines
-Reigstad
-Reigh
-Reichelderfer
-Rehnert
-Rehagen
-Redline
-Rediger
-Redhouse
-Redepenning
-Recla
-Rechkemmer
-Reando
-Razavi
-Rayson
-Rayna
-Rax
-Raveling
-Rauser
-Rauschenberg
-Raupach
-Raum
-Rauen
-Ratulowski
-Ratterree
-Ratering
-Rapin
-Rannels
-Rane
-Randhawa
-Ramus
-Ramsfield
-Rams
-Ramroop
-Ramano
-Raj
-Raina
-Raikes
-Ragonese
-Rafaniello
-Raetz
-Raether
-Raeside
-Radwan
-Radman
-Rademaker
-Radar
-Racki
-Rachlin
-Rabena
-Rabassa
-Rabadan
-Raad
-Quoss
-Quizon
-Quito
-Quintela
-Quimet
-Quilty
-Quilimaco
-Quidley
-Quezaire
-Quave
-Quarto
-Quaranto
-Quandel
-Qiu
-Qazi
-Pyrdum
-Pyon
-Pyeatt
-Puzinski
-Putnal
-Punter
-Pumphery
-Pumper
-Pump
-Pummell
-Pumarejo
-Pulvermacher
-Pultz
-Pully
-Pullens
-Pulkrabek
-Pulk
-Pudlinski
-Puccetti
-Przygocki
-Przybyszewski
-Prusha
-Prudente
-Prucnal
-Prottsman
-Prosch
-Prodoehl
-Procell
-Prinzivalli
-Primes
-Prey
-Presnar
-Presho
-Prentis
-Preisler
-Preisel
-Pratka
-Pratcher
-Prass
-Pozzuoli
-Powanda
-Poundstone
-Potters
-Potra
-Potestio
-Potempa
-Postlethwait
-Posas
-Portrum
-Portland
-Portilla
-Portie
-Popovitch
-Popken
-Ponzio
-Pontremoli
-Pontarelli
-Pombo
-Pomainville
-Polycarpe
-Pollart
-Politowski
-Politano
-Poliquin
-Polczynski
-Pokoj
-Poitevint
-Poissonnier
-Poeppel
-Poellot
-Poehlman
-Poehlein
-Podratz
-Pociask
-Plocher
-Pline
-Plessinger
-Plautz
-Platten
-Plass
-Plageman
-Placko
-Pizzola
-Pizzella
-Pittsenbarger
-Pittner
-Pitstick
-Pitsch
-Pitney
-Pitaniello
-Pistoresi
-Pirc
-Pinski
-Pinera
-Pincock
-Pinckley
-Pincince
-Piliero
-Pilat
-Pigue
-Pietschman
-Pierpoint
-Pierini
-Picon
-Picking
-Picardi
-Phlegm
-Phippin
-Phetteplace
-Pharel
-Pfundt
-Pfluger
-Pfeuffer
-Pfefferle
-Pezzulo
-Pezzano
-Peveler
-Pettersson
-Petsch
-Petrusky
-Petruska
-Petrulis
-Petrossian
-Petroske
-Petrini
-Petitte
-Petito
-Petela
-Petaccio
-Pesto
-Pestka
-Pesta
-Pessoa
-Perun
-Perrow
-Perricone
-Peros
-Perney
-Perlin
-Perigo
-Perella
-Percle
-Pepple
-Penz
-Penttila
-Pensiero
-Penigar
-Penez
-Pendrak
-Penas
-Pellowski
-Pellow
-Pellin
-Pelissier
-Pelini
-Pekrul
-Peevey
-Pedraja
-Pecher
-Peasel
-Payment
-Pavolini
-Paviolitis
-Paulsell
-Paulina
-Paule
-Patrum
-Patrone
-Patrie
-Patras
-Patera
-Patek
-Patane
-Pastrano
-Pastora
-Passow
-Passley
-Passaretti
-Passantino
-Paske
-Partible
-Parsa
-Parnes
-Parliman
-Parlato
-Paravati
-Paradowski
-Papaleo
-Papagni
-Paoletta
-Panzarino
-Pannunzio
-Panis
-Pandit
-Paluzzi
-Palomin
-Palomaki
-Pallanes
-Palla
-Pall
-Palino
-Palfreyman
-Palazzi
-Palanza
-Palagi
-Painton
-Pain
-Pahulu
-Paganico
-Paeth
-Padlo
-Padillia
-Paddy
-Paddick
-Paciolla
-Pacholski
-Paap
-Paa
-Owolabi
-Overshown
-Overocker
-Overgaard
-Ouchi
-Ottoson
-Ostrye
-Osterland
-Osland
-Oslan
-Osick
-Osen
-Osdoba
-Osberg
-Orzel
-Ortmeier
-Orren
-Ormerod
-Orio
-Orgeron
-Orengo
-Orbaker
-Opiela
-Opdahl
-Onks
-Oltrogge
-Olnick
-Olivarres
-Olide
-Oleksy
-Olaya
-Okray
-Okonek
-Okinaka
-Ojima
-Ojala
-Oinonen
-Ohotto
-Ohan
-Ogwin
-Ogborn
-Oflaherty
-Offill
-Oetken
-Oertle
-Oehlert
-Odems
-Oconnel
-Ocha
-Ocarroll
-Oby
-Oblak
-Oberst
-Obermann
-Obas
-Oachs
-Nydegger
-Nybo
-Nuuanu
-Nutile
-Nuse
-Nuriddin
-Nungesser
-Nuber
-Noy
-Novinger
-Nouri
-Northan
-Norseworthy
-Norrod
-Normington
-Nori
-Norenberg
-Nordine
-Nop
-Noori
-Noblet
-Nives
-Nist
-Niskala
-Nilan
-Nikolai
-Nigl
-Nightengale
-Nichole
-Ni
-Nhek
-Ngvyen
-Newville
-Newsam
-Newnham
-Newmeyer
-Newlan
-Newbert
-Neuschwander
-Neusch
-Neun
-Nethken
-Nethercutt
-Nesser
-Neske
-Neman
-Nelton
-Nelles
-Nekola
-Neiling
-Neeser
-Neelly
-Nedved
-Neang
-Navejar
-Naveja
-Nauarro
-Natho
-Nathe
-Natcher
-Naser
-Nasby
-Narlock
-Nanton
-Naillon
-Naill
-Naguin
-Nagele
-Naftzger
-Naegle
-Naegele
-Naef
-Nacke
-Nabritt
-Mynhier
-Myart
-Muzquiz
-Mutty
-Musolino
-Mushero
-Murtaugh
-Murie
-Muresan
-Murdough
-Mura
-Munuz
-Munstermann
-Munsen
-Munselle
-Munise
-Mungle
-Munerlyn
-Muncher
-Mulrooney
-Mullee
-Mulaney
-Mulanax
-Muhlhauser
-Muhlestein
-Mugleston
-Mugg
-Mugford
-Muckel
-Mucerino
-Mt
-Mrotek
-Mrnak
-Mozdzierz
-Moyler
-Moury
-Moulin
-Moulding
-Moul
-Mottai
-Mostyn
-Mosimann
-Mosholder
-Mosburg
-Morrisseau
-Moron
-Morice
-Morgante
-Moreta
-Morcos
-Morasco
-Morante
-Mooe
-Montori
-Montminy
-Monteforte
-Montante
-Montanari
-Monsees
-Mondier
-Monden
-Monckton
-Monce
-Monarch
-Monarca
-Mompoint
-Mollema
-Molin
-Molima
-Molen
-Molash
-Moher
-Mogle
-Mogannam
-Moel
-Moehn
-Modesitt
-Mobilia
-Moag
-Miyagawa
-Mivshek
-Miu
-Mittman
-Mittleman
-Mittelsteadt
-Mittelstaedt
-Mitsch
-Mithell
-Miscione
-Mirbaha
-Mirabelli
-Mir
-Minon
-Minniti
-Minnerly
-Mingrone
-Minervini
-Minerd
-Minarcin
-Mimnaugh
-Milord
-Milnor
-Milnik
-Millers
-Milkowski
-Mikrot
-Mikles
-Miglorie
-Mientka
-Midthun
-Middlesworth
-Micklos
-Mickler
-Michetti
-Michelli
-Michelet
-Micallef
-Meyn
-Meullion
-Mette
-Metoxen
-Messore
-Messano
-Mesaros
-Mertel
-Merritts
-Merrion
-Merril
-Mermis
-Merlini
-Merker
-Meridith
-Mergel
-Merbaum
-Mente
-Mensi
-Menninger
-Mennen
-Menlove
-Menken
-Menezes
-Menette
-Mendyk
-Mendoca
-Mendivel
-Mendias
-Menasco
-Melloy
-Mellema
-Mellard
-Melis
-Meldahl
-Melberg
-Meirick
-Meinel
-Meiler
-Meile
-Meidl
-Meerdink
-Meer
-Medus
-Meduna
-Medovich
-Medine
-Medico
-Medici
-Mcvaigh
-Mctier
-Mcquirk
-Mcnight
-Mcmurrey
-Mcmurdo
-Mcmorries
-Mcmilleon
-Mcmickell
-Mcmicheal
-Mcmeel
-Mcleese
-Mclee
-Mclaws
-Mclanahan
-Mclaird
-Mckusker
-Mckibbens
-Mckenley
-Mckenize
-Mckendall
-Mckellop
-Mckellip
-Mckeirnan
-Mcinvale
-Mcguffee
-Mcgrue
-Mcgregory
-Mcgrann
-Mcgoey
-Mcglinn
-Mcgillicuddy
-Mcgillen
-Mcgeachy
-Mcgarrell
-Mcgannon
-Mcgalliard
-Mcfarlen
-Mcevers
-Mcerlean
-Mcennis
-Mcelvany
-Mcelvaine
-Mcdonal
-Mcdavitt
-Mccullick
-Mccrone
-Mccreadie
-Mccoun
-Mcconchie
-Mcconaughy
-Mcconahy
-Mcconaghy
-Mccomsey
-Mccoggle
-Mcclimans
-Mccleod
-Mccleaf
-Mcclafferty
-Mccatty
-Mccarry
-Mccance
-Mccament
-Mccaghren
-Mcbreen
-Mcardell
-Mcabier
-Mazell
-Mayotte
-Maybrier
-Mavis
-Mautone
-Matuszek
-Mattimoe
-Mattey
-Matterson
-Matten
-Matsushima
-Matsubara
-Matrone
-Matras
-Mato
-Matier
-Matheus
-Massucci
-Massoni
-Massare
-Maslin
-Mashaw
-Mase
-Mascola
-Masci
-Marze
-Marvray
-Marusak
-Martowski
-Martiny
-Martie
-Martabano
-Marsha
-Marschel
-Marsack
-Marsac
-Marohnic
-Markve
-Markis
-Marking
-Marken
-Marioni
-Marichalar
-Margosian
-Maretti
-Mardesich
-Marcussen
-Marchessault
-Marcey
-Maraldo
-Marafioti
-Manzanero
-Manwill
-Manual
-Manocchio
-Manko
-Manista
-Manire
-Manikowski
-Manganiello
-Manetta
-Mandy
-Mandino
-Mandarino
-Mancinelli
-Manasse
-Manary
-Manalang
-Malling
-Mallahan
-Maliska
-Malet
-Maleski
-Maldonaldo
-Malaterre
-Malaney
-Malagarie
-Malabe
-Maks
-Makinster
-Makar
-Maita
-Maiolo
-Mahley
-Magos
-Mago
-Magnotti
-Magnant
-Maglott
-Maglori
-Maenius
-Madkin
-Madarang
-Madagan
-Macrina
-Macquarrie
-Macphee
-Macneal
-Macmahon
-Maclellan
-Mackeen
-Maciver
-Machkovich
-Machan
-Macewen
-Macera
-Macer
-Maceachern
-Macdonell
-Macaskill
-Maaske
-Lysaght
-Lynum
-Lynema
-Lyas
-Lutton
-Luttman
-Lutsky
-Luthi
-Lutfy
-Lupoe
-Lundrigan
-Lunderville
-Lukan
-Luedeman
-Ludke
-Lucore
-Lucksinger
-Lucks
-Luckner
-Lucarell
-Lubelski
-Luarca
-Luaces
-Lozinski
-Loynes
-Lowis
-Lovorn
-Loverde
-Lovasz
-Loughery
-Lotzer
-Losito
-Loschiavo
-Lorsung
-Lorquet
-Lorkowski
-Lorino
-Lorey
-Lorente
-Loreman
-Lopaz
-Looft
-Lonie
-Longman
-Longhofer
-Longan
-Lomascolo
-Lomack
-Lolagne
-Lokaphone
-Logins
-Loggin
-Lofredo
-Loffler
-Loescher
-Loendorf
-Locus
-Lockyer
-Lockheart
-Lobendahn
-Lobasso
-Lob
-Lizana
-Livshits
-Litzau
-Litty
-Litteer
-Litsey
-Litrenta
-Litner
-Liszewski
-Lisman
-Lisboa
-Liquet
-Liptok
-Lineweaver
-Lindenpitz
-Lindel
-Lime
-Lillywhite
-Life
-Lievano
-Lieblong
-Liebler
-Lidey
-Libutti
-Liborio
-Libengood
-Leyson
-Leyland
-Lewczyk
-Lewark
-Leviner
-Levenstein
-Leuenberger
-Leszczynski
-Lestage
-Leske
-Lerwick
-Leray
-Lepkowski
-Leonor
-Lenyard
-Lenger
-Lendon
-Lemarie
-Leman
-Lelle
-Leisner
-Leisey
-Leischner
-Leimer
-Leigers
-Leiferman
-Leibfried
-Lehoullier
-Lehnortt
-Legget
-Legato
-Legath
-Legassie
-Legarreta
-Leftridge
-Leewright
-Ledsome
-Lecrone
-Lecourt
-Lecky
-Lechman
-Lebsack
-Lebouf
-Lebon
-Leazer
-Leavins
-Leadbeater
-Lawwill
-Lawall
-Lavorini
-Laviero
-Lavertue
-Lavalais
-Lautenbach
-Lausier
-Laurita
-Lauriano
-Laurange
-Launey
-Laughead
-Laufenberg
-Lauderman
-Laubhan
-Latunski
-Latulas
-Lastrape
-Lastiri
-Lason
-Laskoski
-Lasanta
-Laroux
-Larizza
-Larive
-Larish
-Laquerre
-Lappas
-Lapilio
-Lapadula
-Lapa
-Lanzi
-Lanzafame
-Lantier
-Lanski
-Laningham
-Langon
-Langdale
-Landron
-Landero
-Landauer
-Landacre
-Lamport
-Lamping
-Lamott
-Lamonda
-Lammi
-Lambiase
-Laite
-Lahaye
-Laframboise
-Lafone
-Laferte
-Laeger
-Ladieu
-Ladabouche
-Lachat
-Labonville
-Labbee
-Labatt
-Laban
-Kynaston
-Kwaterski
-Kuzniar
-Kuthe
-Kuter
-Kutchar
-Kurtin
-Kuramoto
-Kupstas
-Kuperman
-Kuns
-Kullmann
-Kuligowski
-Kukielka
-Kuehler
-Kudrna
-Kubie
-Kubera
-Kubas
-Kuba
-Kualii
-Krysinski
-Kryder
-Kronberger
-Kroft
-Kroencke
-Kristiansen
-Krigger
-Krieser
-Kretschman
-Krentz
-Krenke
-Kremers
-Kreitner
-Kreimer
-Kray
-Krawchuk
-Kravs
-Kranich
-Krampitz
-Kragh
-Krager
-Kozuch
-Kozloski
-Kozatek
-Kozakiewicz
-Kovalsky
-Kovalcik
-Kovack
-Kotera
-Kot
-Koszyk
-Kostel
-Kosmicki
-Koshy
-Korona
-Koroma
-Korba
-Koopmann
-Konstantinidi
-Kolodzik
-Kolodzieski
-Kolle
-Kolkmann
-Kolker
-Kolda
-Kokaly
-Kofford
-Koepper
-Koeing
-Koehnen
-Kodish
-Kodani
-Kocur
-Kocourek
-Kobza
-Koble
-Koback
-Knutzen
-Knows
-Knolton
-Knoblauch
-Knispel
-Knieper
-Knepshield
-Klyce
-Klunk
-Kluka
-Klostermann
-Klosinski
-Klish
-Klint
-Klinner
-Klindt
-Klimko
-Klicker
-Kleman
-Kleinsorge
-Kleinfelder
-Kleier
-Klas
-Klaman
-Kizzee
-Kitto
-Kitka
-Kirtdoll
-Kirscht
-Kintzer
-Kinstle
-Kinning
-Kinniburgh
-Kinnett
-Kinker
-Kinkelaar
-Kings
-Kingham
-Kingfisher
-Kimmet
-Killingbeck
-Kilberg
-Kikuchi
-Kikkert
-Kiesow
-Kienitz
-Kidner
-Kida
-Kid
-Khuu
-Khatak
-Khaleck
-Kezar
-Keyton
-Ketelhut
-Kesley
-Keshishyan
-Kerzman
-Kertesz
-Kerslake
-Kerscher
-Kernes
-Kerin
-Ker
-Kenimer
-Kenfield
-Kempe
-Kemick
-Kem
-Keitsock
-Keisker
-Keery
-Keblish
-Kebalka
-Kearny
-Kearby
-Kayler
-Kavin
-Kauer
-Kattan
-Katoa
-Kassis
-Kashuba
-Kashan
-Kartman
-Karry
-Karpel
-Karo
-Karnopp
-Karmazyn
-Karjala
-Karcz
-Karasti
-Karagiannis
-Kapoi
-Kapanke
-Kanz
-Kaniewski
-Kanemoto
-Kaneholani
-Kandt
-Kampfer
-Kammann
-Kamler
-Kamal
-Kalvig
-Kalmen
-Kalmar
-Kallstrom
-Kallin
-Kallbrier
-Kakaviatos
-Kakar
-Kahahane
-Kagel
-Kabat
-Kabanuck
-Kaas
-Jurczak
-Jurasin
-Juras
-Junke
-Junghans
-Jungen
-Jund
-Juliusson
-Juhnke
-Juett
-Jolla
-Jokinen
-Jokela
-Joffe
-Joecks
-Jochumsen
-Joa
-Jeziorski
-Jesseman
-Jessamy
-Jernejcic
-Jergenson
-Jerdon
-Jensrud
-Jellinek
-Jedrey
-Jedele
-Jeannette
-Jauron
-Jatho
-Jarrel
-Januszewski
-Janski
-Janovsek
-Janning
-Janikowski
-Jane
-Jandres
-Jamaica
-Jalonen
-Jainlett
-Jahnsen
-Jahde
-Jagow
-Jagielski
-Jaffray
-Jaecks
-Jacquot
-Jacoway
-Jacocks
-Iwami
-Isadore
-Irmeger
-Irie
-Iredale
-Iqbal
-Inscoe
-Inklebarger
-Ingemi
-Immen
-Imig
-Imberg
-Imamura
-Illies
-Ilacqua
-Ijams
-Iha
-Iden
-Ibraham
-Ibey
-Ialongo
-Iafrate
-Hyzer
-Hyacinthe
-Huyard
-Huxman
-Hutchkiss
-Hutchingson
-Husson
-Hussman
-Hurm
-Hupka
-Hunyadi
-Hunstad
-Humpert
-Hummons
-Hultz
-Hulton
-Hules
-Huisenga
-Huhta
-Hugueley
-Hughe
-Huggler
-Hufton
-Huffstickler
-Huddelston
-Huba
-Hrivnak
-Hoysradt
-Howorth
-Howenstine
-Hovda
-Hourani
-Houglum
-Houch
-Hotalen
-Hosse
-Horwich
-Horvitz
-Horoschak
-Hornor
-Hornbrook
-Horita
-Hoque
-Hopman
-Hoovler
-Hoople
-Hookfin
-Honeysucker
-Honeycut
-Honerkamp
-Homyak
-Homa
-Holzwart
-Holzerland
-Holyoke
-Holtry
-Holterman
-Holohan
-Hollinshed
-Hollington
-Hollenshead
-Holey
-Holderby
-Holak
-Hokkanen
-Hohner
-Hogsed
-Hoglen
-Hogen
-Hogberg
-Hofland
-Hofius
-Hoffis
-Hofferber
-Hoffarth
-Hofacker
-Hoekman
-Hodor
-Hochstetter
-Hochnadel
-Hobbins
-Hoa
-Hlavaty
-Hittner
-Hitson
-Hirtz
-Hirschi
-Hinkes
-Hinke
-Hindley
-Hince
-Hilse
-Hilke
-Hilferty
-Hildesheim
-Hikes
-Hignite
-Higman
-Hiemer
-Hidden
-Hickinbotham
-Hewatt
-Hetz
-Hetsler
-Hessian
-Hershaw
-Herra
-Hernander
-Herlocker
-Hepper
-Henseler
-Henri
-Hennick
-Hennecke
-Hendrikson
-Henderlight
-Hellstrom
-Helderman
-Heitland
-Heistand
-Heiskell
-Heisinger
-Heiserman
-Heinritz
-Heinly
-Heinlen
-Heimerdinger
-Heimbigner
-Heidbreder
-Hegwer
-Hedeen
-Hebrank
-Heberlein
-Heaslet
-Hearin
-Hazle
-Hazelbush
-Hayzlett
-Hayre
-Haymans
-Hayenga
-Hayduk
-Haward
-Havner
-Haushalter
-Hauf
-Hatke
-Hatchel
-Hassard
-Haskovec
-Hashmi
-Harvest
-Harvath
-Hartill
-Harteau
-Harshfield
-Harrigill
-Harriet
-Haros
-Haroldson
-Harmeson
-Harl
-Harkley
-Hariston
-Harington
-Harian
-Hargus
-Hargens
-Hardina
-Haraldson
-Harajly
-Hapke
-Hapeman
-Hanz
-Hanthorn
-Hanry
-Hannen
-Hannasch
-Hannam
-Hanifan
-Hanft
-Handon
-Handford
-Hancher
-Hancey
-Hample
-Hammrich
-Hammerstrom
-Hambric
-Halwick
-Halma
-Hallgren
-Hallet
-Hallada
-Halla
-Halik
-Halgas
-Halcon
-Halbrooks
-Hakel
-Hairfield
-Hainesworth
-Haggarty
-Hagenhoff
-Hagebusch
-Hagadone
-Haft
-Haflett
-Haefele
-Haddow
-Hackbart
-Haberer
-Haass
-Gwinner
-Gwathney
-Gwartney
-Gutterrez
-Gutoski
-Gutkin
-Gutherie
-Gutches
-Gustus
-Gustison
-Gustaveson
-Gurtner
-Gurkin
-Gummo
-Gulliksen
-Gulke
-Guldin
-Gulden
-Guitierez
-Guile
-Guildford
-Guidice
-Gugerty
-Guffy
-Gueningsman
-Gudgell
-Guderjahn
-Guastella
-Guariglia
-Guardia
-Gryniuk
-Grueser
-Grudem
-Growden
-Grossett
-Gropper
-Gron
-Grodin
-Groch
-Grismore
-Gripper
-Grinvalsky
-Grima
-Griffth
-Griess
-Greynolds
-Gresh
-Greminger
-Gregoria
-Greenwade
-Greenlief
-Greenier
-Grayes
-Gravell
-Grassmyer
-Grappe
-Grantland
-Grandin
-Grandel
-Grandbois
-Granahan
-Gramham
-Graffeo
-Graeter
-Gradwell
-Gradel
-Grabo
-Graban
-Goy
-Govoni
-Governale
-Govern
-Gouty
-Goughnour
-Goude
-Goubeaux
-Goth
-Gosline
-Goslee
-Goshen
-Gosewisch
-Gorzynski
-Gortman
-Gorter
-Gordin
-Gord
-Goos
-Goodwine
-Goodrick
-Goodley
-Gombert
-Goletz
-Goldy
-Goldthwaite
-Goldthwait
-Goldizen
-Golar
-Goist
-Gofman
-Goffer
-Goerges
-Goeltz
-Goedicke
-Goedecke
-Godnick
-Gocke
-Goade
-Gneiser
-Gluth
-Glovier
-Glomski
-Glodo
-Gloden
-Glenister
-Glawson
-Glasier
-Gladysz
-Gladstein
-Gjertsen
-Giudice
-Gitto
-Gittelman
-Girvin
-Girolamo
-Gionfriddo
-Gingell
-Gimble
-Gilhousen
-Gilboy
-Gilberti
-Gigantino
-Gietzen
-Gieseking
-Gianikas
-Ghosn
-Ghosh
-Geyman
-Gevara
-Getsinger
-Gessert
-Gerrits
-Gerrior
-Geris
-Gerhauser
-Gerety
-Genzone
-Genuario
-Gentles
-Gentille
-Genter
-Genetti
-Gelle
-Gelfand
-Gelabert
-Gekas
-Geck
-Gearin
-Gdovin
-Gaydosh
-Gawith
-Gave
-Gauntlett
-Gaugler
-Gaudy
-Gaub
-Gatten
-Gathje
-Gasperini
-Gasner
-Gasco
-Gascho
-Gasbarro
-Garvis
-Garra
-Garnette
-Garing
-Garick
-Gardunio
-Gardon
-Gardemal
-Garde
-Garczynski
-Garant
-Ganus
-Gantnier
-Ganis
-Gangloff
-Gangler
-Ganer
-Ganem
-Gandolfo
-Gampp
-Gallihugh
-Galletti
-Gallenstein
-Gallarello
-Galla
-Galka
-Galayda
-Galarneau
-Galapon
-Gaito
-Gaglione
-Gady
-Gadsen
-Gachupin
-Gaboury
-Futterman
-Fusch
-Furuta
-Furth
-Furber
-Fune
-Funai
-Fuess
-Frutchey
-Frumkin
-Fruhling
-Frommer
-Fromdahl
-Froehner
-Frizzle
-Friends
-Friederich
-Freyre
-Freilich
-Fregia
-Frediani
-Frederico
-Frater
-Fraile
-Foste
-Fosselman
-Fosnaugh
-Fosburg
-Fortis
-Fortgang
-Forstner
-Forson
-Forseth
-Forkin
-Forister
-Forinash
-Footer
-Fontillas
-Fontenelle
-Fonesca
-Folker
-Fogerson
-Fogelquist
-Flye
-Flummer
-Floth
-Floro
-Florine
-Flies
-Flexer
-Flessner
-Flatness
-Flank
-Fland
-Flahive
-Flager
-Fiveash
-Fitzner
-Fitzke
-Fitcheard
-Fisherman
-Fishbeck
-Fipps
-Fiorino
-Finster
-Finken
-Finigan
-Fingal
-Finer
-Filsaime
-Fillingim
-Filipponi
-Fila
-Fies
-Fiebelkorn
-Fiducia
-Fiallo
-Fetherston
-Fetherolf
-Fesmire
-Fesenmyer
-Ferroni
-Ferriss
-Ferrini
-Ferrick
-Ferraris
-Ferniza
-Fernades
-Ferdig
-Ferandez
-Feoli
-Fenninger
-Fenney
-Femi
-Fejes
-Fehlman
-Feger
-Fede
-Febo
-Febbraio
-Feasel
-Feagley
-Fayad
-Favaloro
-Fauerbach
-Fauble
-Fasheh
-Farrant
-Farra
-Faro
-Farinacci
-Farfaglia
-Farell
-Farb
-Farace
-Fanjoy
-Fangmann
-Famulare
-Falsetta
-Fallows
-Fallert
-Falero
-Faldyn
-Falconi
-Falce
-Fait
-Fairburn
-Faiola
-Faiella
-Fahlsing
-Faggett
-Fafinski
-Fadness
-Fabros
-Fabert
-Everidge
-Evaristo
-Eustache
-Etzkorn
-Etier
-Estabillo
-Esquivias
-Esquirel
-Eslava
-Eschete
-Esau
-Erway
-Ertzbischoff
-Eron
-Erner
-Ermitano
-Ermitanio
-Ermert
-Erie
-Erdley
-Equihua
-Enzor
-Ensing
-Enns
-Engleking
-Engelkes
-Endlich
-Endler
-Emry
-Emms
-Emmerling
-Emerich
-Ellsbury
-Ellie
-Elizarraras
-Eliot
-Eliopoulos
-Elery
-Elek
-Elderidge
-Elbaum
-Ekins
-Ekin
-Eisley
-Eilderts
-Eikleberry
-Eigo
-Eighmy
-Eichel
-Ehly
-Egloff
-Egland
-Eggington
-Eggenberger
-Egar
-Egans
-Eftekhari
-Efford
-Eeds
-Edvalson
-Edin
-Edgman
-Edemann
-Edelmann
-Eddens
-Eckl
-Eckerle
-Eckelman
-Ebrahim
-Eberth
-Eberspacher
-Ebbighausen
-Ebaugh
-Easly
-Eash
-Dzledzic
-Dyett
-Dyba
-Dworaczyk
-Duttry
-Duthie
-Duszynski
-Duso
-Dushaj
-Dusett
-Dus
-Durman
-Durkins
-Durick
-Duplechain
-Dunnivan
-Dunlow
-Dunivan
-Dumars
-Dumaine
-Duliba
-Dulany
-Duka
-Duft
-Dufrane
-Duffek
-Duellman
-Ducking
-Dubourg
-Drzewiecki
-Drugan
-Drozdowski
-Drozda
-Dronet
-Drilling
-Driesenga
-Dreyfuss
-Drevs
-Dreben
-Draudt
-Draleau
-Dragos
-Draghi
-Doyer
-Dowlin
-Douma
-Dotterweich
-Dottavio
-Doroff
-Dornon
-Dorland
-Doop
-Donndelinger
-Donehoo
-Donate
-Donado
-Dommer
-Dominici
-Domann
-Dolio
-Dolence
-Doland
-Dolak
-Doersam
-Doerrer
-Doede
-Dockham
-Dobrich
-Dobosz
-Dobin
-Dobbratz
-Divlio
-Divel
-Ditzel
-Disalvatore
-Diotte
-Dinnen
-Dinkin
-Dimler
-Dimiceli
-Dimeglio
-Dimascio
-Dimare
-Diluca
-Dilsaver
-Dillen
-Dilibero
-Dile
-Digioia
-Difede
-Diefenbach
-Diedrick
-Dickmann
-Dickes
-Dickason
-Dicapua
-Dicaprio
-Dibrell
-Dibley
-Dibattista
-Deyon
-Devotie
-Devoid
-Deval
-Detlefsen
-Destro
-Destiche
-Desposito
-Desola
-Deshotels
-Descombes
-Deschepper
-Desautel
-Desano
-Deroy
-Derosset
-Derosby
-Deroeck
-Derocher
-Dergance
-Deren
-Deptula
-Deprey
-Depolis
-Depner
-Depetro
-Denunzio
-Densford
-Dennington
-Dene
-Dender
-Denbo
-Demuro
-Demoranville
-Demling
-Demerson
-Demelis
-Demeglio
-Dembo
-Demattia
-Demarinis
-Delprincipe
-Deloria
-Delnoce
-Delmedico
-Dellow
-Delles
-Dellavalle
-Dellamora
-Delguidice
-Delgato
-Delfs
-Delcourt
-Delcolle
-Delbert
-Delaportilla
-Delahoz
-Delacueva
-Deisch
-Deike
-Degro
-Degonia
-Degollado
-Degolier
-Degirolamo
-Degener
-Degele
-Degeest
-Degeare
-Defina
-Defabio
-Deeley
-Decraene
-Decou
-Decorte
-Declercq
-Decinti
-Dechambeau
-Debutts
-Debro
-Deblieck
-Deblasi
-Debem
-Deavila
-Deases
-Deangeles
-Deahl
-Daymude
-Daven
-Datil
-Daros
-Darnick
-Darienzo
-Dardy
-Daponte
-Dannhaus
-Danneman
-Danielle
-Dani
-Danger
-Dangel
-Danes
-Danekas
-Dandrow
-Dambrose
-Dalpe
-Dalesandro
-Daiton
-Dainels
-Daigh
-Dahnke
-Dahme
-Dahling
-Dagata
-Dack
-Czaplicki
-Czachorowski
-Cuttitta
-Cutaia
-Custance
-Curless
-Curie
-Curi
-Cupelli
-Cumens
-Cumbass
-Cumba
-Cullars
-Cullar
-Cukaj
-Cubito
-Cuascut
-Crytzer
-Crye
-Cruzen
-Cruser
-Crunkleton
-Crummett
-Crumbliss
-Cropley
-Cronquist
-Cronkite
-Cronic
-Crombie
-Crockwell
-Crnkovich
-Critcher
-Cristo
-Cristales
-Crisanti
-Crier
-Cretsinger
-Crest
-Creson
-Crelia
-Crecco
-Craze
-Craveiro
-Cratch
-Crapps
-Cran
-Craigmiles
-Craiger
-Craige
-Crady
-Cradic
-Craddieth
-Cowels
-Coveney
-Courcy
-Coulbourne
-Cotsis
-Cotrone
-Cotney
-Cotilla
-Costaneda
-Costabile
-Cossel
-Cossa
-Cos
-Corte
-Corsino
-Corria
-Cornog
-Cornely
-Corio
-Corino
-Corington
-Coressel
-Cordone
-Corbisiero
-Corbelli
-Copps
-Coovert
-Coopwood
-Cooner
-Cookman
-Conzales
-Conver
-Contratto
-Conrady
-Conradi
-Connel
-Conneely
-Conmy
-Comunale
-Comber
-Comans
-Colvert
-Columbo
-Coluccio
-Colp
-Colop
-Collini
-College
-Colestock
-Colebank
-Colasante
-Colasacco
-Colapietro
-Cokeley
-Coia
-Cocuzza
-Coalson
-Co
-Clowes
-Cliche
-Clevette
-Cleven
-Clerico
-Clearwater
-Civiello
-Ciullo
-Citro
-Cirocco
-Cioppa
-Cilek
-Cieszynski
-Cieri
-Cicerchia
-Ciaschi
-Ciani
-Cianchetti
-Chudy
-Chuc
-Chryst
-Christodoulou
-Christin
-Chrisley
-Chokshi
-Chmela
-Chkouri
-Chiodini
-Chio
-Chimilio
-Chilen
-Chilek
-Childrey
-Chier
-Chicas
-Chiaro
-Chiappone
-Chiappinelli
-Chiado
-Chhom
-Chesterfield
-Chesteen
-Cheshier
-Cherrez
-Cherep
-Chene
-Cheevers
-Checkett
-Cheaney
-Chayka
-Chawla
-Chasin
-Chasen
-Charvat
-Char
-Chapoton
-Chantos
-Chantler
-Chant
-Chadez
-Chad
-Chaco
-Chabez
-Cerrito
-Ceppetelli
-Centanni
-Celso
-Cederberg
-Cedar
-Cecchetti
-Cavel
-Cavanah
-Cavagna
-Catus
-Catton
-Catterton
-Catrambone
-Catherwood
-Catherman
-Cataldi
-Castellana
-Castellan
-Cassey
-Casparis
-Casilla
-Cashdollar
-Casaceli
-Carvana
-Carriedo
-Carrecter
-Carraher
-Carrabine
-Carpinelli
-Carouthers
-Carnovale
-Carmany
-Carles
-Caretto
-Careaga
-Cardosa
-Cardelli
-Carbine
-Carathers
-Caraker
-Caracci
-Capuchin
-Cappelletti
-Capistran
-Capdeville
-Caparros
-Canute
-Cante
-Canizares
-Canel
-Canclini
-Cancino
-Campus
-Campise
-Campen
-Cammarano
-Camilli
-Camic
-Camey
-Calwell
-Calvey
-Calvary
-Callo
-Callinan
-Callais
-Calizo
-Calixto
-Calisto
-Calip
-Calibuso
-Caira
-Cahillane
-Cahalane
-Cahal
-Caffery
-Caffarelli
-Cafarelli
-Cadlett
-Cacciatori
-Cabebe
-Byus
-Byrnside
-Byrer
-Byone
-Buza
-Buttrum
-Buttel
-Butremovic
-Butanda
-Bustin
-Bussen
-Bushlen
-Bushart
-Burtchell
-Burrel
-Burnard
-Burlett
-Burkeen
-Burce
-Buote
-Bunyan
-Buntrock
-Bunck
-Bumpas
-Bulleri
-Buglione
-Bugge
-Bueter
-Buerk
-Buenger
-Buehrle
-Buechele
-Budrow
-Buddenhagen
-Bucolo
-Buchenau
-Bucco
-Buccino
-Bubar
-Bruzas
-Brutsch
-Bruschke
-Brunot
-Brungard
-Brund
-Bruender
-Brucks
-Bruchey
-Brozowski
-Brownd
-Brothern
-Broomhead
-Bronw
-Brom
-Brog
-Brodigan
-Brockhaus
-Brockel
-Broadaway
-Brletich
-Briston
-Brissett
-Brines
-Brillon
-Brilliant
-Brightbill
-Brigges
-Briel
-Bresciani
-Brents
-Breitmeyer
-Breithaupt
-Breidenthal
-Breden
-Bredemeier
-Breckinridge
-Brecheisen
-Brecheen
-Breazeal
-Bream
-Brazzel
-Brawdy
-Brave
-Brashers
-Branz
-Branyon
-Brantz
-Brannam
-Brankovich
-Brandle
-Branchaud
-Branca
-Bramley
-Bramante
-Bramall
-Brakeman
-Bradby
-Bozzo
-Bozelle
-Boyarski
-Bowline
-Bowey
-Bowerize
-Bowdon
-Bowdler
-Boutros
-Bouten
-Bourdier
-Bouras
-Boufford
-Bottex
-Bottemiller
-Bothman
-Botcher
-Boshers
-Borris
-Bornemann
-Bonus
-Bonnot
-Bonifant
-Bongiardina
-Bonenberger
-Bonasera
-Bollier
-Bolar
-Bokman
-Bokanovich
-Boissonnault
-Boiles
-Bohrn
-Bohlke
-Bogenschutz
-Bogel
-Bogda
-Boevers
-Boever
-Boender
-Boehringer
-Boehne
-Bodor
-Bodda
-Bodak
-Bocker
-Bockenkamp
-Boche
-Blyden
-Bluto
-Bludworth
-Bloxsom
-Blomstrom
-Bloise
-Bloebaum
-Blier
-Bleiweiss
-Blegen
-Bleacher
-Blaum
-Blasz
-Blasingim
-Blasengame
-Blanda
-Blagman
-Blackstad
-Blackham
-Blache
-Bixel
-Bitters
-Bissegger
-Bisker
-Bishoff
-Bisard
-Bis
-Birtwell
-Birley
-Birkenmeier
-Birkenholz
-Birkeland
-Birdsey
-Birdo
-Birdinground
-Binner
-Bilsborough
-Billot
-Billops
-Billingham
-Bigney
-Bigg
-Bienkowski
-Bienek
-Bielefeld
-Bielec
-Biddie
-Bickell
-Bichler
-Bibo
-Biava
-Biagi
-Biagas
-Bhayani
-Bez
-Beyene
-Beyda
-Bevels
-Bettner
-Bettinson
-Betson
-Beto
-Bessix
-Bessire
-Bertschy
-Bertozzi
-Bertoncini
-Bertelson
-Berteau
-Berrong
-Berrones
-Berringer
-Berrigan
-Bernsen
-Berlingeri
-Berken
-Berka
-Berges
-Bergdorf
-Bergara
-Bergant
-Bergamini
-Beren
-Berdugo
-Berdine
-Berberian
-Benvenuti
-Benish
-Benincase
-Benek
-Benedith
-Bendas
-Benak
-Bena
-Beltrame
-Belsheim
-Belotti
-Bellrichard
-Belleville
-Beliles
-Belgrade
-Belcastro
-Bekius
-Bekhit
-Beightol
-Behel
-Beetz
-Bedson
-Becze
-Beckmeyer
-Beckey
-Beckers
-Beckelhimer
-Beccue
-Beberwyk
-Bebber
-Beamesderfer
-Beacom
-Bazzle
-Bazil
-Baynham
-Bayhonan
-Bayas
-Bawany
-Bava
-Baumgardt
-Bauerkemper
-Baudry
-Baudino
-Battko
-Battisti
-Batta
-Bassano
-Baskas
-Baseler
-Basanta
-Bartucci
-Bartron
-Barthold
-Bartamian
-Barsalou
-Barrineau
-Barriger
-Barreneche
-Barkie
-Barich
-Bardes
-Barbano
-Baral
-Baragar
-Baque
-Banther
-Banome
-Bannowsky
-Banke
-Baniaga
-Bandley
-Banahan
-Banaag
-Bamba
-Baltzer
-Balster
-Balnis
-Balkin
-Bali
-Balfe
-Balerio
-Balent
-Baldyga
-Baldor
-Baldinger
-Baldassano
-Baldacci
-Balanoff
-Balado
-Balaban
-Balaam
-Bakes
-Bajwa
-Baisch
-Bahnsen
-Bahls
-Bahler
-Bahamonde
-Bagdasarian
-Bagaoisan
-Bafia
-Baese
-Badolato
-Bado
-Badder
-Bacurin
-Backers
-Bachor
-Babe
-Babbit
-Babauta
-Baadsgaard
-Azzara
-Azebedo
-Avril
-Avello
-Aveline
-Authur
-Ausby
-Auricchio
-Auna
-Aukerman
-Auckerman
-Auck
-Auble
-Atterson
-Attard
-Aswegan
-Aste
-Asta
-Assaf
-Aspen
-Asken
-Asif
-Asiedu
-Ashner
-Asel
-Aschenbach
-Arvay
-Arvan
-Artus
-Artley
-Arrollo
-Aroyo
-Aronov
-Aromin
-Arnsworth
-Arnspiger
-Arnn
-Armant
-Arington
-Argubright
-Arentz
-Arcoraci
-Arbuthnot
-Arbo
-Aquilina
-Aquilera
-Apt
-Apsey
-Appolonia
-Apollo
-Apana
-Antista
-Anshutz
-Anon
-Anno
-Annala
-Anklam
-Angold
-Angelone
-Angeline
-Angeletti
-Andren
-Andreadis
-Andera
-Andelman
-Andel
-Anctil
-Anchors
-Anacker
-Ampy
-Amons
-Amirault
-Amir
-Amezaga
-Ameigh
-Alyea
-Altvater
-Altig
-Altermatt
-Alo
-Almengor
-Alme
-Allvin
-Allocco
-Allegrini
-Aliment
-Algee
-Alexanian
-Aler
-Aldo
-Albero
-Alarid
-Akiona
-Akemon
-Ajello
-Aitcheson
-Ainley
-Ailey
-Ahluwalia
-Ahlf
-Ahlbrecht
-Agundez
-Agro
-Agins
-Aggarwal
-Afalava
-Adriano
-Adomaitis
-Adolphus
-Adlam
-Adie
-Adey
-Adduci
-Addleman
-Adamyan
-Acothley
-Acklen
-Ackert
-Ackerly
-Acencio
-Accosta
-Abundiz
-Abedi
-Abbassi
-Abbasi
-Aanerud
-Aakre
-Aagaard
-Zwickl
-Zuver
-Zurasky
-Zumbo
-Zumba
-Zuckerwar
-Zuccarelli
-Zubris
-Zoucha
-Zorns
-Zorc
-Zitzow
-Zitzloff
-Zirkles
-Zippe
-Ziola
-Zinz
-Zinsmeister
-Zincke
-Zieschang
-Zierdt
-Zien
-Ziemke
-Zidek
-Zickler
-Zeuner
-Zerba
-Zera
-Zenger
-Zeltmann
-Zelle
-Zelinka
-Zelek
-Zele
-Zeiner
-Zeimet
-Zeidler
-Zecchini
-Zebley
-Zdanowicz
-Zbell
-Zaro
-Zaremski
-Zar
-Zani
-Zancanella
-Zana
-Zambarano
-Zakar
-Zadorozny
-Zader
-Zaccaro
-Ysquierdo
-Yoxall
-Youst
-Youngstrom
-Youn
-Youker
-Yoss
-Yoshina
-Yonke
-Yonemura
-Yohannes
-Yock
-Yerhot
-Yengo
-Yehle
-Yanofsky
-Yaker
-Yagues
-Yach
-Ya
-Xue
-Wyrosdick
-Wygle
-Wygand
-Wurzer
-Wurl
-Wunderlin
-Wunderle
-Wuerth
-Writer
-Wrighten
-Wrich
-Wozny
-Wozney
-Wowk
-Wouters
-Wormington
-Worf
-Woolem
-Woodrich
-Wooderson
-Wonder
-Womeldorf
-Wolz
-Woltmann
-Wolstenholme
-Wollmuth
-Wolle
-Wolfard
-Woldridge
-Wojtanowski
-Wojner
-Woitowitz
-Woehl
-Wittenburg
-Wittel
-Witschi
-Witaszek
-Witaker
-Wiszynski
-Wiswall
-Wiss
-Wisher
-Wisenbaker
-Wires
-Winsky
-Winfough
-Windler
-Winckler
-Wimes
-Wiltberger
-Wilm
-Willrich
-Willoby
-Willimon
-Willenborg
-Wilda
-Wilczewski
-Wilcock
-Wiggens
-Wigboldy
-Wiesler
-Wies
-Wienhoff
-Wielgus
-Wiebers
-Wieber
-Wickizer
-Wichrowski
-Wibbens
-Whyard
-Wholey
-Whitsey
-Whitlingum
-Whitlach
-Whirry
-Wharry
-Wharff
-Whack
-Weyman
-Weyler
-Wethje
-Westveer
-Westmorland
-Westerhold
-Wesselman
-Wesloh
-Wery
-Wermers
-Werlinger
-Werksman
-Wenzinger
-Weninger
-Wendeln
-Wendelin
-Wenck
-Wember
-Welters
-Welland
-Welchman
-Welchel
-Weitnauer
-Weissler
-Weinger
-Weimann
-Weigert
-Weidert
-Wehby
-Wehbe
-Weck
-Wechter
-Weaving
-Weather
-Weal
-Weagle
-Wdowiak
-Wayns
-Waycott
-Waychoff
-Waterfall
-Watcher
-Watahomigie
-Wasowski
-Wasner
-Washko
-Washing
-Washell
-Wartenberg
-Warson
-Warrenfeltz
-Warp
-Warmbrodt
-Warhurst
-Wardsworth
-Wanzek
-Wanta
-Wansing
-Wankel
-Wangberg
-Wanberg
-Wamack
-Waltzer
-Walthers
-Walterson
-Walshe
-Walrond
-Wallschlaeger
-Wallgren
-Walema
-Waldram
-Waldhauser
-Waldecker
-Walby
-Wakin
-Wakabayashi
-Wah
-Wagy
-Waggner
-Wagenaar
-Wage
-Waffle
-Wadzinski
-Wademan
-Wackerly
-Wachs
-Wable
-Vredenburg
-Vrana
-Vrable
-Voyer
-Voto
-Vosper
-Vosberg
-Vorhees
-Voran
-Vora
-Vonstein
-Vondoloski
-Voltin
-Volpicelli
-Volland
-Volentine
-Volcko
-Vojtko
-Voice
-Vogeler
-Vizzini
-Vizena
-Vix
-Vitko
-Viste
-Visor
-Visco
-Virock
-Vinup
-Vinion
-Vincenzo
-Villas
-Villarta
-Villari
-Vilello
-Vigne
-Viener
-Vielmas
-Vielhauer
-Viehman
-Vidulich
-Vidinha
-Videen
-Vickerson
-Vicker
-Vertz
-Verry
-Vermeesch
-Verhulst
-Verhoff
-Verhagen
-Verhaeghe
-Vergo
-Vergeer
-Verdino
-Venus
-Ventrella
-Ventola
-Venter
-Vennes
-Venneri
-Venditto
-Velzy
-Velilla
-Velie
-Velandia
-Vecker
-Vecellio
-Vear
-Vavricka
-Vautrin
-Vates
-Vassall
-Vasmadjides
-Varty
-Varriano
-Varriale
-Varrato
-Varnedoe
-Varillas
-Vardaman
-Varajas
-Vaquero
-Vanzyl
-Vanvleet
-Vanvleck
-Vansoest
-Vanskiver
-Vanskike
-Vanruler
-Vanputten
-Vanoy
-Vanous
-Vanoort
-Vanliew
-Vanlew
-Vanhulle
-Vanhoozier
-Vanhofwegen
-Vanhaitsma
-Vanecek
-Vandrunen
-Vandixon
-Vandivier
-Vandiford
-Vandezande
-Vandewege
-Vanderzanden
-Vanderwerff
-Vanderwerf
-Vanderschel
-Vandergiessen
-Vandenberghe
-Vandehei
-Vandee
-Vancheri
-Vanbramer
-Valsin
-Valli
-Valido
-Valenzano
-Vajda
-Vaillencourt
-Vacheresse
-Va
-Uzdygan
-Uyetake
-Usilton
-Urueta
-Ursprung
-Ursiak
-Urquilla
-Urquidi
-Urfer
-Ureta
-Urbancic
-Ura
-Upwall
-Uptegrove
-Uphaus
-Upadhyaya
-Unterburger
-Unch
-Unavailable
-Unangst
-Umphenour
-Umbenhauer
-Ulseth
-Ulatowski
-Ukosata
-Uhyrek
-Uhrmacher
-Uhlich
-Ueno
-Uelmen
-Udoh
-Ude
-Uchytil
-Tzeng
-Typhair
-Twelves
-Twehous
-Tuxhorn
-Turybury
-Turro
-Turne
-Turnblom
-Turkus
-Turks
-Turbin
-Turbes
-Tunick
-Tumpkin
-Tuholski
-Tuggie
-Tufnell
-Tubertini
-Tubaugh
-Tsutsui
-Tsuha
-Tsuda
-Tsinnie
-Trupp
-Trupiano
-Trupia
-Truner
-Trundle
-Trumm
-Trullinger
-Truell
-Trucco
-Trowers
-Trover
-Trosien
-Tronnes
-Trompeter
-Tromp
-Trolio
-Troendle
-Trobaugh
-Triska
-Trimarco
-Trifiletti
-Tridle
-Tricoche
-Tresvant
-Trest
-Tresler
-Tresca
-Tremont
-Tremayne
-Treinen
-Treichler
-Treglia
-Treamer
-Traxson
-Traugh
-Trasher
-Trapasso
-Trant
-Trancoso
-Traister
-Trailor
-Trageser
-Traficante
-Trac
-Toya
-Towson
-Tovrea
-Totherow
-Tote
-Tortorelli
-Torri
-Tornabene
-Torigian
-Torello
-Toppa
-Topor
-Toothill
-Toop
-Tonsil
-Tomsich
-Tommie
-Tomlison
-Tolmich
-Tollner
-Tollefsrud
-Toledano
-Tolayo
-Toenges
-Toefield
-Tock
-Tobiasz
-Tobery
-Tobert
-Toban
-Toback
-Tjarks
-Tiznado
-Titlow
-Tishler
-Tirabassi
-Tippet
-Tinkey
-Timson
-Timperman
-Timmis
-Timmermans
-Timme
-Timberman
-Tikkanen
-Tietze
-Tierman
-Tiberi
-Thuringer
-Thul
-Thu
-Thro
-Thornwell
-Thomlison
-Thomlinson
-Thomassen
-Thimmes
-Thilking
-Thierman
-Thielemann
-Thiboutot
-Thibideau
-Theresa
-Theard
-Thavichith
-Thaut
-Tezak
-Tetzloff
-Teto
-Tetlow
-Tessler
-Tesseyman
-Teskey
-Tes
-Terzian
-Terwillegar
-Tervo
-Terronez
-Ternasky
-Termini
-Terboss
-Teramoto
-Tepley
-Tenuta
-Tenen
-Tellio
-Tellefson
-Telecky
-Tekell
-Tefertiller
-Teece
-Tedesko
-Tederous
-Tebeau
-Tear
-Teahan
-Tazewell
-Tazelaar
-Tavano
-Tatsapaugh
-Tatlock
-Tataris
-Tassinari
-Tassie
-Tarvis
-Tarkey
-Tarangelo
-Tappa
-Tanna
-Tanikella
-Tamblyn
-Tamaro
-Talyor
-Tallas
-Talayumptewa
-Talaska
-Taj
-Tagliarini
-Tagata
-Taflinger
-Taddonio
-Tacderan
-Tablang
-Tabisula
-Tabicas
-Tabar
-Szwed
-Szumski
-Szumigala
-Szollosi
-Szczesny
-Sypniewski
-Syon
-Sylvan
-Syal
-Swor
-Swoopes
-Swoap
-Swire
-Swimmer
-Swiler
-Swida
-Sweezer
-Sweep
-Sweeley
-Swede
-Swearengen
-Sweadner
-Swartzwelder
-Swanhart
-Sveen
-Svay
-Sutyak
-Sutten
-Sutler
-Suski
-Surprise
-Supernault
-Suozzo
-Suns
-Sunder
-Sumney
-Summarell
-Sumera
-Sulzbach
-Sulfridge
-Sukhram
-Suk
-Suitor
-Sughroue
-Sugahara
-Sudlow
-Sudan
-Sudak
-Subido
-Style
-Stweart
-Sturz
-Sturdy
-Sturchio
-Stulce
-Stukenborg
-Stuckemeyer
-Stsauveur
-Stroll
-Strohmeier
-Strissel
-Strimple
-Stremmel
-Streczywilk
-Strawhorn
-Stratz
-Stratos
-Straton
-Strassner
-Strama
-Strada
-Stoss
-Storti
-Stomberg
-Stolze
-Stoliker
-Stoler
-Stolberg
-Stolarik
-Stohlton
-Stofko
-Stofflet
-Stoff
-Stoesser
-Stoeber
-Stodden
-Stobierski
-Stobbs
-Stjohns
-Stirrup
-Stirman
-Stinehelfer
-Stimmell
-Stimits
-Stigger
-Stiers
-Stieff
-Stidam
-Stewarts
-Stevinson
-Stevey
-Sterett
-Ster
-Steppello
-Stepnoski
-Stentzel
-Stencil
-Stencel
-Stempien
-Steketee
-Steinbruckner
-Steinborn
-Steigman
-Steiber
-Stegent
-Steffani
-Steerman
-Steenken
-Steenhard
-Steedman
-Steckley
-Stealey
-Stayrook
-Stavnes
-Stauss
-Stash
-Stary
-Stare
-Stant
-Stanfa
-Standfield
-Standberry
-Standage
-Stanco
-Stanage
-Stampe
-Stamdifer
-Stalworth
-Stalma
-Staires
-Staines
-Staine
-Stahlberg
-Stadden
-Staberg
-Stabel
-Spurgers
-Spruce
-Sprinkel
-Springman
-Spriggle
-Sporleder
-Sporcic
-Spontak
-Sponholz
-Spohr
-Spittle
-Spiry
-Spiece
-Spicuzza
-Sperlich
-Sperdute
-Sperazza
-Spelts
-Speares
-Speakes
-Sparhawk
-Spaniel
-Spaar
-Soyars
-Soverns
-Southam
-Sour
-Souphom
-Soun
-Soula
-Sossamon
-Sosh
-Sosby
-Sorsby
-Soroka
-Soricelli
-Sorgi
-Sorbera
-Soplop
-Soohoo
-Sonoda
-Sonny
-Sonneborn
-Somodi
-Sommese
-Solman
-Sollie
-Solla
-Solina
-Soliani
-Soley
-Solecki
-Solages
-Sohre
-Soenksen
-Sodeman
-Sobiech
-Soberanis
-Snobeck
-Snerling
-Sneider
-Snaza
-Smolic
-Smigel
-Smigaj
-Smiechowski
-Smida
-Smerkar
-Smeby
-Slothower
-Slotemaker
-Slodysko
-Slivka
-Slimmer
-Slight
-Slifko
-Slayter
-Slawski
-Slauson
-Slatten
-Slain
-Skultety
-Skrip
-Skowyra
-Skorupa
-Skordahl
-Skomsky
-Skoff
-Sklenar
-Skeldon
-Skeesick
-Skea
-Skagen
-Sjostrand
-Sixtos
-Sivyer
-Siverson
-Siverling
-Sivan
-Siva
-Sitzler
-Sither
-Siskind
-Siske
-Siron
-Siregar
-Sirbaugh
-Sirak
-Siptak
-Sinstack
-Sins
-Siniscalchi
-Singlton
-Sinden
-Sinagra
-Sina
-Simpon
-Simmoneau
-Simler
-Simkulet
-Simi
-Simeona
-Simens
-Silverstone
-Silverness
-Silsbee
-Sillas
-Sileo
-Silbert
-Sikula
-Siglin
-Sigley
-Sigafus
-Siew
-Sietsma
-Sierras
-Siembida
-Sieker
-Siedlik
-Sidur
-Sidell
-Siddoway
-Sibille
-Sibilia
-Sibbald
-Shusta
-Shuskey
-Shurts
-Shryack
-Shroll
-Showell
-Shove
-Shoulars
-Shortino
-Shopp
-Shmidt
-Shiu
-Shirar
-Shinners
-Shingles
-Shinabery
-Shimko
-Shibles
-Shertzer
-Sherrin
-Sherril
-Shellhamer
-Shellhaas
-Sheldrup
-Sheladia
-Shehab
-Sheff
-Sheck
-Shearman
-Sheaff
-Shauer
-Shatswell
-Shaske
-Sharick
-Shappard
-Shallcross
-Shala
-Shaklee
-Shakespear
-Shafe
-Shady
-Shadwell
-Shacklett
-Seymor
-Settlemire
-Setting
-Sether
-Sesma
-Sesareo
-Seryak
-Serven
-Sers
-Serbus
-Serb
-Seppi
-Sephus
-Sentinella
-Sensel
-Senf
-Senato
-Sempek
-Semidey
-Semasko
-Selz
-Seltz
-Selmer
-Selitto
-Selim
-Seiser
-Seikel
-Seigle
-Seid
-Segouia
-Segner
-Segerson
-Segala
-Sefcik
-Seeholzer
-Seegert
-Sedita
-Sedenko
-Sedar
-Secondo
-Seckinger
-Sebald
-Seba
-Seahorn
-Seabright
-Scotty
-Scothorn
-Scordato
-Scoma
-Scobie
-Scipione
-Sciara
-Schwieterman
-Schwendemann
-Schwede
-Schwartzbach
-Schwarcz
-Schwalen
-Schutzman
-Schunemann
-Schulweis
-Schul
-Schuffert
-Schuckers
-Schrull
-Schrubbe
-Schreyer
-Schreckhise
-Schreader
-Schoonhoven
-Schoolman
-Schol
-Schoettmer
-Schoepf
-Schoenle
-Schoenecker
-Schobert
-Schnyer
-Schnoke
-Schnipper
-Schneiter
-Schneekloth
-Schnapp
-Schmits
-Schmelzle
-Schmelz
-Schmeisser
-Schmeiser
-Schmahl
-Schlotzhauer
-Schlott
-Schlossberg
-Schlipf
-Schlicker
-Schleuder
-Schleimer
-Schlauch
-Schlau
-Schlaefer
-Schiesser
-Schieler
-Schied
-Schie
-Scheuvront
-Scheumann
-Scherz
-Scheperle
-Schenewerk
-Schemm
-Schellenger
-Schaupp
-Schauf
-Schaudel
-Schau
-Schatzberg
-Scharr
-Schappert
-Schapp
-Schamel
-Schallhorn
-Schaefers
-Schadt
-Schadel
-Schackow
-Schabowski
-Schabes
-Schabert
-Schab
-Schaab
-Scavotto
-Scarver
-Scarsella
-Scarbro
-Scampoli
-Scammon
-Scallon
-Scalley
-Scale
-Scafuri
-Scadden
-Scacco
-Sawchuk
-Saviano
-Saverchenko
-Savelli
-Savarino
-Satsky
-Satoe
-Sarwinski
-Sartorio
-Sartorelli
-Sarria
-Saro
-Sarna
-Sarkin
-Sarisky
-Sario
-Sarazin
-Sara
-Sapia
-Santmyer
-Santmier
-Santillana
-Santanna
-Santacroce
-Sansouci
-Sannes
-Sanez
-Sandvig
-Sandino
-Sandella
-Sanburg
-Samy
-Sammer
-Samit
-Salvucci
-Salvey
-Salvatori
-Salvant
-Salvage
-Salts
-Salton
-Saltarelli
-Salt
-Salome
-Sallade
-Saletta
-Salehi
-Saleeby
-Salameh
-Salama
-Salaiz
-Salafia
-Sakry
-Sako
-Sakash
-Saitta
-Sahu
-Sahara
-Saguil
-Sagrera
-Saglimben
-Sagi
-Saggio
-Sagen
-Safranek
-Safko
-Saeli
-Sadar
-Sacre
-Saccardi
-Saborido
-Sabins
-Sabet
-Sabbah
-Saale
-Rynne
-Rynders
-Rylands
-Rykowski
-Ruzbasan
-Ruwe
-Rutiaga
-Ruthledge
-Rutecki
-Rusu
-Russler
-Rurup
-Ruozzo
-Ruot
-Runels
-Rumphol
-Rumpel
-Rumpca
-Rullo
-Ruisi
-Ruic
-Ruhle
-Ruffaner
-Rufer
-Ruetz
-Ruesink
-Ruehle
-Ruedy
-Ruden
-Rubulcaba
-Rua
-Roya
-Rowald
-Rovner
-Rouselle
-Roura
-Roulston
-Rougeaux
-Rotty
-Rothery
-Rotert
-Rossler
-Roskowinski
-Rosiak
-Rosh
-Rosenstock
-Roselius
-Roscigno
-Rosaro
-Rosada
-Roperto
-Ropers
-Rookwood
-Rongo
-Rondinelli
-Ronda
-Ronchetti
-Romrell
-Rollinger
-Rola
-Rokos
-Rohwer
-Rohrscheib
-Rohlf
-Rogal
-Rogacion
-Roeschley
-Roers
-Roemen
-Roelofs
-Roekle
-Roehrich
-Rodriguel
-Rodges
-Rodeen
-Roddey
-Roddam
-Rocquemore
-Rockers
-Roccia
-Robishaw
-Robida
-Robichau
-Robertshaw
-Roberton
-Roberta
-Roberg
-Rob
-Roary
-Rizzuti
-Rizal
-Riveros
-Rittenour
-Risper
-Rippin
-Ripp
-Riola
-Riogas
-Rinner
-Ringus
-Ringhand
-Rinehardt
-Rinderer
-Rigotti
-Righetti
-Riggi
-Riggans
-Rigazio
-Rigatti
-Rifenburg
-Rieu
-Riehm
-Riegler
-Riech
-Riebau
-Ridgel
-Ridens
-Ridener
-Riddel
-Rickner
-Richardt
-Ricciardone
-Rhynard
-Rhyan
-Rhoderick
-Rho
-Rheinschmidt
-Rezak
-Reusing
-Rettkowski
-Retterath
-Retta
-Reshid
-Reppe
-Repke
-Reos
-Reome
-Rensen
-Renschler
-Renova
-Renollet
-Renison
-Reninger
-Rengers
-Rengel
-Renart
-Rena
-Relihan
-Reisen
-Reiniger
-Reindel
-Reil
-Reier
-Reh
-Reggio
-Regener
-Reekers
-Reeger
-Redmann
-Reddinger
-Redcay
-Reckling
-Rebert
-Reategui
-Reagin
-Reagen
-Readnour
-Razzano
-Raynolds
-Rayer
-Raybould
-Rawdon
-Ravotta
-Ravo
-Ravitz
-Ravert
-Rathert
-Raterman
-Ratel
-Raque
-Rapko
-Ransone
-Ransburg
-Rangnow
-Randon
-Rancifer
-Ramotar
-Ramones
-Ramone
-Ramire
-Ramin
-Rameres
-Rakoski
-Rajala
-Raithel
-Rainie
-Rainge
-Rainbow
-Raigoza
-Rahming
-Ragazzo
-Radomski
-Radish
-Radilla
-Raden
-Radde
-Racano
-Rabine
-Rabil
-Rabell
-Rabasca
-Quiterio
-Quinzi
-Quink
-Quinci
-Quilliams
-Quiller
-Quider
-Quenneville
-Quelch
-Queeley
-Quear
-Quattro
-Quastad
-Quaglieri
-Pyscher
-Pust
-Purtle
-Purtill
-Purdin
-Puorto
-Punja
-Pullem
-Pulfer
-Puleio
-Pujia
-Puetz
-Puehler
-Puebla
-Ptomey
-Przewozman
-Prysock
-Pruter
-Prunier
-Pruess
-Prudom
-Pruchnik
-Proveaux
-Prophit
-Promise
-Procknow
-Proby
-Pro
-Prive
-Preziosi
-Preza
-Prem
-Preite
-Preisser
-Pregler
-Precella
-Prazma
-Prats
-Prator
-Prakash
-Prahm
-Prader
-Pozniak
-Poxon
-Powledge
-Pouge
-Pott
-Postlewaite
-Posthumus
-Posnick
-Posley
-Poskey
-Porro
-Poreda
-Poppema
-Popat
-Pondexter
-Ponciano
-Pompilio
-Pommer
-Polosky
-Pollom
-Pollo
-Pollica
-Pollaro
-Polizio
-Polek
-Polack
-Polacek
-Poirot
-Poertner
-Poduska
-Pockrus
-Pochintesta
-Pluym
-Pluhar
-Pluck
-Pliner
-Pliml
-Plese
-Pleasent
-Playle
-Plasky
-Plane
-Plack
-Pizani
-Pitz
-Pittari
-Pitruzzello
-Pistorius
-Pistilli
-Pisha
-Piselli
-Pisco
-Piros
-Pirone
-Pirolli
-Pirman
-Pirkl
-Pirie
-Pique
-Pintado
-Pinkey
-Pingrey
-Pinger
-Pinelo
-Pilsner
-Pilley
-Pilgreen
-Piles
-Pila
-Pignatello
-Pietig
-Pierrott
-Pierron
-Pierceall
-Pieratt
-Pienta
-Piekos
-Piechota
-Picquet
-Pickar
-Picerno
-Piceno
-Phyfiher
-Phorng
-Phearsdorf
-Pharmes
-Phariss
-Pfuhl
-Pfenning
-Pezzetti
-Pevy
-Petzoldt
-Pettrey
-Pettas
-Petta
-Petross
-Petrochello
-Petriello
-Petrelli
-Petch
-Pestoni
-Pestano
-Pesick
-Pesavento
-Perzanowski
-Perrien
-Perrenoud
-Perque
-Peroff
-Perlas
-Perkerson
-Perisho
-Perich
-Perfect
-Peregrino
-Peregoy
-Perch
-Pequeno
-Penza
-Pensis
-Penquite
-Peniston
-Penister
-Pendola
-Pendergraph
-Pelle
-Pelczar
-Pelch
-Pela
-Pehler
-Pegoda
-Peelle
-Peeling
-Pedroni
-Pedlar
-Pedder
-Pecoraino
-Peckman
-Pechal
-Pebsworth
-Peasnall
-Peasant
-Pead
-Peacemaker
-Paytes
-Paysen
-Payn
-Pavletic
-Pavlat
-Pavlas
-Pavese
-Paup
-Paulis
-Patrice
-Patocka
-Pat
-Pastorino
-Pascocello
-Parthemer
-Parreira
-Parido
-Paretti
-Pardun
-Parchment
-Papstein
-Papps
-Papetti
-Papakostas
-Pantoni
-Panik
-Panfilov
-Panfil
-Pana
-Pampusch
-Pamperin
-Palmitessa
-Palmero
-Pallett
-Palilla
-Palese
-Palesano
-Palange
-Pagenkopf
-Padon
-Padmanabhan
-Padinha
-Packen
-Pacitto
-Pacchiana
-Pabich
-Oza
-Oyabu
-Overdorf
-Ourada
-Otukolo
-Otterbine
-Ottalagano
-Oto
-Other
-Otano
-Osting
-Ostiguy
-Osterholt
-Osley
-Oscarson
-Osaile
-Ortz
-Ortolano
-Ortea
-Orte
-Ortaga
-Orszulak
-Orser
-Orihuela
-Orejel
-Ordorica
-Ording
-Ordal
-Orbin
-Oransky
-Oppel
-Onsgard
-Ondrick
-Olsin
-Ollmann
-Olives
-Olavarria
-Olano
-Olafson
-Okuno
-Okuniewski
-Okuhara
-Okrent
-Okoniewski
-Okeke
-Ohs
-Ohotnicky
-Ohno
-Ohlund
-Ohlendorf
-Ohaire
-Ogaz
-Ogando
-Offield
-Odiorne
-Oclair
-Ockenfels
-Ochocki
-Ocamb
-Ocallahan
-Obleton
-Oberly
-Oberhelman
-Oberbeck
-Nylin
-Nydick
-Nwachukwu
-Nutzmann
-Nuque
-Nunz
-Nulle
-Nuffer
-Notti
-Nothum
-Nothnagel
-Notah
-Nossett
-Nose
-Nosbisch
-Norrix
-Norlien
-Norkin
-Nordon
-Nordmeyer
-Norat
-Nooe
-Nokleby
-Nofziger
-Noens
-Nivison
-Niu
-Nittler
-Nissalke
-Nishikawa
-Ninness
-Nin
-Nimon
-Nifong
-Niewieroski
-Nietzer
-Niemela
-Nicolette
-Nicoletta
-Nico
-Nickolas
-Nickless
-Nicklaw
-Niccoli
-Nibbs
-Neyland
-Newmark
-Newey
-Newbauer
-Nevwirth
-Neverman
-Neuser
-Neumaier
-Neufville
-Netzley
-Netzel
-Nettle
-Neiswonger
-Neiswender
-Neilan
-Neidhardt
-Neesmith
-Nebgen
-Navia
-Nate
-Nasuti
-Nasso
-Nassimi
-Nashe
-Nases
-Naro
-Nardo
-Narasimhan
-Naqvi
-Nanka
-Naman
-Nahrstedt
-Nagura
-Nagarajan
-Nadile
-Nabours
-Nabers
-Mysinger
-Mynear
-Muzzarelli
-Muthig
-Mustian
-Muskus
-Muskelly
-Musi
-Mushtaq
-Musca
-Murzynski
-Murzyn
-Murrillo
-Murello
-Murdy
-Murakawa
-Munsinger
-Munnell
-Munks
-Munkberg
-Mundorf
-Mummey
-Mullick
-Mulkin
-Mulhollen
-Mulgrew
-Mulderig
-Mulac
-Muehl
-Muddiman
-Muckerman
-Muckenthaler
-Much
-Mucciolo
-Mruczek
-Mrazek
-Mowat
-Moure
-Mould
-Motts
-Mosure
-Mossor
-Mossberg
-Mosler
-Mosha
-Moscrip
-Moschetti
-Mosbarger
-Morua
-Morss
-Morron
-Morrall
-Moroni
-Morioka
-Moricca
-Morgensen
-Morganson
-Moreshead
-Morely
-Morch
-Moras
-Morar
-Moranville
-Moralas
-Morak
-Moradel
-Moothart
-Moonen
-Monzingo
-Montpetit
-Montjoy
-Monteagudo
-Monoz
-Mongrain
-Mongon
-Mondejar
-Monas
-Monachino
-Momplaisir
-Momin
-Moment
-Molpus
-Molony
-Molner
-Molleda
-Molinski
-Molinelli
-Molfetta
-Molenda
-Molchan
-Mohseni
-Mogg
-Moerke
-Moenius
-Moehlman
-Modugno
-Modi
-Modest
-Moder
-Moch
-Moat
-Miyamura
-Mittlestadt
-Mittelstedt
-Mittelman
-Mitschelen
-Mitro
-Mitchan
-Misty
-Missey
-Misenhimer
-Mirra
-Mirjah
-Mirante
-Miosek
-Minteer
-Minrod
-Minning
-Minney
-Minnema
-Minium
-Minihane
-Minicucci
-Minecci
-Minchey
-Milota
-Millson
-Milloway
-Millonzi
-Millier
-Milley
-Millam
-Milillo
-Milbrath
-Mikowski
-Mikola
-Mikler
-Mihelic
-Mihaila
-Miesen
-Mierzejewski
-Mickels
-Michienzi
-Michalke
-Miazga
-Mezydlo
-Mezick
-Meynard
-Meylor
-Mexicano
-Metsker
-Metrick
-Meter
-Mestad
-Meske
-Mertins
-Merta
-Mersinger
-Merschman
-Merna
-Merila
-Meridieth
-Mergen
-Merel
-Menzella
-Menze
-Mentnech
-Menson
-Mensick
-Mennig
-Mendillo
-Memos
-Melroy
-Melochick
-Mells
-Mellgren
-Meline
-Melich
-Melena
-Melchiori
-Melching
-Melahn
-Meisler
-Meinerding
-Meilleur
-Meidlinger
-Mehner
-Megrabyan
-Megee
-Meeuwsen
-Medlar
-Medick
-Medema
-Mechler
-Mechanic
-Meadowcroft
-Mcpike
-Mcpeake
-Mcnell
-Mcneary
-Mcmutry
-Mcmeekin
-Mcmannus
-Mcluen
-Mclouth
-Mclerran
-Mcleoud
-Mclagan
-Mckone
-Mckneely
-Mckissic
-Mckinnell
-Mckillips
-Mckibbon
-Mckenty
-Mckennan
-Mckeeman
-Mckasson
-Mcinturf
-Mcinerny
-Mchan
-Mcgurn
-Mcguirl
-Mcgue
-Mcgrain
-Mcgonnell
-Mcglumphy
-Mcglauflin
-Mcginity
-Mcgibboney
-Mcgeough
-Mcgauley
-Mcgarvie
-Mcfatter
-Mcentegart
-Mcenroe
-Mcelmury
-Mcelhinny
-Mcdonnel
-Mcdoniel
-Mcdoe
-Mcdermond
-Mcdearmon
-Mcdearman
-Mcday
-Mcdannald
-Mcdaid
-Mccurren
-Mccrosky
-Mccrane
-Mccraig
-Mccooey
-Mccoo
-Mccolpin
-Mccolloch
-Mcclucas
-Mcclester
-Mcclement
-Mcclamroch
-Mcclammy
-Mcclallen
-Mccarte
-Mccaie
-Mccaddon
-Mcanelly
-Mcalmond
-Mcalary
-Mazzini
-Mazzarino
-Mazzara
-Mazzanti
-Mazurk
-Mazor
-Mayerle
-Mayenschein
-Mayard
-Mayans
-Maxedon
-Mavromatis
-Mavins
-Maves
-Mausser
-Maulsby
-Matya
-Matuke
-Matto
-Mattler
-Mattiace
-Matkowski
-Mathern
-Matero
-Matchette
-Matayoshi
-Matar
-Mastine
-Massing
-Massimo
-Masseria
-Massenberg
-Massard
-Masoud
-Masotti
-Maslak
-Masey
-Masella
-Mascarena
-Mascall
-Marzella
-Maryott
-Marwick
-Marugg
-Martt
-Martinis
-Martian
-Martha
-Marstaller
-Marsingill
-Marsicek
-Marotto
-Market
-Markegard
-Marke
-Marinella
-Marien
-Margison
-Margheim
-Margason
-Margaris
-Margaret
-Marett
-Marentes
-Marcott
-Marcon
-Marchena
-Marcellino
-Mapston
-Mantione
-Mantanona
-Mansouri
-Manoi
-Mankus
-Mankins
-Manin
-Manikas
-Mangieri
-Manfredini
-Mane
-Mandt
-Mandolini
-Mandley
-Mancina
-Manas
-Maltsberger
-Maltais
-Malmin
-Mallis
-Mallicoat
-Malleck
-Mallach
-Malkowski
-Malkani
-Malito
-Malensek
-Malandra
-Malander
-Makos
-Makanani
-Maille
-Mail
-Maidens
-Maid
-Mahowald
-Mahala
-Mahajan
-Magnotta
-Maggiore
-Magel
-Maestos
-Maerz
-Maedche
-Madise
-Madi
-Mades
-Maddaloni
-Madayag
-Madaras
-Macnair
-Mackinlay
-Mackesy
-Machon
-Machia
-Machey
-Machesky
-Machacek
-Maceyak
-Macchio
-Macbride
-Mabray
-Maasch
-Lyseski
-Lykken
-Luzania
-Luxenberg
-Lutrell
-Lupkes
-Lupino
-Lupardus
-Lunnon
-Lunghofer
-Lundvall
-Lundby
-Lundborg
-Lulow
-Lukman
-Lukin
-Lukaszewski
-Lukacs
-Lugones
-Luger
-Lueder
-Ludeke
-Lucek
-Lucchetti
-Lucchese
-Lozowski
-Lozaro
-Loyer
-Lowthert
-Lowdermilk
-Lovitz
-Lovinggood
-Lovenduski
-Loura
-Loung
-Lounder
-Louks
-Loughry
-Loudermill
-Lotta
-Lostetter
-Loskot
-Losiewski
-Lorman
-Loren
-Lorelli
-Lorange
-Lonsinger
-Longinotti
-Longhurst
-Lomedico
-Lola
-Lohwasser
-Lohn
-Lohden
-Lograsso
-Logie
-Loftman
-Loften
-Lofaso
-Loewer
-Loehrs
-Locy
-Loconte
-Lockerman
-Lockerby
-Locken
-Lobaton
-Loatman
-Lleras
-Lizak
-Livingood
-Litwiler
-Litvin
-Littledave
-Lites
-Lisee
-Lipszyc
-Lippy
-Lionello
-Linsday
-Linnear
-Linklater
-Lingbeck
-Lindie
-Lindenfelser
-Lindenberger
-Linarez
-Limber
-Lily
-Lightning
-Liffick
-Lieto
-Liestman
-Liepins
-Lieng
-Liebross
-Licciardi
-Licavoli
-Libbee
-Lhuillier
-Lhommedieu
-Leyra
-Lewman
-Levreault
-Levitre
-Levings
-Levick
-Levecke
-Levanger
-Leval
-Leva
-Leuthold
-Leuenthal
-Letze
-Letterlough
-Leski
-Lerwill
-Lertora
-Leppla
-Leopoldo
-Leonides
-Leonardis
-Lenoue
-Lenoch
-Lengerich
-Lemont
-Lemmert
-Lemery
-Lemaitre
-Lella
-Leko
-Leithauser
-Leisher
-Leise
-Leisch
-Leiendecker
-Leiber
-Leialoha
-Lehtomaki
-Lehigh
-Leggs
-Legate
-Leflar
-Lefeber
-Leezer
-Ledden
-Lecleir
-Lechliter
-Lebrane
-Lebarron
-Leason
-Leapheart
-Leadman
-Lazarte
-Lawin
-Lavole
-Lavesque
-Laverdure
-Lautner
-Lauthern
-Laurila
-Laurendeau
-Launderville
-Laumeyer
-Latina
-Laszlo
-Lassan
-Larzelere
-Larzazs
-Larubbio
-Larriuz
-Larew
-Laremont
-Laredo
-Lardizabal
-Larance
-Lappa
-Lapolla
-Lapatra
-Lapaglia
-Lantieri
-Lannan
-Lann
-Langwith
-Langolf
-Langloss
-Langlo
-Langholz
-Langhart
-Langfitt
-Langendorf
-Langenbach
-Langbehn
-Lanehart
-Landoni
-Landherr
-Landberg
-Landazuri
-Lancey
-Lamus
-Lamunyon
-Lampitt
-Lampiasi
-Lammon
-Lamme
-Lamirand
-Lambes
-Lamarta
-Lamarra
-Lalim
-Lalande
-Laky
-Laitila
-Laidler
-Laich
-Lahue
-Lahtinen
-Lagrasse
-Lagrand
-Lagle
-Lagerstrom
-Lagerberg
-Laferney
-Lacson
-Lachenauer
-Lablue
-Labean
-Lab
-Kuzara
-Kuza
-Kuy
-Kutchera
-Kustra
-Kurtyka
-Kurschner
-Kurka
-Kunstlinger
-Kunka
-Kunicki
-Kunda
-Kulling
-Kulla
-Kulbida
-Kuker
-Kujath
-Kujala
-Kuhta
-Kuhner
-Kuhle
-Kufalk
-Kuennen
-Kuen
-Kudley
-Kucharik
-Kuca
-Kubic
-Kryst
-Krysh
-Krumenauer
-Kruczek
-Kroschel
-Kronk
-Kroells
-Krivak
-Kristoff
-Kristin
-Kreuziger
-Kreitz
-Kreisberg
-Kreiman
-Kreighbaum
-Kreh
-Kreck
-Kraszewski
-Krason
-Krammes
-Krake
-Kozusko
-Kozola
-Kozikowski
-Kozielski
-Kowis
-Kowalske
-Kottman
-Kottler
-Kottenstette
-Kostelnick
-Kosmowski
-Koska
-Kosinar
-Kosik
-Kosanovic
-Kosanke
-Kortge
-Korsak
-Kornbau
-Kordas
-Korby
-Korbel
-Kopperman
-Koppenhaver
-Kopischke
-Koper
-Kopelman
-Kopel
-Kopas
-Kooser
-Koors
-Koor
-Koone
-Koogle
-Konzen
-Konieczka
-Kondracki
-Kondos
-Komatsu
-Kolo
-Kolarik
-Kolacki
-Kokesh
-Kohrt
-Kohrs
-Kogel
-Kofron
-Kofman
-Koewler
-Koetting
-Koes
-Koellner
-Koellmann
-Koczela
-Kocon
-Knoth
-Knollman
-Knoebel
-Knknown
-Knittle
-Kniphfer
-Knightly
-Kniffin
-Knaphus
-Knaak
-Kloth
-Klonoski
-Kloke
-Kloer
-Klinetob
-Kliger
-Klich
-Kleyman
-Klepchick
-Klemish
-Kleen
-Klebe
-Klakowicz
-Klaft
-Kithcart
-Kister
-Kisker
-Kishel
-Kishbaugh
-Kirt
-Kirouac
-Kirley
-Kirklen
-Kirkegaard
-Kirchen
-Kipka
-Kipfer
-Kinsinger
-Kiniry
-Kinikini
-Kingma
-Kinderknecht
-Kinahan
-Kimmes
-Kimak
-Killiany
-Killelea
-Kilkus
-Kilfoyle
-Kiflezghie
-Kiffer
-Kiesewetter
-Kienow
-Kieler
-Kiebler
-Kicks
-Kicker
-Kibel
-Kibe
-Kibbee
-Kiang
-Khounthavong
-Khatri
-Khamsyuorauon
-Kham
-Keye
-Keup
-Keto
-Ketch
-Kess
-Kerth
-Kero
-Kernell
-Kerkvliet
-Keomany
-Keomanivong
-Kennemur
-Kennel
-Kenndey
-Kendi
-Kempter
-Kempinski
-Kemna
-Kellan
-Keliikoa
-Keledjian
-Keithan
-Keisel
-Keib
-Kehs
-Kedley
-Keay
-Kearin
-Kawulok
-Kawai
-Kawaa
-Kava
-Kaunisto
-Kaumo
-Kauahi
-Kattner
-Katra
-Kastel
-Kastein
-Kassulke
-Kassman
-Kassing
-Kashani
-Kasch
-Karty
-Karstetter
-Karrenberg
-Karper
-Karow
-Karmo
-Karhoff
-Kardell
-Kardas
-Karapetian
-Kapper
-Kappen
-Kapichok
-Kanis
-Kaneakua
-Kanaris
-Kamuda
-Kamirez
-Kamat
-Kaloudis
-Kallberg
-Kallaher
-Kalkwarf
-Kalkman
-Kalk
-Kalisek
-Kalehuawehe
-Kalchik
-Kalbfleisch
-Kalberer
-Kalal
-Kala
-Kakimoto
-Kaing
-Kaigle
-Kahill
-Kahanaoi
-Kaemmerling
-Kadri
-Kadle
-Kading
-Kadi
-Kadar
-Kachmar
-Kachiroubas
-Kachelmeyer
-Kaase
-Juve
-Juul
-Justinger
-Jungwirth
-Jungman
-Jungck
-Julander
-Juenemann
-Jubie
-Joun
-Joswick
-Jossund
-Joss
-Jory
-Jonnson
-Jongsma
-Joliet
-Johngrass
-Jocoy
-Jing
-Jimerez
-Jimbo
-Jeudy
-Jerowski
-Jernstrom
-Jernstad
-Jernberg
-Jeoffroy
-Jentry
-Jennie
-Jeng
-Jenaye
-Jemerson
-Jeltema
-Jeanpaul
-Jeanmard
-Jax
-Javery
-Jaudon
-Jasperse
-Jasmer
-Jarred
-Jarrar
-Jargas
-Jardot
-Jardell
-Jaquay
-Jappa
-Janower
-Jankoski
-Janise
-Jandrey
-Jandl
-Jakubiak
-Jakobson
-Jakobsen
-Jahncke
-Jagers
-Jacobitz
-Jackon
-Izard
-Ivel
-Itzkowitz
-Itani
-Issacs
-Isome
-Isle
-Islar
-Isidro
-Isidoro
-Isch
-Irvan
-Irizary
-Irene
-Ipson
-Ip
-Ioele
-Interiano
-Insalaco
-Iniestra
-Ingargiola
-Impson
-Illiano
-Iller
-Illa
-Ilardi
-Iida
-Ihrke
-Igneri
-Igbal
-Igartua
-Iffland
-Idell
-Iberra
-Iba
-Ianacone
-Hysong
-Hyrkas
-Huzzard
-Huttle
-Husselbee
-Husseini
-Hupe
-Hunzeker
-Hunnicut
-Humprey
-Humbird
-Humason
-Hugle
-Hufana
-Huestis
-Huesing
-Huell
-Hudy
-Hudley
-Hudas
-Hudalla
-Hudack
-Huckfeldt
-Hubka
-Hubenthal
-Huante
-Hsing
-Hromek
-Hritz
-Hrdlicka
-Howzell
-Howles
-Howat
-Hovarter
-Houy
-Housler
-Houska
-Houseal
-Houlberg
-Hostert
-Hosman
-Hoscheid
-Horvers
-Hortin
-Hornish
-Hornbeak
-Hornaday
-Hoppman
-Hopfer
-Hoot
-Honts
-Honsberger
-Hons
-Honnen
-Honberger
-Honahnie
-Homma
-Homesley
-Holyoak
-Holweger
-Holubar
-Holtzer
-Holtrop
-Holtberg
-Holpp
-Holmquest
-Hollinghead
-Holje
-Holgerson
-Holabaugh
-Hoitt
-Hofford
-Hoffmaster
-Hoffine
-Hoffelt
-Hoes
-Hoellwarth
-Hoegh
-Hoegerl
-Hoeger
-Hodrick
-Hodgkiss
-Hodek
-Hockey
-Hobday
-Hlavacek
-Hlad
-Hitzeman
-Hitzel
-Hitsman
-Hissong
-Hissam
-Hiscock
-Hirz
-Hirshberg
-Hipkins
-Hinsch
-Hinken
-Hinckle
-Hinchliff
-Himmons
-Himmelwright
-Himmelspach
-Himebaugh
-Hilst
-Hilmes
-Hillsgrove
-Hillestad
-Hillesland
-Hillegass
-Hilfiger
-Hilado
-Highshaw
-Highers
-Higginbothan
-Higbie
-Hieronymus
-Hidy
-Hickory
-Hickernell
-Hibma
-Hibbets
-Heximer
-Hewgley
-Heutmaker
-Heuschkel
-Heupel
-Heumann
-Heuman
-Hetzer
-Hetherman
-Hesterman
-Hespe
-Hertweck
-Herson
-Herry
-Herrboldt
-Herms
-Hermosilla
-Herl
-Herbolsheimer
-Herbel
-Hera
-Heptinstall
-Heppler
-Heppell
-Henslin
-Henschen
-Hennington
-Hennagir
-Henkhaus
-Henken
-Henggeler
-Hempfling
-Hemmerling
-Hemish
-Hema
-Helveston
-Helsey
-Helscher
-Helo
-Heline
-Helfin
-Helder
-Heitner
-Heiple
-Heinzelman
-Heinricher
-Heines
-Heimsness
-Heiler
-Heidelburg
-Heiberg
-Hegner
-Hegler
-Hefferman
-Heffelbower
-Heebner
-Hediger
-Hedding
-Heckbert
-Hearnsberger
-Heaivilin
-Heagle
-Heafner
-Hazelrig
-Hayth
-Hayoz
-Haydu
-Haybarger
-Haya
-Havers
-Haverfield
-Hauze
-Haugabrook
-Haub
-Hathcoat
-Hasychak
-Hassin
-Hassey
-Hasenberg
-Hasek
-Harvat
-Haruta
-Hartvigsen
-Hartong
-Hartke
-Harre
-Harradon
-Harnisch
-Harmond
-Harmening
-Harlem
-Harkrader
-Harklerode
-Hargitt
-Hardon
-Hardgrave
-Hardester
-Harbeson
-Harben
-Hanrath
-Handville
-Handcock
-Hamza
-Hamson
-Hamming
-Hamic
-Hambley
-Halphen
-Halpain
-Halmes
-Hallaway
-Hallauer
-Half
-Haldiman
-Halbur
-Hakkila
-Hakimian
-Haimes
-Hahs
-Hagmann
-Hagglund
-Hagert
-Hagee
-Hafeman
-Haeber
-Haddan
-Hada
-Hackner
-Hackel
-Hacher
-Habisch
-Haarstad
-Haare
-Haaker
-Gyger
-Guzowski
-Guzi
-Guzalak
-Guyon
-Guyll
-Gutzmer
-Guttirez
-Gutt
-Gutierrex
-Gutierre
-Gut
-Gustis
-Gushwa
-Gurke
-Gurevich
-Gunyan
-Gumz
-Guisbert
-Guire
-Guintanilla
-Guimaraes
-Guillereault
-Guidos
-Guidera
-Guffin
-Guererro
-Guenthner
-Guedes
-Guareno
-Guardian
-Grussing
-Gruska
-Grudzien
-Growcock
-Grossenbacher
-Grosjean
-Groshans
-Grondahl
-Grollimund
-Groeneveld
-Groenendyk
-Grinnan
-Grindell
-Grindeland
-Grimaud
-Grigorov
-Griffard
-Grierson
-Grich
-Gribbins
-Gribbin
-Grever
-Gretter
-Grennon
-Grenfell
-Gremer
-Greising
-Greenhoward
-Gravitz
-Gravis
-Gravino
-Graubard
-Grates
-Granstrom
-Grannell
-Grandt
-Granat
-Grambling
-Gramajo
-Gralak
-Graise
-Grafe
-Grade
-Grad
-Gracy
-Goyco
-Goyal
-Govindeisami
-Govert
-Govero
-Gouras
-Goulbourne
-Goularte
-Gouker
-Gotwalt
-Gottshall
-Gottsch
-Gorum
-Gordo
-Gordils
-Gorbet
-Goonan
-Goombi
-Gooley
-Goolesby
-Goodlet
-Goodland
-Gomaz
-Golt
-Golombek
-Golom
-Golojuch
-Golightley
-Goldyn
-Goldkamp
-Goldfine
-Goldermann
-Goffinet
-Goetter
-Goethals
-Goerdt
-Goehl
-Goedken
-Goede
-Goedde
-Goeckel
-Godshall
-Godleski
-Godino
-Godine
-Godden
-Godar
-Gockley
-Gockel
-Gochnour
-Gobler
-Goard
-Gniewek
-Gnerre
-Gluszek
-Glunt
-Glotzbach
-Glory
-Glista
-Glisan
-Glende
-Glee
-Gleave
-Glaus
-Glau
-Glassing
-Gladhill
-Gizzo
-Giulian
-Gittins
-Girven
-Girt
-Girling
-Girardot
-Gipp
-Giovannini
-Gionet
-Gins
-Ginolfi
-Gimar
-Gilvin
-Gilliom
-Gilling
-Gillece
-Gilio
-Gildow
-Gilberg
-Gieser
-Gierisch
-Gielow
-Gieck
-Gica
-Gibboney
-Giarraputo
-Gianopoulos
-Giannecchini
-Giambruno
-Ghrist
-Ghiloni
-Geving
-Getto
-Gessford
-Gesner
-Gesick
-Gerstenkorn
-Gersbach
-Geroge
-Gerleman
-Gerl
-Gerkin
-Gerding
-Gerchak
-Georgiades
-Geoffroy
-Gentes
-Genre
-Genous
-Genge
-Geney
-Gendusa
-Gendel
-Gemma
-Gembler
-Gemaehlich
-Geldmacher
-Gehris
-Geffrard
-Geffken
-Geans
-Gavel
-Gavaldon
-Gaughran
-Gaud
-Gaucin
-Gauch
-Gattuso
-Gatliff
-Gather
-Gastonguay
-Gassen
-Gasior
-Garzia
-Gartz
-Gartley
-Garski
-Garramone
-Garoner
-Garone
-Garnow
-Garley
-Garibai
-Garguilo
-Garfunkel
-Gardley
-Gardecki
-Garcilazo
-Garbarini
-Garan
-Garafalo
-Gani
-Gandert
-Gampong
-Gamons
-Gamma
-Gambone
-Gambler
-Galves
-Galo
-Galm
-Galluccio
-Gallinari
-Gallentine
-Gallamore
-Galeotti
-Galella
-Gajica
-Gaisford
-Gaietto
-Gahlman
-Gahl
-Gaglia
-Gaffke
-Gaetz
-Gadwah
-Gabaree
-Gaar
-Fust
-Furutani
-Furner
-Furnace
-Furgison
-Furgeson
-Fundis
-Fullem
-Fullagar
-Fujisawa
-Fugit
-Fugh
-Fuemmeler
-Fuelling
-Fude
-Frusci
-Frosch
-Frontera
-Fronek
-Fritzman
-Fristoe
-Frishkorn
-Frilling
-Frigge
-Friels
-Friehe
-Friedline
-Fridlington
-Frezzo
-Frezza
-Fresta
-Freise
-Freiman
-Freidhof
-Freiberger
-Freetage
-Freet
-Freemyer
-Fredin
-Fredenberg
-Frayne
-Fraughton
-Franzel
-Frankie
-Frankenstein
-Frankenberg
-Francher
-Franch
-Francesconi
-Franc
-Fraize
-Fragmin
-Frabott
-Foxman
-Fouty
-Fournet
-Foulcard
-Fouhy
-Fougere
-Fotopoulos
-Forsmark
-Fornell
-Form
-Forline
-Forguson
-Fontus
-Fontanella
-Folkner
-Fok
-Foggie
-Fogelman
-Flumerfelt
-Fluegge
-Fluegel
-Fluck
-Floe
-Flocco
-Flitsch
-Flirt
-Flinders
-Fletchen
-Flechsig
-Flebbe
-Flathers
-Flatau
-Flamer
-Flaharty
-Fladger
-Fitten
-Fitchpatrick
-Fissori
-Fissel
-Fischler
-Fioritto
-Fiori
-Fiorentini
-Fiorella
-Finnemore
-Finkelson
-Fingleton
-Fingerhut
-Finazzo
-Filmer
-Fillip
-Fillingham
-Filipek
-Filan
-Figurski
-Figueron
-Figueiras
-Figley
-Fiedor
-Ficker
-Fickas
-Fevig
-Feutz
-Fetner
-Fertal
-Ferraiolo
-Fernsler
-Fernet
-Fernatt
-Fergusen
-Ferg
-Feraco
-Fenny
-Fengler
-Felsted
-Fellner
-Fellin
-Fellenz
-Felkner
-Felkel
-Feliu
-Feleppa
-Felderman
-Felde
-Feigel
-Feickert
-Feibusch
-Fedorek
-Fedora
-Federgreen
-Fedalen
-Feck
-Febre
-Fearnow
-Feagler
-Favorito
-Faville
-Favalora
-Fauls
-Faudree
-Fasulo
-Fassino
-Farson
-Farlin
-Faretra
-Farenbaugh
-Farella
-Faraone
-Faragoza
-Fanucchi
-Fantroy
-Fanny
-Fangman
-Famiglietti
-Faltus
-Faltin
-Falt
-Falley
-Falldorf
-Falick
-Fala
-Fahrney
-Faggs
-Fafard
-Faes
-Fadely
-Fadel
-Facchine
-Fabionar
-Ezagui
-Evoy
-Evilsizer
-Evick
-Eversoll
-Eversman
-Everley
-Evelo
-Euvrard
-Eun
-Etkin
-Ethen
-Estrela
-Esteb
-Estain
-Estacion
-Esquerra
-Esposto
-Espert
-Eskra
-Eskin
-Eskenazi
-Eshom
-Eshenbrenner
-Esera
-Escobio
-Eschief
-Eschenbrenner
-Erschen
-Erlewine
-Erdner
-Erck
-Erceg
-Erbach
-Epolito
-Ephriam
-Enwright
-Enwall
-Entrikin
-Entress
-Entler
-Enstad
-Engwall
-Engroff
-Englemann
-Engelson
-Enderlin
-Enamorado
-Emme
-Emlay
-Emke
-Emerton
-Embertson
-Elworthy
-Elwick
-Elward
-Eloy
-Ellyson
-Ellstrom
-Ellingboe
-Elliam
-Elifritz
-Elgart
-Elerick
-Eitzen
-Eismann
-Eisentrout
-Eischeid
-Eirich
-Eikner
-Eickhorst
-Ehrler
-Ehrle
-Eglinton
-Egerer
-Egelhoff
-Edmunson
-Ecord
-Eckrich
-Eckland
-Echevaria
-Ebersold
-Eberenz
-Ebener
-Ebadi
-Ealand
-Eaks
-Eagleston
-Eaglen
-Eagin
-Dyals
-Dwelley
-Duy
-Duva
-Dutter
-Dutko
-Duster
-Duskin
-Dusel
-Durrenberger
-Durke
-Durian
-Dupay
-Duntley
-Dunsford
-Dundee
-Dulemba
-Dugi
-Dufficy
-Duensing
-Dueno
-Dueitt
-Duclo
-Dubrock
-Dubitsky
-Drumgo
-Drozdowicz
-Dromgoole
-Drobot
-Drivas
-Drinkwine
-Drewing
-Dressman
-Dreessen
-Drainville
-Dragna
-Draffin
-Dowgiallo
-Dovey
-Dougher
-Dottin
-Dossous
-Dossie
-Dose
-Doronio
-Dorning
-Dorko
-Dorion
-Dorinirl
-Doring
-Doorn
-Donohoo
-Donnally
-Donkin
-Donez
-Donerson
-Dondlinger
-Donchez
-Donaway
-Donatien
-Donath
-Dommel
-Domine
-Domin
-Domiano
-Domhoff
-Domek
-Doller
-Dolinsky
-Dolberry
-Doker
-Doil
-Doidge
-Dohman
-Doeden
-Dodridge
-Dodgson
-Dobkowski
-Dobie
-Dobes
-Dobert
-Diwan
-Ditomasso
-Distaffen
-Distad
-Dispenza
-Disorbo
-Diskind
-Diserens
-Discipio
-Dirico
-Dire
-Dirago
-Diprima
-Dinwoodie
-Dinn
-Dinkens
-Dinius
-Dingeldein
-Dimon
-Dimitt
-Dimitriadis
-Dilliard
-Dilick
-Dilauro
-Dilallo
-Dilalla
-Dihel
-Digilio
-Difonzo
-Difeo
-Dietze
-Dietl
-Diesi
-Diesel
-Dieppa
-Dienes
-Diemert
-Diegel
-Dieffenbacher
-Diec
-Dickhoff
-Dickensheets
-Dibonaventura
-Dibblee
-Dibartolo
-Dibacco
-Dhondt
-Dewer
-Develbiss
-Devazier
-Devara
-Deuser
-Deur
-Deuell
-Detzel
-Dettling
-Detro
-Destine
-Destefanis
-Desorcy
-Desomma
-Deslandes
-Desisto
-Desiga
-Deshler
-Deshaw
-Desgroseillie
-Desaulniers
-Derwitsch
-Derrig
-Derouchie
-Dermady
-Derider
-Derfus
-Derbes
-Depperschmidt
-Depoyster
-Depaula
-Dense
-Dennin
-Deniro
-Denio
-Dengel
-Deneen
-Dempsy
-Demmy
-Demmert
-Demichelis
-Demedeiros
-Dembroski
-Dembitzer
-Demarse
-Demaranville
-Demagistris
-Deluz
-Delson
-Delrossi
-Delrie
-Delossanto
-Delos
-Delmolino
-Dellis
-Dellarocco
-Dellano
-Della
-Delisser
-Delille
-Deleston
-Delerme
-Deleone
-Delehanty
-Delbalso
-Delavina
-Delauter
-Delashmit
-Dekalb
-Deguire
-Degross
-Degroote
-Degrasse
-Degrange
-Degrace
-Degasperis
-Deffibaugh
-Defaber
-Decrosta
-Decristoforo
-Dechert
-Decelle
-Decapua
-Decapite
-Decandia
-Debuse
-Debruler
-Deblauw
-Debella
-Debeer
-Dayrit
-Davidian
-Davick
-Davich
-Davia
-Daversa
-Davern
-Davault
-Dautrich
-Dausch
-Dathe
-Dastrup
-Dassow
-Darras
-Darnold
-Darks
-Dargis
-Dargatz
-Darbouze
-Dannenfelser
-Dannard
-Dampf
-Dalzen
-Dalphonse
-Dalluge
-Dalhover
-Daivs
-Dainack
-Daher
-Dagle
-Daghita
-Dagdag
-Dafonseca
-Daffern
-Daehler
-Dadson
-Czuba
-Czlapinski
-Czarnik
-Czap
-Cynova
-Cwiklinski
-Cuzco
-Cutno
-Curt
-Curbow
-Cunninghan
-Cunis
-Cuningham
-Cunico
-Culmer
-Cuhel
-Cuestas
-Cuebas
-Cuchares
-Cubr
-Csizmadia
-Crumpacker
-Cruell
-Crousore
-Crosten
-Crosman
-Crooked
-Cromuel
-Cromey
-Crockarell
-Croan
-Crissler
-Crispen
-Crismon
-Crise
-Criscillis
-Crippin
-Crilly
-Cresta
-Cregar
-Cragun
-Coye
-Cowing
-Cower
-Coverstone
-Coverdell
-Couty
-Coutant
-Courtnage
-Courteau
-Couper
-Countee
-Coultas
-Coughran
-Cottew
-Cotler
-Cotelesse
-Costen
-Cossin
-Coskrey
-Cosen
-Cosden
-Corvera
-Cortis
-Corsello
-Corrion
-Corrigeux
-Correiro
-Coro
-Cornetta
-Corneil
-Corlee
-Corin
-Corgan
-Corfman
-Corell
-Cordovi
-Cordia
-Cordas
-Corcino
-Corchero
-Coral
-Coppolino
-Coppernoll
-Coppens
-Coote
-Cooperstein
-Cooperrider
-Conterras
-Consolazio
-Cons
-Connin
-Connerley
-Conkin
-Congress
-Concienne
-Conaghan
-Comrey
-Cominsky
-Comella
-Comee
-Come
-Combe
-Coln
-Collums
-Collamore
-Colicchio
-Colee
-Colding
-Colder
-Colbenson
-Colagiovanni
-Cokely
-Coin
-Codde
-Cobrin
-Coak
-Cluxton
-Cluesman
-Clouston
-Closser
-Clopp
-Cliatt
-Clendennen
-Clearman
-Clattenburg
-Clarks
-Clapsaddle
-Cius
-Cira
-Ciolli
-Cinotti
-Cimko
-Cima
-Cienega
-Cicatello
-Cicale
-Ciarlante
-Cianfrini
-Cianciulli
-Churley
-Churches
-Chuong
-Chukes
-Christou
-Christescu
-Christe
-Chrismon
-Chrisler
-Choun
-Chobot
-Chisem
-Chiong
-Chimera
-Chila
-Chicca
-Chiarito
-Chhun
-Chhum
-Chhim
-Chestang
-Chesler
-Cherubin
-Chernosky
-Cherebin
-Chepiga
-Chellis
-Chell
-Cheda
-Checca
-Cheater
-Cheatem
-Chaulk
-Chaudhuri
-Chauca
-Chatcho
-Chartraw
-Charping
-Charnley
-Charm
-Charlson
-Charbonneaux
-Charan
-Chapp
-Chango
-Chanez
-Chancer
-Chamnanphony
-Chalepah
-Chaiken
-Chaddlesone
-Chaconas
-Chabaud
-Cestia
-Cessor
-Cervetti
-Cerveny
-Cerise
-Cerecer
-Cerasoli
-Cera
-Centini
-Cenci
-Cembura
-Celli
-Cederstrom
-Cdebaca
-Cayo
-Cawthron
-Caviggia
-Cavers
-Caveney
-Causley
-Caughlin
-Cathie
-Catan
-Catala
-Castrogiovann
-Castleton
-Castilo
-Castillio
-Castellaw
-Castellari
-Castejon
-Caspersen
-Casivant
-Cashio
-Cascioli
-Casciano
-Casamento
-Casadei
-Carwin
-Carvin
-Carucci
-Cartin
-Cartez
-Carston
-Carrio
-Carriaga
-Carretino
-Carotenuto
-Carosiello
-Carolfi
-Carnathan
-Carnalla
-Carnagey
-Carlill
-Carinio
-Cariker
-Caride
-Care
-Cardero
-Cardenal
-Carasquillo
-Carabez
-Capwell
-Capurro
-Capulong
-Cappucci
-Cappetta
-Cappa
-Capouch
-Caporali
-Caponigro
-Capilla
-Capata
-Capan
-Canzoneri
-Cantine
-Cantarano
-Cannellos
-Cannard
-Cannada
-Canlas
-Cangey
-Canaan
-Campoy
-Campany
-Campainha
-Cambi
-Camba
-Camastro
-Camano
-Calrk
-Callin
-Callari
-Calicutt
-Calemine
-Caleb
-Caldon
-Caldas
-Cajas
-Cadelina
-Cacal
-Cabriales
-Cables
-Bytheway
-Byland
-Byes
-Byan
-Buzick
-Buziak
-Buzhardt
-Butzlaff
-Buttolph
-Butta
-Butron
-Butorac
-Butaud
-Butac
-Busuttil
-Busque
-Busing
-Busboom
-Burwood
-Burright
-Burri
-Burrall
-Burness
-Burlington
-Burlin
-Burkham
-Burick
-Burich
-Burgner
-Burdex
-Burdell
-Burde
-Burba
-Buol
-Bundi
-Bulick
-Bulgin
-Bukovsky
-Bukovac
-Bujak
-Bugett
-Buffo
-Bueschel
-Bueckers
-Budnik
-Buckey
-Buckel
-Buchko
-Buchinski
-Buchana
-Buchaman
-Bucek
-Buba
-Bryans
-Brustkern
-Brussel
-Brusseau
-Bruntz
-Brunscheen
-Brunken
-Brumbach
-Bruess
-Brueckman
-Brueck
-Brucken
-Brozena
-Brozek
-Brownley
-Browers
-Brosman
-Brosch
-Broody
-Brood
-Bronzo
-Bronn
-Bromwell
-Brome
-Bromagen
-Broll
-Brofman
-Broekemeier
-Brodi
-Brixner
-Brisban
-Brinkmeier
-Bringham
-Bridgforth
-Bridgette
-Breznak
-Brewbaker
-Breitweiser
-Breiten
-Breitbarth
-Brehaut
-Breedan
-Breech
-Bree
-Bredernitz
-Brechner
-Brechbiel
-Breashears
-Brazinski
-Brazille
-Bratz
-Bratu
-Bratsch
-Bras
-Branting
-Brannin
-Bramsen
-Brailford
-Bragas
-Bradney
-Bradner
-Bradigan
-Bradica
-Brad
-Brabston
-Bozwell
-Boys
-Boyn
-Boyar
-Boyance
-Boxton
-Bowering
-Bowar
-Bournazian
-Bourgue
-Bourgoine
-Bourdage
-Boulier
-Boulds
-Boulding
-Bouch
-Bottum
-Bottorf
-Botero
-Bossler
-Bosshardt
-Bossart
-Bosman
-Borzillo
-Borstad
-Borsos
-Borsellino
-Borrayo
-Borowiak
-Borio
-Borgos
-Borglum
-Borghoff
-Boreland
-Bordeleau
-Borchelt
-Boorman
-Boole
-Bookwalter
-Bookhart
-Bonventre
-Bonucchi
-Bonnema
-Bongard
-Bonardi
-Bonadio
-Bomstad
-Bombaci
-Bolus
-Bolognese
-Bolnick
-Bolebruch
-Boldrin
-Bolder
-Boje
-Boho
-Bohmker
-Bogosh
-Bognar
-Bogin
-Bogatitus
-Bogaert
-Boga
-Boehmke
-Boeh
-Bodway
-Bodemann
-Bockhorst
-Bochner
-Bocek
-Boblitt
-Bobbit
-Boatfield
-Boast
-Boardley
-Bo
-Blumhardt
-Blower
-Blondell
-Bloemer
-Bloczynski
-Blint
-Blenden
-Blend
-Blem
-Bleininger
-Bleile
-Blehm
-Blechman
-Bleak
-Blattler
-Blattel
-Blatherwick
-Blatchley
-Blasing
-Blasen
-Blandin
-Blaire
-Blad
-Blackler
-Bizzle
-Bison
-Bisogno
-Bisking
-Bishopp
-Bischke
-Biscaro
-Bisarra
-Birton
-Birrueta
-Birrell
-Birklid
-Binkerd
-Binetti
-Binegar
-Bindrup
-Billerbeck
-Bilka
-Biley
-Bilecki
-Biglin
-Bievenue
-Bierwagen
-Biernat
-Bienvenue
-Bielik
-Biedrzycki
-Bideaux
-Bidding
-Bickman
-Biber
-Bibel
-Biancardi
-Bialy
-Bialke
-Bialecki
-Bhattacharya
-Bezak
-Bevilaqua
-Beuth
-Beuter
-Beutel
-Beucler
-Betties
-Betteridge
-Betschart
-Betran
-Bethley
-Beteta
-Beswick
-Bessmer
-Bessemer
-Besherse
-Beserra
-Berver
-Bertuzzi
-Bertke
-Berthelsen
-Berthelette
-Bertagna
-Bersch
-Berrio
-Bernoski
-Bernatowicz
-Bernardy
-Berling
-Berl
-Bergmeier
-Bergland
-Bergfield
-Bergesen
-Bergem
-Bergantzel
-Bergamo
-Berdecia
-Berardo
-Berardino
-Bequillard
-Benzinger
-Benyamin
-Bentzen
-Bennice
-Benke
-Benet
-Beneker
-Benedum
-Benedick
-Bend
-Bencosme
-Bemrose
-Bemiller
-Bemer
-Belzung
-Belmarez
-Bellina
-Bellendir
-Bellemare
-Bellantuono
-Bellanca
-Belkin
-Belinski
-Belcourt
-Bejaran
-Behl
-Beeker
-Beeghly
-Bedney
-Bedker
-Bedeau
-Beddome
-Beddoe
-Becvar
-Beccaria
-Beaz
-Beaushaw
-Beaulac
-Beatley
-Beardon
-Beachem
-Beachel
-Bazydlo
-Baydal
-Baxi
-Bauserman
-Baudler
-Batzli
-Battino
-Battee
-Batley
-Batesole
-Batcher
-Basurto
-Basu
-Bastianelli
-Bassage
-Basner
-Bashford
-Basher
-Bashara
-Basha
-Baselice
-Bartosiewicz
-Bartolomucci
-Bartnick
-Bartholic
-Barthe
-Bartelson
-Barsuhn
-Barson
-Barries
-Barricelli
-Barrena
-Barredo
-Barraz
-Barrale
-Baroldy
-Barne
-Barmettler
-Barjas
-Baris
-Bareis
-Bardach
-Barcroft
-Barcello
-Barbuto
-Barbrick
-Barbo
-Barbish
-Barbaria
-Baras
-Baragona
-Baquet
-Banwell
-Banowetz
-Bandle
-Bambhrolia
-Balthazar
-Balson
-Balliett
-Ballestas
-Balin
-Balfany
-Balette
-Baldrige
-Baldenegro
-Baldassara
-Baldasaro
-Balcorta
-Balckwell
-Balcitis
-Balasco
-Baka
-Baish
-Bainum
-Bailin
-Baile
-Bahlmann
-Baher
-Bagoyo
-Baggette
-Bafford
-Baddley
-Badanguio
-Badamo
-Badame
-Baczewski
-Bacorn
-Bacolor
-Bacigalupi
-Bachtold
-Bacha
-Babick
-Azzano
-Azua
-Azhocar
-Ayre
-Aydt
-Aydlett
-Axsom
-Awada
-Averbach
-Avenoso
-Auzston
-Auyong
-Autaubo
-Austad
-Aus
-Aurora
-Aultz
-Aulds
-Auldridge
-Aul
-Auge
-Auel
-Audirsch
-Audain
-Auchmoody
-Aubertine
-Auber
-Astry
-Asquith
-Asp
-Ashdown
-Asen
-Aselage
-Ascensio
-Asam
-Asad
-Artuso
-Artinger
-Arritola
-Arre
-Arraiol
-Arra
-Arouri
-Arnzen
-Arntson
-Arnstein
-Arnoldy
-Arnhart
-Arnet
-Armentor
-Armel
-Arganbright
-Argall
-Argabright
-Arenstam
-Ardinger
-Arcuo
-Arambulo
-Aramboles
-Arabian
-Appelt
-Appelgren
-Apodoca
-Ape
-Anzai
-Anttila
-Antoniou
-Antoniotti
-Antonakos
-Antell
-Antee
-Antaya
-Anschutz
-Ano
-Annon
-Anne
-Annarummo
-Anick
-Angelovich
-Anes
-Androes
-Andrle
-Andreoli
-Andreassen
-Anderl
-Ancira
-Anastasi
-Anastacio
-Analla
-Ana
-Amunrud
-Amparan
-Amory
-Amores
-Amodei
-Amdahl
-Amazan
-Alway
-Alvira
-Aluise
-Altomonte
-Altidor
-Altadonna
-Alstott
-Alsina
-Alshouse
-Alpizar
-Alonge
-Almestica
-Almaras
-Almand
-Allwardt
-Allum
-Allgier
-Allerman
-Alkbsh
-Alier
-Aliano
-Alfson
-Alfero
-Alexender
-Alessandro
-Alesci
-Aldas
-Aldaba
-Alcide
-Alby
-Albelo
-Albares
-Albair
-Albach
-Alamin
-Alagna
-Akuna
-Akright
-Akim
-Akes
-Aken
-Akbari
-Akau
-Aitkins
-Aita
-Airola
-Aines
-Aimone
-Ailts
-Ahrent
-Ahne
-Ahlman
-Ahlin
-Aguire
-Agor
-Agner
-Agerter
-Age
-Agcaoili
-Afzal
-Afshari
-Affleck
-Aduddell
-Adu
-Adolfo
-Adolf
-Adjei
-Adham
-Aderholdt
-Adens
-Adee
-Adauto
-Acocella
-Ackroyd
-Ackers
-Acken
-Ack
-Achter
-Acheampong
-Aceret
-Accornero
-Abts
-Abruzzino
-Abrecht
-Abramov
-Aboud
-Abo
-Abes
-Abed
-Abby
-Aamot
-Aalbers
-Zwolensky
-Zwiener
-Zwanzig
-Zvorsky
-Zutter
-Zurowski
-Zupfer
-Zunker
-Zumbach
-Zubik
-Zubiate
-Zottola
-Zoss
-Zorman
-Zonker
-Zomer
-Zollo
-Zolezzi
-Znidarsic
-Zmijewski
-Zmich
-Zlaten
-Zisk
-Zinter
-Zingler
-Zindel
-Zimlich
-Zillman
-Zilliox
-Zigich
-Ziesemer
-Zielonka
-Ziebart
-Zia
-Zhuang
-Zeyer
-Zerkle
-Zepf
-Zenisek
-Zempel
-Zemaitis
-Zeltner
-Zellman
-Zelasco
-Zeisler
-Zeinert
-Zeier
-Zegarra
-Zeeman
-Zedaker
-Zecher
-Zeagler
-Zbinden
-Zaunbrecher
-Zarlengo
-Zannino
-Zanni
-Zangara
-Zanetti
-Zanes
-Zanderigo
-Zanayed
-Zambito
-Zalusky
-Zakutney
-Zaiss
-Zahar
-Zagrodnik
-Zaeske
-Zadroga
-Zadeh
-Zacek
-Yzaquirre
-Yuro
-Yupe
-Yunt
-Yue
-Youns
-Youngerman
-Youkhana
-Yoshizumi
-Yoshiyama
-Yoshikawa
-Yoshihara
-Yore
-Yoneda
-Yoh
-Yepsen
-Yepiz
-Yentzer
-Yelin
-Yedid
-Yeddo
-Yeboah
-Yeah
-Yauck
-Yattaw
-Yarrow
-Yarosh
-Yarn
-Yanuaria
-Yanko
-Yampolsky
-Yamin
-Yamagata
-Yakow
-Yaegle
-Yacono
-Yacko
-Xayavong
-Wythe
-Wyrich
-Wydeven
-Wyandt
-Wurtzel
-Wurdeman
-Wunner
-Wulffraat
-Wujcik
-Wry
-Wrighton
-Wreath
-Wraight
-Wragge
-Woznick
-Woten
-Wormuth
-Woofter
-Woodmore
-Woode
-Womeldorff
-Wolvin
-Wolman
-Wolgast
-Wolfgramm
-Wojtas
-Wojenski
-Wohletz
-Woetzel
-Woelke
-Woelk
-Woehrle
-Wittlinger
-Wittke
-Witthuhn
-Witthoft
-Wittekind
-Witkus
-Witbeck
-Wist
-Wissinger
-Wisnoski
-Wisley
-Wishard
-Wish
-Wipperfurth
-Winterling
-Winterholler
-Winterfeld
-Winsman
-Winkenwerder
-Wingerson
-Winegard
-Windland
-Winchel
-Wilmott
-Willwerth
-Willougby
-Willinger
-Willims
-Williby
-Willian
-Williamon
-Willhelm
-Willging
-Willens
-Willenbring
-Willcott
-Willardson
-Wilhelmy
-Wildsmith
-Wildoner
-Wildberger
-Wikholm
-Wigner
-Wiglesworth
-Wiggett
-Wiget
-Wigdor
-Wieman
-Wied
-Wieboldt
-Widen
-Wickett
-Wickard
-Wichterman
-Wichland
-Wicher
-Whysong
-Whyms
-Whooper
-Whooley
-Whitver
-Whitmoyer
-Whitehorse
-Whitebear
-Whish
-Whippo
-Wheler
-Whelehan
-Wheetley
-Wheeland
-Wheelan
-Whatoname
-Whalan
-Weygandt
-Wexell
-Wetherald
-Westfahl
-Westerholm
-Westerheide
-Westenhaver
-Westen
-Wessendorf
-Wescom
-Werstein
-Wersal
-Werra
-Werntz
-Wernicki
-Wernett
-Werger
-Werber
-Wenskoski
-Wenk
-Wendzel
-Wendelboe
-Wenciker
-Wemhoff
-Welshans
-Welde
-Welby
-Welburn
-Weisfeld
-Weisenfels
-Weinreich
-Weikert
-Weiglein
-Weida
-Wegweiser
-Wegley
-Weflen
-Weeler
-Wedo
-Wedin
-Wedgewood
-Wedderspoon
-Wedd
-Weberg
-Weathington
-Wears
-Weakly
-Weafer
-Weaber
-Waz
-Waxler
-Wave
-Wauson
-Waugaman
-Waterer
-Wasmuth
-Washmuth
-Warters
-Warsaw
-Warns
-Warnken
-Warney
-Wariner
-Warchol
-Wansitler
-Wanless
-Wanker
-Wandrie
-Wandler
-Wanczyk
-Waltmann
-Waltersdorf
-Walsworth
-Walseth
-Walp
-Walner
-Walmer
-Walloch
-Wallinger
-Wallett
-Walkley
-Walkingstick
-Walentoski
-Walega
-Wale
-Waldock
-Waldenmyer
-Walde
-Waldbauer
-Walchak
-Wakayama
-Waiau
-Waddick
-Wacyk
-Vreeken
-Vrbka
-Vradenburg
-Vounas
-Votolato
-Vosquez
-Vosika
-Vorwald
-Vorse
-Voros
-Vorgas
-Vorel
-Voorhes
-Voncannon
-Volstad
-Volo
-Volkmer
-Volden
-Volbrecht
-Voisard
-Voetsch
-Voetberg
-Voeltner
-Voegeli
-Vock
-Vlloa
-Vivona
-Vivino
-Vivenzio
-Vitucci
-Vittitoe
-Viti
-Viteaux
-Vitatoe
-Viscome
-Virzi
-Virula
-Virrey
-Virella
-Virani
-Viox
-Violetta
-Vinall
-Villatora
-Vilcan
-Vik
-Vigen
-Vieths
-Vielman
-Vidra
-Vidot
-Vidalez
-Vicent
-Vibert
-Vibbard
-Veth
-Vestering
-Veshedsky
-Versoza
-Verrell
-Veroeven
-Vernola
-Vernia
-Verjan
-Verity
-Veriato
-Verhague
-Verdusco
-Verderosa
-Verderame
-Verdell
-Verch
-Verbeke
-Venture
-Veness
-Vener
-Vendrick
-Vences
-Vellucci
-Vellone
-Velk
-Vegh
-Vedia
-Vecchiarelli
-Vazzana
-Vaux
-Vaupel
-Vaudrain
-Vatalaro
-Vastano
-Vasso
-Vasiliou
-Vasher
-Vascones
-Vas
-Varuzzo
-Varrelman
-Varnedore
-Vari
-Varel
-Vanwright
-Vanvoorhees
-Vanvolkinburg
-Vantrump
-Vanstraten
-Vanstone
-Vansice
-Vanscoter
-Vanscoit
-Vanord
-Vanoosten
-Vannortwick
-Vannette
-Vannatten
-Vanloon
-Vanliere
-Vanis
-Vanhese
-Vangalder
-Vanelderen
-Vandre
-Vandover
-Vandinter
-Vandewalle
-Vandevander
-Vanderroest
-Vandermay
-Vanderloo
-Vanderlee
-Vanderlaan
-Vandergraph
-Vanderen
-Vandenbrink
-Vandenboom
-Vandenberge
-Vandel
-Vandegriff
-Vandale
-Vanbruggen
-Vanboerum
-Vanbelle
-Vanauker
-Vanasten
-Vanarsdall
-Vallerand
-Valladao
-Valis
-Valintine
-Valenziano
-Valentia
-Valensuela
-Vaisman
-Vahena
-Vaglienty
-Vacchiano
-Uziel
-Uyemura
-Utsler
-Usie
-Urzua
-Ureste
-Urby
-Urbine
-Urabe
-Uptgraft
-Unterzuber
-Untalan
-Ungerman
-Ungerland
-Underland
-Underberg
-Umholtz
-Umbright
-Ulwelling
-Ulstad
-Ulmen
-Ulcena
-Ulanski
-Uhlenkott
-Uher
-Uhas
-Uglow
-Ugland
-Uerkwitz
-Uccellini
-Tysarczyk
-Tyron
-Twymon
-Twohey
-Twisselman
-Twichell
-Tweten
-Tuzzolo
-Tuzzo
-Tutoky
-Tusler
-Turnner
-Turja
-Turick
-Turiano
-Tunnicliff
-Tummons
-Tumlison
-Tumaneng
-Tuder
-Tuczynski
-Tuchman
-Tubville
-Tsukiyama
-Tselee
-Truxon
-Truxler
-Trussler
-Trusler
-Trusillo
-Trudillo
-Trude
-Truchan
-Trowery
-Trotochaud
-Tropiano
-Tronstad
-Trolinger
-Trocinski
-Triveno
-Trites
-Triplet
-Trick
-Trichell
-Trichel
-Trevey
-Trester
-Treisch
-Treger
-Trefz
-Tredwell
-Trebbe
-Treakle
-Travillion
-Travillian
-Travaglio
-Trauscht
-Traube
-Trapper
-Tranum
-Trani
-Train
-Towlson
-Towlerton
-Towey
-Tovmasyan
-Tousley
-Tourtellotte
-Toure
-Toulson
-Totin
-Tosti
-Tosado
-Toruno
-Torrisi
-Torris
-Torrent
-Torrado
-Torner
-Torino
-Torell
-Topolansky
-Tooze
-Toot
-Tontarski
-Tonnessen
-Tonneson
-Tones
-Tomisin
-Tomilson
-Tomasetti
-Tolomeo
-Tollman
-Tolhurst
-Tolchin
-Tolbent
-Toher
-Toffton
-Toepel
-Toelkes
-Todorovich
-Todisco
-Toczek
-Tockey
-Tochterman
-Tobiasson
-Tlucek
-Titzer
-Titman
-Tise
-Tippets
-Tio
-Tingwald
-Timmel
-Timbrook
-Tilmon
-Tijerino
-Tigerino
-Tigano
-Tieken
-Tiegs
-Tiefenbrun
-Tichacek
-Tica
-Thurmer
-Thuotte
-Thramer
-Thoroughman
-Thornock
-Thorndyke
-Thongchanh
-Thomen
-Thoe
-Thody
-Thigpin
-Thielemier
-Thi
-Therres
-Thal
-Thakur
-Tewes
-Teves
-Tesmer
-Teslow
-Tesler
-Teruel
-Terron
-Terris
-Terre
-Terrasi
-Terrace
-Tero
-Terman
-Tereska
-Teresi
-Tepp
-Teo
-Tenzer
-Tennille
-Tennies
-Tencza
-Tenamore
-Tejadilla
-Tecklenburg
-Techaira
-Tayse
-Tawwater
-Tavolacci
-Taverner
-Taurino
-Taulman
-Taublee
-Tauarez
-Tattershall
-Tatsuta
-Tatsuno
-Taschner
-Tasby
-Tarrats
-Tarrants
-Tarone
-Tarley
-Taraborelli
-Taper
-Tanniehill
-Tanks
-Tankard
-Tangri
-Tanequodle
-Tamporello
-Tamer
-Tamburro
-Tambunga
-Taliman
-Talib
-Talas
-Takala
-Takach
-Taiwo
-Taibi
-Taghon
-Tagaban
-Tadena
-Taccone
-Taccetta
-Tabatabai
-Szyszka
-Szmalc
-Szerszen
-Szczepanik
-Szarek
-Szafraniec
-Szafran
-Szablewski
-Syta
-Sysyn
-Syndergaard
-Symanski
-Sylvian
-Syck
-Swymer
-Swoffer
-Swoager
-Swiggum
-Swiat
-Swetnam
-Swestka
-Swentzel
-Sweetwood
-Swedenburg
-Swearingin
-Swartzendrube
-Swarm
-Swant
-Swancey
-Sverchek
-Svenson
-Sutor
-Suthoff
-Suthar
-Susong
-Suskin
-Surra
-Surano
-Supplee
-Supino
-Sundborg
-Summons
-Summerour
-Sumers
-Sultzer
-Sulouff
-Sulecki
-Suhoski
-Suhar
-Sugerak
-Suganuma
-Suddoth
-Sudberry
-Sud
-Stymiest
-Stvrestil
-Stuve
-Sturrup
-Sturmer
-Stumer
-Stuhlsatz
-Stuenkel
-Studier
-Stuczynski
-Stubbolo
-Struebing
-Struchen
-Strozzi
-Strowder
-Strohbehn
-Stroer
-Strobridge
-Strobeck
-Stritmater
-Strike
-Strieter
-Strickling
-Streu
-Streifel
-Straugter
-Stratakos
-Strasburger
-Straface
-Straatmann
-Stpeters
-Stovel
-Stoudenmire
-Stotsky
-Stothart
-Storz
-Stormes
-Storman
-Stoppel
-Stooks
-Stonelake
-Stonebrook
-Stombaugh
-Stoltzman
-Stolsig
-Stolpe
-Stoglin
-Stoffle
-Stodgell
-Stocke
-Stirna
-Stipetich
-Stinner
-Stimpert
-Stimer
-Stilphen
-Stikeleather
-Stifel
-Stiely
-Stielau
-Stieger
-Stidman
-Stickrath
-Stickman
-Stickels
-Stgerard
-Sternberger
-Stergios
-Stepien
-Stepanski
-Stent
-Stenkamp
-Stenehjem
-Stempel
-Stemmer
-Stelb
-Steiskal
-Steinmuller
-Steinmacher
-Steinhorst
-Steinhaus
-Steinharter
-Steinhagen
-Steinburg
-Steifle
-Stefanick
-Stefanich
-Steeber
-Stay
-Stawarz
-Stavropoulos
-Staves
-Staup
-Stauch
-Staubs
-Stathopoulos
-Stathis
-Startz
-Starowitz
-Starowicz
-Starkie
-Starcic
-Stanely
-Standrod
-Standahl
-Stanczak
-Stample
-Stampka
-Stamer
-Stallins
-Stalford
-Stahoski
-Stagger
-Stader
-Staack
-Srsic
-Srey
-Squitieri
-Spyres
-Spuhler
-Sprouffske
-Sprosty
-Sprinzl
-Springle
-Spoth
-Spletzer
-Spizer
-Spitsberg
-Spitale
-Spiroff
-Spirer
-Spiotta
-Spinola
-Spingler
-Spike
-Spierling
-Spickler
-Sphon
-Spettel
-Sperle
-Sperka
-Sperberg
-Speltz
-Spaw
-Spasiano
-Spare
-Spancake
-Spagna
-Sowerby
-Sovern
-Souvannasap
-Southerly
-Sous
-Sourwine
-Soult
-Sotiriou
-Sothman
-Sota
-Sortore
-Sorley
-Sorin
-Sorells
-Soratos
-Soose
-Soong
-Sonsino
-Sonnabend
-Sonia
-Songster
-Sondrol
-Sondergaard
-Soltau
-Solinski
-Solinger
-Solid
-Sojda
-Sohns
-Softleigh
-Soffel
-Soffa
-Sodaro
-Sodano
-Soda
-Sobran
-Sobczynski
-Sneeden
-Snater
-Snair
-Smoker
-Smithingell
-Smink
-Smiles
-Smialek
-Smetak
-Smejkal
-Smeck
-Smaldone
-Sluyter
-Slot
-Slostad
-Slingerland
-Sliffe
-Slemmer
-Slawter
-Slavinski
-Slagowski
-Slaff
-Skuse
-Skulski
-Skornia
-Skolfield
-Skogstad
-Skinkle
-Skidgel
-Skeffington
-Skeets
-Skeele
-Skarupa
-Skarphol
-Skaare
-Sjolander
-Sjaarda
-Sitts
-Sitterud
-Sitt
-Sissell
-Siprasoeuth
-Sipper
-Sipla
-Sipkema
-Sinning
-Sinitiere
-Single
-Simmens
-Simm
-Simiskey
-Simelton
-Silverthorne
-Silvernale
-Silvan
-Siliado
-Silbaugh
-Siket
-Siker
-Sigurdson
-Signore
-Sigers
-Siffert
-Sieving
-Sieverding
-Sietsema
-Siering
-Sienicki
-Siemsen
-Siemonsma
-Siemering
-Sielski
-Siedlecki
-Siebers
-Sidbury
-Sickman
-Sickinger
-Sicilian
-Sible
-Sibilio
-Sibble
-Shutler
-Shurgot
-Shuping
-Shulda
-Shula
-Shrieves
-Shreiner
-Shreckengost
-Shreck
-Showes
-Showe
-Shoupe
-Shoumaker
-Shortey
-Shorten
-Shorrock
-Shorkey
-Shones
-Shockency
-Shoats
-Shivel
-Shipmen
-Shinsel
-Shindledecker
-Shinabarger
-Shiminski
-Shiloh
-Shillingford
-Shigo
-Shifman
-Shiers
-Shibuya
-Shewchuk
-Shettsline
-Shetter
-Shetrawski
-Sheffel
-Sheesley
-Sheekey
-Sheeder
-Sheares
-Shauger
-Sharko
-Shanna
-Shankin
-Shani
-Shandley
-Shanaa
-Shammo
-Shamlin
-Shambrook
-Shadow
-Shackley
-Sgambati
-Sferrazza
-Seydel
-Sewald
-Sevenbergen
-Sevaaetasi
-Seumanu
-Seuell
-Settler
-Setterberg
-Setera
-Sesso
-Sesay
-Servoss
-Servino
-Serpe
-Sermeno
-Serles
-Serena
-Serapio
-Senske
-Semmler
-Seminole
-Semel
-Selvaggi
-Sellai
-Selissen
-Seling
-Seleg
-Seledon
-Selbo
-Selan
-Sekuterski
-Sekula
-Seiwell
-Seivert
-Seise
-Sein
-Seils
-Seier
-Seidita
-Seiberling
-Seher
-Segroves
-Segoviano
-Segel
-Segee
-Seftick
-Sees
-Seekell
-Seegobin
-Seebold
-Sedlack
-Sedbrook
-Section
-Secrease
-Secore
-Seckler
-Seastrand
-Seargent
-Seacrist
-Seachord
-Seabrooke
-Scudieri
-Scrim
-Scozzafava
-Scotten
-Sconce
-Scircle
-Scipioni
-Sciarretta
-Sciallo
-Schwingler
-Schwinghammer
-Schwingel
-Schwiesow
-Schweinfurth
-Schweda
-Schwebke
-Schwarzkopf
-Schwander
-Schwaller
-Schwall
-Schut
-Schurkamp
-Schunter
-Schulder
-Schuenemann
-Schue
-Schuckman
-Schuchart
-Schroff
-Schoville
-Schorzman
-Schorder
-Schooner
-Schones
-Scholler
-Schofell
-Schoewe
-Schoeninger
-Schoenhals
-Schoenbeck
-Schoefield
-Schoberg
-Schnittker
-Schneidermann
-Schneckloth
-Schnebly
-Schnathorst
-Schnarrs
-Schnakenberg
-Schmitzer
-Schmidbauer
-Schmeeckle
-Schmeckpeper
-Schmandt
-Schmalzried
-Schmal
-Schlinker
-Schliep
-Schlette
-Schlesier
-Schleig
-Schlehuber
-Schlarbaum
-Schlaffer
-Schkade
-Schissel
-Schindeldecke
-Schimandle
-Schiermeier
-Scheunemann
-Scherrman
-Schepp
-Schemmer
-Schelp
-Schehr
-Schayer
-Schaunaman
-Schauland
-Schatzel
-Scharrer
-Scharping
-Scharpf
-Scharnberg
-Scharmer
-Scharbor
-Schalow
-Schaf
-Schader
-Schacter
-Scelfo
-Scarpello
-Scarlet
-Scaringe
-Scarduzio
-Scamardo
-Scaman
-Sbano
-Sayman
-Saylee
-Saxena
-Sawdey
-Sawada
-Savitsky
-Savickas
-Savic
-Savaglio
-Sauriol
-Sauret
-Saulo
-Satar
-Sasportas
-Sarvas
-Sarullo
-Sarsfield
-Sarne
-Sarmento
-Sarjent
-Sarellano
-Sardin
-Saputo
-Santheson
-Santellana
-Santarsiero
-Santago
-Sansalone
-Sanos
-Sanna
-Sanko
-Sanker
-Sanghani
-Sangalli
-Sandven
-Sandmann
-Sandhoff
-Sandelius
-Sandall
-Sanchious
-Sancedo
-Sance
-Sampogna
-Sampilo
-Sampayan
-Sampaia
-Sampaga
-Samo
-Samlal
-Samela
-Samec
-Samad
-Salzberg
-Salway
-Salwasser
-Salveson
-Salvemini
-Salus
-Salquero
-Salowitz
-Salizzoni
-Salina
-Salin
-Salimi
-Salgero
-Salemi
-Salato
-Salassi
-Salamacha
-Salahubdin
-Salada
-Saintignon
-Saintamand
-Saines
-Sahl
-Saha
-Sagona
-Sagedahl
-Saffel
-Saemenes
-Sadow
-Sadlow
-Sadger
-Sacramento
-Sackal
-Sachtleben
-Sabota
-Sabot
-Sabe
-Sabata
-Sabastian
-Sabad
-Rzepka
-Ryzinski
-Rytuba
-Ryon
-Rynes
-Rykiel
-Rykert
-Rykard
-Rydolph
-Rydell
-Ruzicki
-Rutko
-Rutenbar
-Rustrian
-Rusinski
-Rushmore
-Rushenberg
-Rushen
-Ruschak
-Rury
-Ruper
-Ruotolo
-Rummerfield
-Rumer
-Rumbolt
-Rulon
-Ruleman
-Rufe
-Rudo
-Rudkin
-Rudick
-Rubinich
-Rubidoux
-Rubero
-Roys
-Rowman
-Rovere
-Rousu
-Rouillier
-Rotton
-Rotondi
-Rothenbach
-Roszell
-Rossotto
-Rossmiller
-Rossey
-Roshannon
-Rosenfeldt
-Roscioli
-Rosander
-Rorrer
-Rorex
-Ropes
-Ropac
-Rooth
-Roorda
-Ronsani
-Ronne
-Rong
-Ronfeldt
-Rondy
-Romp
-Romon
-Romness
-Romm
-Romera
-Romeiro
-Rombach
-Romar
-Romansky
-Romagnoli
-Rom
-Rolson
-Rojos
-Rohanna
-Rogstad
-Rogillio
-Rogg
-Rogacki
-Roffman
-Roethle
-Roeth
-Roetcisoender
-Rodibaugh
-Roderiques
-Rodenburg
-Rodemeyer
-Rodberg
-Rockovich
-Rocher
-Roccio
-Robeck
-Robe
-Robayo
-Robar
-Rizzardo
-Rivie
-Rival
-Ritterbush
-Ritchko
-Ritchhart
-Ristig
-Rishty
-Rippstein
-Rippelmeyer
-Rioseco
-Ringwald
-Ringquist
-Ringham
-Rinella
-Rineer
-Rimple
-Rilling
-Rill
-Rijo
-Riihimaki
-Riglos
-Riggens
-Rigaud
-Rigali
-Rietz
-Rietdorf
-Riessen
-Riesgraf
-Rienstra
-Riekena
-Riedle
-Riedinger
-Rieb
-Rickenbaker
-Richcreek
-Richbourg
-Riccelli
-Riberdy
-Ribb
-Rhodie
-Rheome
-Rheinhardt
-Rezai
-Reynalds
-Reyman
-Reyez
-Rewenko
-Reville
-Revello
-Revelez
-Reul
-Resue
-Restuccia
-Replenski
-Reon
-Rentar
-Rensberger
-Rens
-Rennaker
-Renell
-Remson
-Rell
-Relacion
-Rekuc
-Reker
-Reitler
-Reischl
-Reints
-Reinoehl
-Reinart
-Reimund
-Reimold
-Reikowsky
-Reiger
-Reifman
-Reicks
-Reichler
-Reichhardt
-Rehling
-Regos
-Regino
-Regalbuto
-Reffner
-Reents
-Reenders
-Reeks
-Reek
-Reeck
-Redmer
-Redican
-Reddoch
-Reddig
-Reddicks
-Redbird
-Rectenwald
-Recek
-Rebillard
-Rebich
-Rebeck
-Reagon
-Raziano
-Raymore
-Ravenel
-Ravel
-Rause
-Rauschenbach
-Rauer
-Rauchwerger
-Ratelle
-Rasinski
-Rasbury
-Rardon
-Rapson
-Rapkin
-Raoof
-Rannells
-Ranke
-Rangitsch
-Rangasammy
-Randt
-Ran
-Ramser
-Ramsaroop
-Ramsahai
-Ramrez
-Rampley
-Ramirec
-Ramesh
-Ralbovsky
-Rakoczy
-Rakoci
-Rajwani
-Rajaratnam
-Raiden
-Rahmani
-Ragno
-Raghunandan
-Ragas
-Ragar
-Rafuse
-Radvany
-Rados
-Radmacher
-Radick
-Radecki
-Raczynski
-Rachell
-Qureshi
-Quirin
-Quire
-Quintona
-Quinnett
-Quinalty
-Quiambao
-Quella
-Quatraro
-Quartararo
-Qualle
-Qin
-Pytko
-Pyer
-Pyanowski
-Puzio
-Pushcar
-Purviance
-Purtlebaugh
-Pupo
-Pulte
-Pulse
-Pullom
-Pullings
-Pullano
-Pulkkinen
-Puliafico
-Pulfrey
-Pujols
-Puhala
-Puchalla
-Pucciarelli
-Prutzman
-Prutt
-Pruneau
-Prucha
-Provitt
-Protin
-Prose
-Proco
-Proa
-Prisk
-Prioletti
-Priode
-Prinkey
-Princiotta
-Prich
-Pribnow
-Prial
-Preyer
-Prestino
-Pressimone
-Preskitt
-Preli
-Preissler
-Prehoda
-Predovich
-Precise
-Prazenica
-Prawdzik
-Prast
-Pozzobon
-Pozos
-Powles
-Pov
-Poullard
-Pouch
-Potucek
-Postert
-Posten
-Posson
-Posa
-Portuondo
-Porten
-Porst
-Poree
-Pora
-Poque
-Popiolek
-Poot
-Poock
-Pongkhamsing
-Ponessa
-Pone
-Poncio
-Polumbo
-Pollutro
-Pollet
-Pollen
-Poljak
-Polemeni
-Pokswinski
-Poisel
-Poette
-Poelman
-Pody
-Podewils
-Podaras
-Pocius
-Pobanz
-Plympton
-Ply
-Plush
-Plume
-Pluff
-Plues
-Plue
-Plona
-Plexico
-Plew
-Pleiss
-Pleil
-Pleasanton
-Plattsmier
-Plathe
-Plankey
-Plahs
-Plagge
-Placker
-Placha
-Pizira
-Piwowar
-Piwetz
-Pittelkow
-Pitta
-Pithan
-Pitcherello
-Pisciotti
-Pipilas
-Pintea
-Pinta
-Pinkstaff
-Pinkos
-Pinc
-Pilotte
-Pillo
-Pihl
-Pignotti
-Piggs
-Pietrzyk
-Piermont
-Pieczynski
-Piechowski
-Piech
-Pickersgill
-Picetti
-Picciuto
-Piccinini
-Picarello
-Picardo
-Picado
-Piantanida
-Pianka
-Pian
-Phothirath
-Phippard
-Philman
-Philipson
-Philavanh
-Phelts
-Phanor
-Phanco
-Pflughoeft
-Pflugh
-Pfliger
-Pfeister
-Pfeifle
-Peyre
-Peyatt
-Pettine
-Pettett
-Petru
-Petronio
-Petricka
-Petrak
-Petko
-Petitto
-Petersson
-Pesnell
-Peshek
-Pesh
-Pescador
-Perze
-Perteet
-Pertee
-Pert
-Perschbacher
-Perruzzi
-Perrish
-Perrigan
-Perriello
-Perr
-Perozo
-Perlich
-Perking
-Perkes
-Perfater
-Perce
-Pepez
-Peon
-Penunuri
-Penuel
-Penso
-Pennisi
-Penkins
-Penkalski
-Pendon
-Pellon
-Pellissier
-Pelino
-Pel
-Peick
-Peguese
-Peggs
-Pefanis
-Peeters
-Peedin
-Peduto
-Pedulla
-Pedrozo
-Pedrotti
-Pedroncelli
-Pedrogo
-Pedri
-Pedregon
-Pederzani
-Pedde
-Pecukonis
-Peckler
-Pecka
-Pecha
-Pecci
-Peatman
-Peals
-Pazo
-Paye
-Pawlusiak
-Pawlitschek
-Pavlosky
-Pavlo
-Paveglio
-Paulman
-Paukstis
-Pauk
-Patts
-Patter
-Patriss
-Patneaude
-Paszek
-Paswaters
-Pastula
-Pastuch
-Pastel
-Passy
-Passarella
-Pasquin
-Pasqualetti
-Pasqual
-Pascuzzi
-Pasceri
-Parviainen
-Parral
-Parolini
-Parmele
-Parma
-Parlavecchio
-Parfitt
-Parez
-Pardieck
-Pardew
-Parda
-Paraz
-Parat
-Papay
-Paparello
-Papaioannou
-Paolello
-Pansini
-Panelli
-Panell
-Pander
-Pancholi
-Panaro
-Panagiotopoul
-Palomarez
-Palmrose
-Palmisciano
-Palmese
-Pallotto
-Palleschi
-Palk
-Palhegyi
-Palenzuela
-Paleaae
-Palczynski
-Palakiko
-Palaia
-Paith
-Pagonis
-Pago
-Pagliuca
-Pagliari
-Paganini
-Padovani
-Padfield
-Padamadan
-Pacquette
-Paco
-Packwood
-Pachero
-Pachar
-Pacewicz
-Paasch
-Pa
-Ozols
-Ozga
-Ozenne
-Oxman
-Overpeck
-Overbeek
-Overbee
-Oulette
-Otsu
-Otremba
-Otool
-Otar
-Otanicar
-Osumi
-Osucha
-Ostrov
-Osthoff
-Ostertag
-Ostergard
-Ostaba
-Ospital
-Ososkie
-Osofsky
-Osisek
-Oshinsky
-Orzalli
-Orwin
-Ortwein
-Ortuno
-Orts
-Ortell
-Orpen
-Ornelaz
-Orewiler
-Ores
-Ordones
-Opunui
-Oppenlander
-Opoien
-Opalka
-Ooley
-Ontko
-Ondrey
-Omura
-Omtiveros
-Omland
-Olup
-Olthoff
-Olsten
-Ollila
-Olivia
-Olinsky
-Olinick
-Oleksa
-Olejarz
-Oldakowski
-Okoronkwo
-Okins
-Ohmer
-Ohlsson
-Oherron
-Oheron
-Ohanian
-Oganesian
-Ogaldez
-Oest
-Oehlenschlage
-Oedekerk
-Odon
-Odekirk
-Ocran
-Oconor
-Obrzut
-Obrist
-Obringer
-Oborny
-Oblander
-Obi
-Oberley
-Oberer
-Obeng
-Oatridge
-Oajaca
-Nypaver
-Nuzzi
-Nuzback
-Nuxoll
-Nussbaumer
-Nurmi
-Nuhn
-Nugen
-Nuara
-Nquyen
-Nozicka
-Noxon
-Nowick
-Nowaczyk
-Novielli
-Novembre
-November
-Novas
-Noun
-Notto
-Notowich
-Norzagaray
-Norway
-Northover
-Northcross
-Norem
-Nordmann
-Nordenson
-Nolet
-Nojiri
-Nohel
-Noethiger
-Nodd
-Nitzel
-Nita
-Nisbit
-Nina
-Nikas
-Nigon
-Niglio
-Nighswander
-Nighbert
-Niemietz
-Niedzielski
-Niederkorn
-Niederhaus
-Niederer
-Nicometo
-Nicolaides
-Nickolich
-Nguyn
-Neyra
-Neymeyer
-Newmon
-Newgent
-Newbery
-Nevala
-Neuweg
-Neuhoff
-Neuhauser
-Neubecker
-Nettik
-Netters
-Nestingen
-Nesspor
-Nerad
-Nenez
-Neldon
-Neizer
-Neives
-Neils
-Neiger
-Neidich
-Neibert
-Negroni
-Neemann
-Needle
-Neeb
-Nedry
-Nedley
-Neas
-Naze
-Nazaroff
-Nayes
-Nayar
-Nattress
-Natonabah
-Nassr
-Nasseri
-Nassef
-Naso
-Narkier
-Naret
-Nardini
-Nardecchia
-Naragon
-Naputi
-Napierala
-Nanny
-Nanke
-Namdar
-Naji
-Naidoo
-Nahm
-Nahas
-Nagelschmidt
-Naes
-Naegeli
-Nacol
-Naclerio
-Nachor
-Nabozny
-Nabarrete
-Nab
-Myrlie
-Mykins
-Muzio
-Mutolo
-Muta
-Mustoe
-Muster
-Muske
-Muschamp
-Muscarello
-Musacchio
-Murzycki
-Murrufo
-Murnan
-Muraski
-Murany
-Murano
-Munzer
-Munis
-Munion
-Mumby
-Mumbower
-Mulrain
-Mullinex
-Mullineaux
-Mullennix
-Mullahey
-Mukhtar
-Muina
-Muha
-Muehlman
-Muccigrosso
-Mrozoski
-Mozier
-Mow
-Mova
-Moustafa
-Mousser
-Mouse
-Mousa
-Mouritsen
-Mourad
-Mottet
-Motten
-Motamedi
-Mostowy
-Mostafavi
-Mosiman
-Moscone
-Moscicki
-Mosbrucker
-Morva
-Mortinez
-Mortel
-Morsey
-Morrin
-Morren
-Morosco
-Morledge
-Morla
-Morisky
-Morishita
-Morisey
-Morgia
-Moretta
-Morera
-Morenz
-Mordue
-Mordhorst
-Mordaunt
-Morber
-Morawa
-Moravick
-Morarity
-Mooty
-Mooser
-Moock
-Moochler
-Montoure
-Montooth
-Montonez
-Montierth
-Monticello
-Monteverde
-Monterrano
-Montella
-Montecillo
-Monsrud
-Monsma
-Monserrat
-Monrreal
-Monro
-Monetti
-Mondok
-Mondella
-Moncion
-Monaldi
-Moltz
-Molon
-Mollicone
-Molle
-Moliterno
-Molinere
-Molinary
-Molesworth
-Moh
-Mogush
-Mogren
-Moellers
-Moeck
-Modert
-Mockbee
-Mocher
-Mochel
-Moc
-Moberley
-Moan
-Moallankamp
-Miyose
-Miyata
-Miyashita
-Miyagi
-Mitsuda
-Misumi
-Missel
-Miskelly
-Misiaszek
-Mirzadeh
-Mirto
-Mirsch
-Mirles
-Miolen
-Minzel
-Minutillo
-Minugh
-Mintzer
-Minskey
-Minnaert
-Minkoff
-Miniard
-Mingledorff
-Minas
-Minaai
-Milly
-Millinor
-Millie
-Millerd
-Millea
-Milkey
-Milham
-Milfeld
-Mileham
-Milas
-Milar
-Milak
-Mikulski
-Mihara
-Mihalek
-Mihalchik
-Mihal
-Mignot
-Mignano
-Mighty
-Miesse
-Mierzwinski
-Micthell
-Mickus
-Mickolick
-Mickiewicz
-Michlin
-Michelena
-Micha
-Miccio
-Micari
-Mezzatesta
-Mewbourn
-Meuse
-Meurin
-Metzker
-Mettling
-Metting
-Metters
-Metropoulos
-Metevia
-Mesteth
-Mesko
-Mesi
-Meserole
-Mervyn
-Mernin
-Mermelstein
-Merling
-Merli
-Merkowitz
-Merklin
-Merkerson
-Merica
-Merendino
-Mercury
-Meray
-Meranto
-Merancio
-Mensik
-Mense
-Menoni
-Mennie
-Mengsteab
-Menes
-Mend
-Mency
-Memolo
-Meltz
-Meling
-Melen
-Melcer
-Melamed
-Mekee
-Meiste
-Meise
-Meinhard
-Meierotto
-Mehok
-Meharg
-Meginnes
-Meenach
-Medicus
-Mediano
-Media
-Medell
-Mede
-Meddaugh
-Meconi
-Mech
-Mearse
-Meardon
-Mealor
-Meadville
-Meachen
-Mcvicar
-Mcsparin
-Mcrorie
-Mcrobbie
-Mcoy
-Mcowen
-Mcnorton
-Mcnertney
-Mcnamer
-Mcnail
-Mcmanamon
-Mcmain
-Mclyman
-Mcleland
-Mckirgan
-Mckew
-Mckevitt
-Mckercher
-Mckensie
-Mckeegan
-Mckeane
-Mckahan
-Mcinture
-Mcindoe
-Mcilvenny
-Mcillwain
-Mciff
-Mcgwin
-Mcguff
-Mcgrotty
-Mcgrone
-Mcgrant
-Mcgoogan
-Mcglon
-Mcgloin
-Mcgiveron
-Mcghehey
-Mcghay
-Mcgavin
-Mcgahen
-Mcfann
-Mcelwaine
-Mcelduff
-Mceachron
-Mcdilda
-Mcdermid
-Mcdannold
-Mcdale
-Mcculough
-Mccuien
-Mccrumb
-Mccrorey
-Mccreless
-Mccravy
-Mccourtney
-Mccorrison
-Mccorkell
-Mccorey
-Mcconney
-Mcconnaughhay
-Mccollester
-Mcclurkan
-Mccluer
-Mccloudy
-Mcclenaghan
-Mcclave
-Mcclarnon
-Mcclarin
-Mcclaney
-Mcclanan
-Mcclair
-Mcchristion
-Mccaskell
-Mccartha
-Mccarl
-Mccamant
-Mccalmont
-Mccalman
-Mccaine
-Mccahill
-Mccague
-Mcbrown
-Mcanany
-Mcalvain
-Mazzurco
-Mazuc
-Mazo
-Mazingo
-Mawhorter
-Mavro
-Mavraganis
-Mautner
-Mautino
-Mauceli
-Matzinger
-Maturi
-Matturro
-Mattlin
-Mattheis
-Matsuoka
-Matsuki
-Matro
-Matlack
-Matice
-Mathson
-Matheu
-Mathenia
-Math
-Matejka
-Mateja
-Matanane
-Masztal
-Mastropaolo
-Mastromarino
-Mastrolia
-Mastel
-Massy
-Massoud
-Massimino
-Maslanka
-Masini
-Mascioli
-Marzec
-Marvier
-Maruyama
-Marusarz
-Marum
-Martorella
-Martire
-Martinkus
-Martinas
-Martiez
-Marthe
-Marteney
-Marschall
-Marruffo
-Marrazzo
-Marples
-Marohl
-Marn
-Marlborough
-Markunas
-Marki
-Marjan
-Maritnez
-Marinkovic
-Marineau
-Margaitis
-Marentis
-Mare
-Marcou
-Marciel
-Marci
-Marchiori
-Marchello
-Marchell
-Marcelle
-Marcelin
-Marales
-Mapel
-Manzanarez
-Mantilia
-Mansmith
-Manon
-Mannschreck
-Mannick
-Mankiewicz
-Mankel
-Manila
-Manifold
-Manha
-Mangrich
-Mangiapane
-Mangiamele
-Manera
-Mandes
-Mandella
-Mandelik
-Mandaloniz
-Mand
-Mancusi
-Mancine
-Mana
-Mamula
-Mammoccio
-Malzhan
-Malzahn
-Malsom
-Maloon
-Malnar
-Mallone
-Mallinson
-Mallie
-Mallek
-Malle
-Malinoski
-Malinconico
-Malicoat
-Malicdem
-Malhi
-Malfatti
-Malandrino
-Malamud
-Malakowsky
-Makovec
-Makey
-Majercik
-Majer
-Majamay
-Maisenbacher
-Mainey
-Mailey
-Mailander
-Mahuna
-Mahomes
-Mahoe
-Mahnken
-Maheras
-Mahaxay
-Mahana
-Maham
-Magnia
-Magni
-Magnanti
-Magliano
-Magliacane
-Maglaughlin
-Magistrale
-Magierski
-Maggini
-Magano
-Mafnas
-Madren
-Mador
-Maderios
-Madena
-Maddron
-Madan
-Madalinski
-Macmanus
-Maclead
-Mackowski
-Mackinaw
-Mackessy
-Mackerl
-Macker
-Macivor
-Machold
-Machain
-Macedonio
-Macdiarmid
-Macchiaroli
-Macbean
-Macayan
-Macari
-Mabin
-Mabel
-Lyter
-Lyster
-Lysne
-Lynskey
-Lyness
-Lyndaker
-Lymaster
-Lykke
-Lyell
-Luxmore
-Luttmer
-Lutgen
-Lusignan
-Lupold
-Lungstrom
-Lunford
-Lundeby
-Lumbard
-Lule
-Lukaskiewicz
-Luinstra
-Luevand
-Luer
-Lueking
-Luehrs
-Luecking
-Ludvigson
-Ludgood
-Lucich
-Luchetti
-Lubman
-Lubic
-Lozito
-Lowhorn
-Lowd
-Loverich
-Loveman
-Lovas
-Lovaas
-Louvier
-Louthen
-Loury
-Loukanis
-Loughner
-Loughnane
-Louato
-Lotshaw
-Lother
-Lothamer
-Loter
-Losinski
-Losinger
-Loshek
-Losecco
-Lortie
-Lorin
-Lorent
-Lorello
-Loras
-Lorah
-Lopau
-Loosen
-Lontz
-Longpre
-Longie
-Loncaric
-Lombrana
-Lomba
-Lohrey
-Lohoff
-Logghe
-Loges
-Lofstead
-Lofft
-Loertscher
-Loeper
-Loeblein
-Lodato
-Lochen
-Lobbins
-Lobban
-Lizarrago
-Livigni
-Livernash
-Liukko
-Littich
-Litterer
-Littau
-Litchmore
-Lisy
-Lissy
-Lishman
-Lischak
-Lirag
-Liptow
-Lins
-Linkhart
-Linkert
-Lingren
-Lingelbach
-Lingel
-Lingad
-Linet
-Linegar
-Linebrink
-Lindroth
-Lindeland
-Lindboe
-Linardi
-Linard
-Ligman
-Liggans
-Lifland
-Liff
-Lieuallen
-Liesveld
-Liess
-Lienhard
-Liehr
-Liedy
-Liedke
-Liebau
-Lidtke
-Lidstrom
-Licano
-Libra
-Leys
-Leymeister
-Lewerke
-Lewand
-Levoci
-Leviton
-Levien
-Leveston
-Leverenz
-Levere
-Levangie
-Leuy
-Leukuma
-Lettman
-Letran
-Letlow
-Lethco
-Letersky
-Lestronge
-Lesso
-Lessey
-Leshem
-Lerud
-Leps
-Leonesio
-Leones
-Lento
-Lente
-Lennertz
-Lenior
-Lenhard
-Lenfest
-Lene
-Lendrum
-Lempicki
-Lemonier
-Lemle
-Lemkau
-Lemings
-Lem
-Lelli
-Lekas
-Leitten
-Leitheiser
-Leino
-Leiner
-Leinenbach
-Leidy
-Leidich
-Leid
-Leich
-Lehnhoff
-Leh
-Legum
-Legoullon
-Legeyt
-Legalley
-Legace
-Lefton
-Lefthand
-Leforge
-Lefore
-Lefleur
-Leerar
-Leef
-Leed
-Ledl
-Leddon
-Ledain
-Leckie
-Lecates
-Lebeouf
-Leben
-Lebeck
-Lebeaux
-Leban
-Leaverton
-Learman
-Leardi
-Leamy
-Lazare
-Lazarczyk
-Layssard
-Layson
-Layhew
-Layel
-Laychock
-Lawernce
-Lavzon
-Lavalla
-Lauterborn
-Laut
-Lauseng
-Lausen
-Laurino
-Lauri
-Laurenzano
-Laurenza
-Laundry
-Laumbach
-Lauinger
-Lauenroth
-Latzke
-Latulipe
-Lattig
-Latronica
-Latouf
-Latko
-Latiker
-Lathern
-Laterza
-Latchaw
-Lataquin
-Lasure
-Lashomb
-Lasell
-Lasasso
-Lartey
-Larriva
-Laro
-Lardner
-Lardieri
-Laprarie
-Lapping
-Lapitan
-Lapeyrolerie
-Lapar
-Lanzetta
-Lantis
-Lanka
-Lani
-Langshaw
-Langmyer
-Langin
-Langerman
-Langeland
-Langbein
-Landro
-Landrian
-Landmesser
-Landmann
-Landfair
-Landesberg
-Lanciotti
-Lamprey
-Lampey
-Lamos
-Lamora
-Lamoine
-Lamfers
-Lambka
-Lamance
-Lamana
-Laliotis
-Lajza
-Lajaunie
-Lainson
-Laher
-Lahar
-Lagrotta
-Lagrant
-Lagraize
-Lagnese
-Lafrazia
-Lafountaine
-Laflin
-Lafaso
-Lafarga
-Ladage
-Lacsamana
-Lacrosse
-Lacrone
-Lachowski
-Labruyere
-Labrake
-Labossiere
-Laba
-Laack
-Kyzar
-Kynard
-Kwek
-Kuzmin
-Kuttner
-Kusiak
-Kuser
-Kuse
-Kurtzer
-Kurtzeborn
-Kurpinski
-Kurohara
-Kuroda
-Kurnik
-Kurihara
-Kurdziel
-Kurban
-Kuras
-Kupper
-Kupferer
-Kupec
-Kunzelman
-Kunkler
-Kunin
-Kunesh
-Kumro
-Kumpf
-Kulon
-Kulka
-Kukucka
-Kuk
-Kuhse
-Kuhls
-Kuhlo
-Kuhar
-Kuerbitz
-Kuenzi
-Kuehneman
-Kudron
-Kuczenski
-Kuchle
-Kuchenmeister
-Kuchenbecker
-Kucan
-Kubu
-Kubsch
-Kubiszewski
-Kubish
-Kubicz
-Kubick
-Kubaska
-Kuarez
-Ksiazek
-Kshywonis
-Krzykowski
-Krzak
-Krysl
-Kruzewski
-Kruzan
-Krumrine
-Krumins
-Krucker
-Kroupa
-Krough
-Krotz
-Kronstedt
-Kromrey
-Krogstad
-Krogmann
-Kroeze
-Kroetz
-Kroc
-Kristianson
-Kristen
-Kriser
-Krips
-Kringas
-Kriete
-Kreuter
-Kretschmann
-Kresha
-Kreidel
-Kregger
-Kreatsoulas
-Kratochwil
-Krasovec
-Krase
-Krapf
-Kranawetter
-Krajnik
-Kozubal
-Koyanagi
-Kowalkowski
-Kovarovic
-Kovalcin
-Kou
-Kotzen
-Kotnik
-Kostelecky
-Kostek
-Kostecki
-Kostal
-Kosse
-Koslowski
-Koskie
-Kosicki
-Koshar
-Kosek
-Kortright
-Korpal
-Kornhauser
-Kormos
-Korinek
-Korgie
-Kordsmeier
-Kordish
-Koral
-Kops
-Kopps
-Kopperud
-Koppang
-Kopfer
-Kopet
-Kook
-Konno
-Konik
-Konek
-Konefal
-Komm
-Komis
-Komer
-Komarek
-Kolsrud
-Kolp
-Kolopajlo
-Kollmorgen
-Kolis
-Kolesnik
-Koles
-Kolding
-Kohs
-Kohlhoff
-Kohatsu
-Kohara
-Koetter
-Koestler
-Koepsel
-Koeppe
-Koenigsman
-Koelewyn
-Koe
-Kodadek
-Koci
-Kochler
-Kocab
-Kobylinski
-Kobryn
-Koberg
-Knower
-Knollenberg
-Knock
-Knizley
-Kniss
-Knies
-Knezovich
-Knesek
-Knepel
-Knehans
-Kneeskern
-Knaust
-Knapke
-Kmet
-Kluz
-Klukas
-Kloska
-Klopf
-Klinglesmith
-Klinekole
-Klimes
-Kliment
-Klimaszewski
-Klepfer
-Klepacki
-Klepac
-Klemash
-Kleinkopf
-Kleinknecht
-Kleimola
-Kleiboeker
-Klei
-Klehn
-Klegin
-Klavuhn
-Klauer
-Klasinski
-Klasing
-Klarr
-Klapec
-Klaass
-Klaameyer
-Kjelland
-Kiyuna
-Kitching
-Kistle
-Kissi
-Kishi
-Kirvin
-Kirtner
-Kirovac
-Kirnon
-Kirkby
-Kiritsy
-Kirchgesler
-Kippley
-Kipping
-Kinzig
-Kins
-Kinnare
-Kinna
-Kingcade
-Kinatyan
-Kimme
-Kimbrow
-Kimbril
-Kilzer
-Kiltz
-Killmer
-Killibrew
-Killeagle
-Kilger
-Kiles
-Kievit
-Kientzy
-Kielty
-Kiekbusch
-Kiehne
-Kiefert
-Khou
-Khiev
-Khat
-Khare
-Keywan
-Keyt
-Kevin
-Keville
-Kevern
-Keuler
-Ketola
-Ketelaar
-Kertis
-Kerson
-Kernen
-Kerkman
-Kerker
-Keogan
-Kenwood
-Kenne
-Kenaan
-Kempler
-Kempisty
-Kempfer
-Kempen
-Kemmerlin
-Kelter
-Kelman
-Kellie
-Keliihoomalu
-Keleman
-Kekiwi
-Keiswetter
-Keiss
-Keilty
-Keidong
-Kegel
-Keets
-Keeneth
-Keefner
-Kedzierski
-Kebort
-Keate
-Keat
-Kazmorck
-Kazi
-Kaz
-Kawachi
-Kaushiva
-Kauk
-Katzner
-Katzmark
-Katzen
-Katsuda
-Kats
-Kater
-Katen
-Kasting
-Kasserman
-Kassay
-Kassabian
-Kasprowicz
-Kasperek
-Kasowski
-Kasmir
-Kaska
-Kasik
-Kascak
-Karth
-Karsnak
-Karshner
-Karsh
-Karmel
-Karlstad
-Karley
-Karins
-Karimi
-Karcich
-Karch
-Karapetyan
-Karakas
-Kapsalis
-Kappeler
-Kapke
-Kaperonis
-Kapahu
-Kanthak
-Kansky
-Kansas
-Kanoy
-Kanno
-Kannady
-Kandarian
-Kanai
-Kanae
-Kanaan
-Kamphoefner
-Kammler
-Kaminetzky
-Kaminaka
-Kamienski
-Kamaunu
-Kamakea
-Kama
-Kaltefleiter
-Kaloustian
-Kaloi
-Kallmeyer
-Kalisch
-Kalinski
-Kaliher
-Kalgren
-Kalfas
-Kales
-Kalafatis
-Kagle
-Kadish
-Kachermeyer
-Kabina
-Kaawa
-Kaaua
-Kaatz
-Juvera
-Jutte
-Justen
-Jusko
-Juriga
-Jure
-Jungquist
-Jungbluth
-Juneja
-Juncaj
-Juliet
-Juhas
-Juenger
-Juell
-Jucean
-Jubinville
-Jovich
-Jorres
-Joris
-Jore
-Jonhson
-Joneson
-Jonassen
-Jolissaint
-Jointer
-Johnny
-Johengen
-Johar
-Joh
-Joern
-Jodway
-Jobs
-Joanette
-Jirik
-Jirasek
-Jipson
-Jinkerson
-Jinkens
-Jiminian
-Jimeno
-Jiau
-Jevnikar
-Jessel
-Jerauld
-Jephson
-Jentzen
-Jenkerson
-Jenista
-Jenifer
-Jemmett
-Jelovich
-Jehlicka
-Jeffris
-Jedziniak
-Jeantet
-Jeanclaude
-Jayme
-Javor
-Javaux
-Jaurigue
-Jaureguy
-Jarvinen
-Jarocki
-Japp
-Janszen
-Jansons
-Jans
-Jankauskas
-Janka
-Janhunen
-Janeczek
-Jandrin
-Janczewski
-Janack
-Jamir
-Jakuboski
-Jakubik
-Jakubek
-Jahnel
-Jageman
-Jaenicke
-Jacquem
-Jacquay
-Jaconski
-Jacobellis
-Jablon
-Iyo
-Ivancevic
-Iurato
-Iulianetti
-Itri
-Issler
-Isla
-Isip
-Ishmon
-Ishizu
-Isgrigg
-Iseri
-Iseli
-Iseley
-Isbrecht
-Isassi
-Isaiah
-Irsik
-Irias
-Inzana
-Intveld
-Intrieri
-Interdonato
-Instasi
-Inscho
-Ingwell
-Ingebretsen
-Inga
-Inda
-Incle
-Inabinett
-Imus
-Immordino
-Imbesi
-Imbach
-Illsley
-Illig
-Ill
-Ignowski
-Idler
-Idleburg
-Ideue
-Ibara
-Ianuzzi
-Ianniello
-Iacovone
-Hyter
-Hyles
-Hyle
-Hykes
-Hyams
-Huxley
-Hutch
-Hustead
-Huscher
-Hurtz
-Hurse
-Hurren
-Huret
-Huotari
-Huntress
-Hunting
-Hunstiger
-Hunking
-Humpries
-Humbles
-Hum
-Hulvey
-Hulcy
-Huizinga
-Huhman
-Huhammad
-Hufty
-Huesso
-Hueftle
-Huebschman
-Huebert
-Hue
-Hudmon
-Huberman
-Hubbartt
-Hubach
-Hsueh
-Hrycenko
-Hrabal
-Hoxit
-Howsare
-Howman
-Howitt
-Howerter
-Houlton
-Houis
-Hottman
-Hotovec
-Hostin
-Hoshall
-Hosfeld
-Hoschek
-Horwath
-Horsely
-Horsburgh
-Horovitz
-Hornstrom
-Hornbarger
-Horkley
-Horka
-Horey
-Horeth
-Hordyk
-Horack
-Hoppin
-Hoppel
-Hopfensperger
-Hooey
-Hooe
-Honhart
-Honga
-Honeck
-Homs
-Hommell
-Homles
-Homen
-Home
-Holzner
-Holzheimer
-Holzem
-Holsopple
-Holsman
-Holowell
-Holliway
-Holizna
-Holesovsky
-Holderbaum
-Holbach
-Holan
-Hoit
-Hoist
-Hohenbrink
-Hoger
-Hofmans
-Hofheimer
-Hoffhines
-Hofbauer
-Hoesing
-Hoeschen
-Hoerter
-Hoepfner
-Hoemann
-Hodgeman
-Hockersmith
-Hochadel
-Hobock
-Hobel
-Hluska
-Hlavac
-Hisrich
-Hirsbrunner
-Hirpara
-Hire
-Hinners
-Hindbaugh
-Himenez
-Hilles
-Hilleary
-Hillanbrand
-Hillan
-Hildner
-Hilding
-Hilderbrandt
-Hiland
-Hightree
-Highnote
-Highberger
-Higgason
-Higaneda
-Hidinger
-Hickock
-Heymann
-Heusinkveld
-Heusel
-Heuring
-Hettler
-Hesseltine
-Hesselink
-Hesford
-Herth
-Herskovits
-Herschell
-Heroman
-Hernton
-Herne
-Hernandaz
-Hermez
-Hermanstorfer
-Herling
-Herke
-Herimann
-Heriford
-Hergenrader
-Herforth
-Herdes
-Hercher
-Herceg
-Herbick
-Hentze
-Henniger
-Henney
-Henness
-Hennegan
-Henkes
-Heneisen
-Henderickson
-Henard
-Hemrick
-Hemric
-Hempton
-Hemp
-Hemme
-Hemeon
-Hembry
-Hembrough
-Hembrey
-Helstad
-Helmus
-Hellings
-Hellgren
-Helie
-Helgert
-Helgerman
-Helger
-Helgason
-Helfinstine
-Helfgott
-Helfenstein
-Heldreth
-Helander
-Heitzmann
-Heisserer
-Heising
-Heisel
-Heinold
-Heinis
-Heinemeyer
-Heimark
-Heiliger
-Heiderman
-Heidenescher
-Heidebrink
-Hehir
-Hegan
-Heersink
-Heep
-Hedquist
-Heckford
-Hebets
-Heberly
-Heberle
-Hebenstreit
-Heavilin
-Heartz
-Heaphy
-Heany
-Hazer
-Hazelgrove
-Haynsworth
-Haydock
-Hawelu
-Havnen
-Havely
-Hauss
-Hausam
-Haumesser
-Hauman
-Haulk
-Hauley
-Haubrick
-Haubner
-Hattman
-Hatman
-Hatherly
-Hatchcock
-Hastert
-Hassenplug
-Hasko
-Haser
-Haselhuhn
-Hasberry
-Has
-Harthorne
-Harthcock
-Harriett
-Harouff
-Harootunian
-Harkavy
-Harell
-Hardridge
-Hardacre
-Harborth
-Haraguchi
-Haptonstall
-Happenny
-Hantman
-Hanses
-Hannemann
-Hannay
-Hannafin
-Hanle
-Hangartner
-Handerson
-Hanberg
-Hamzik
-Hamstra
-Hammans
-Hamano
-Halsema
-Halonen
-Halim
-Halek
-Haleamau
-Halama
-Hakeem
-Hainley
-Hagley
-Hagist
-Hagie
-Haggberg
-Haggan
-Hagele
-Hafenstein
-Hafemeister
-Hady
-Hadges
-Hadef
-Hackey
-Hach
-Habbyshaw
-Haaga
-Haab
-Gysin
-Gwirtz
-Guzzio
-Guzzardo
-Guzma
-Gutzmann
-Gutta
-Gutermuth
-Guterman
-Gutenberger
-Gurganious
-Gural
-Guppy
-Gunzalez
-Guntert
-Gums
-Gumb
-Gullotta
-Gullixson
-Gulling
-Gullace
-Guler
-Gulbransen
-Guitian
-Guinta
-Guinasso
-Guilboard
-Guichard
-Gugliotta
-Guglielmina
-Guggenheim
-Gugel
-Guetierrez
-Guethle
-Gueth
-Guerrido
-Gueits
-Gudenkauf
-Gucciardo
-Guarnera
-Guadagnolo
-Gsell
-Gschwend
-Grush
-Grupp
-Grundmann
-Grunau
-Grueninger
-Gruca
-Groupe
-Grotzinger
-Grotheer
-Grossmeyer
-Grossetete
-Grossack
-Gromer
-Groenke
-Groening
-Groehler
-Groebner
-Grochmal
-Groby
-Grobes
-Gritman
-Griswould
-Grisset
-Grime
-Griffo
-Griesinger
-Greuel
-Greth
-Gressman
-Gremel
-Greiwe
-Greis
-Greil
-Greife
-Greider
-Grefrath
-Greff
-Greenmyer
-Greany
-Grazioplene
-Gravlin
-Gravito
-Gravert
-Grav
-Grater
-Grap
-Granzin
-Grannum
-Granlund
-Grando
-Grammes
-Gramley
-Grambo
-Grala
-Grahl
-Gradwohl
-Gradillas
-Gradert
-Graciana
-Grabner
-Grabinski
-Grabinger
-Grabel
-Graaf
-Gouzy
-Gouger
-Gottron
-Gottardo
-Gothro
-Gosso
-Gossi
-Gorringe
-Gorneault
-Gorn
-Gormly
-Gorenflo
-Goral
-Gopen
-Goosey
-Goodnoe
-Goodie
-Goodhile
-Goodfield
-Goodard
-Gonneville
-Gongalez
-Gondola
-Gompf
-Gommer
-Gollehon
-Golie
-Golebiewski
-Goldinger
-Goldhaber
-Goldfeder
-Goldbaum
-Golaszewski
-Gojcaj
-Gogerty
-Goettsche
-Goethe
-Goessl
-Godson
-Godbe
-Gochanour
-Gocha
-Gnau
-Gnatek
-Glud
-Glorius
-Glordano
-Gloodt
-Glod
-Glinka
-Glime
-Gleim
-Gleicher
-Glazewski
-Glay
-Glasford
-Glascott
-Glanzman
-Glahn
-Gladish
-Gjerde
-Gizinski
-Gitzen
-Girsh
-Girote
-Girman
-Giovino
-Giovanini
-Giorgini
-Ginty
-Ginsky
-Ginnings
-Gingues
-Gingg
-Ginger
-Giner
-Gimm
-Gilruth
-Gillund
-Gillenwaters
-Gilday
-Gilcrest
-Gilcher
-Gilani
-Gigstad
-Giernoth
-Gienger
-Gidaro
-Giczewski
-Gibas
-Giarratano
-Giantonio
-Giannitti
-Giannetti
-Giampapa
-Giacopelli
-Giacone
-Giacomelli
-Gherman
-Ghera
-Ghan
-Gevorkyan
-Gettig
-Getchman
-Gesinski
-Gerundo
-Gershenson
-Gerraro
-Gernert
-Germundson
-Gerloff
-Gergel
-Gerdeman
-Gerdel
-Geraldo
-Geraldes
-Georgopoulos
-Georgis
-Georgevic
-Georgeson
-Genzel
-Genung
-Gentzler
-Gentili
-Genich
-Gelzinis
-Geiken
-Geidner
-Geidl
-Gehrer
-Geho
-Gehlbach
-Geeding
-Gedye
-Geberth
-Geathers
-Gearan
-Gealy
-Gazzola
-Gazella
-Gawrych
-Gavidia
-Gautam
-Gaumont
-Gaudenzi
-Gaucher
-Gaubert
-Gattas
-Gatley
-Gaters
-Gatchalian
-Gassel
-Gasman
-Gaslin
-Garufi
-Garriepy
-Garrell
-Garrand
-Garnto
-Garns
-Garno
-Garlinger
-Garivay
-Garhart
-Gardino
-Garcea
-Garbin
-Garaventa
-Garavaglia
-Garahan
-Garafano
-Garacia
-Gapen
-Ganiron
-Ganino
-Ganim
-Gangwish
-Gange
-Ganes
-Gandia
-Gandeza
-Gamlin
-Gamelin
-Galway
-Galow
-Gallob
-Gallishaw
-Gallinaro
-Gallicchio
-Gallese
-Gallero
-Gallegas
-Galeoto
-Galeas
-Galbreth
-Galbavy
-Galavis
-Galam
-Gajate
-Gair
-Gagney
-Gagel
-Gagarin
-Gaete
-Gaetani
-Gadbaw
-Gack
-Gabrysch
-Gabardi
-Fyksen
-Futrelle
-Furl
-Furches
-Furbeck
-Funnye
-Funicello
-Fumagalli
-Fullford
-Fulginiti
-Fulenwider
-Fulena
-Fugler
-Fuerstenberge
-Fuentas
-Fucillo
-Fuapau
-Fryberger
-Frusciante
-Fruehling
-Fromberg
-Froeschle
-Frock
-Fritzgerald
-Fritcher
-Frisbey
-Frihart
-Frieling
-Friedler
-Frie
-Fridell
-Freuden
-Freud
-Frett
-Frend
-Freiling
-Freije
-Freie
-Freidman
-Freibert
-Fregozo
-Freehling
-Fredo
-Fredlund
-Fredley
-Frede
-Freberg
-Frayre
-Fraunfelter
-Frascella
-Franssen
-Frankowski
-Francour
-Francom
-Francillon
-Francey
-Fraioli
-Fracassa
-Fostervold
-Fossey
-Foshay
-Foscue
-Forsell
-Forrister
-Forren
-Fornicola
-Fornes
-Forgie
-Forbs
-Foppe
-Foore
-Fontecchio
-Fongeallaz
-Follick
-Folio
-Foder
-Flyzik
-Fluhman
-Fluet
-Flow
-Floto
-Floros
-Floriano
-Floren
-Floran
-Floerke
-Flitcroft
-Flipp
-Flintroy
-Fleschner
-Flenner
-Fleeting
-Flamio
-Flaggs
-Flagge
-Fjeseth
-Fithen
-Fissell
-Fischman
-Fire
-Fioranelli
-Finseth
-Finocchiaro
-Finerty
-Fineman
-Finchman
-Filyaw
-Filipovich
-Filas
-Figler
-Figge
-Fiers
-Fiereck
-Fidell
-Ficorilli
-Fico
-Ficks
-Fickle
-Fialkowski
-Feyen
-Fetz
-Fetsko
-Ferullo
-Fertitta
-Ferriman
-Ferrebee
-Ferrand
-Ferrales
-Fernelius
-Fernberg
-Ferioli
-Fergoson
-Ferenc
-Fereira
-Fequiere
-Fennema
-Fenelus
-Fenelon
-Feneis
-Femrite
-Feltenberger
-Felsenthal
-Fels
-Felmet
-Felgenhauer
-Felarca
-Feiteira
-Feirer
-Feinen
-Feigenbaum
-Fehlinger
-Federle
-Fecko
-Feavel
-Featheringham
-Fayer
-Faxon
-Faurrieta
-Faull
-Fatone
-Fatigate
-Fasy
-Fasula
-Fassio
-Fass
-Farwick
-Farrill
-Farquer
-Farmwald
-Fantozzi
-Fanoele
-Fannell
-Fanizza
-Fandrich
-Fallo
-Fallago
-Faist
-Faines
-Faine
-Fahrendorff
-Faggard
-Faessler
-Fadale
-Fabrizi
-Eychaner
-Exon
-Exilus
-Ewig
-Evitts
-Evinger
-Everheart
-Everhardt
-Eveleth
-Eveleigh
-Eurbin
-Esworthy
-Estus
-Estock
-Esterbrook
-Essler
-Esque
-Espina
-Espalin
-Eschenburg
-Eschberger
-Esbenshade
-Ertley
-Erstad
-Erp
-Eroman
-Erno
-Ermatinger
-Erkkila
-Erkela
-Eriquez
-Erin
-Ericks
-Erdahl
-Ercolani
-Equils
-Eppinette
-Eon
-Enter
-Enke
-Engley
-Englebrecht
-Engleberg
-Englar
-Engelstad
-Engelsman
-Engellant
-Ence
-Emslie
-Empie
-Emoto
-Emons
-Emley
-Emile
-Embly
-Embler
-Emanuelson
-Emal
-Elzinga
-Elwer
-Elvis
-Elvington
-Elshere
-Elmquist
-Ellout
-Ellifritz
-Ellerd
-Ellerbusch
-Elizando
-Elizabeth
-Elick
-Eliasen
-Elgert
-Elger
-Elena
-Elbers
-Ekstein
-Ekmark
-Eiser
-Einck
-Eimers
-Eilert
-Eidinger
-Eicke
-Ehsan
-Ehn
-Egleton
-Egel
-Effner
-Ednilao
-Edner
-Edmons
-Edmister
-Edmison
-Edlow
-Edholm
-Edgeman
-Edgcomb
-Edell
-Edelblute
-Eclarinal
-Eckroad
-Echave
-Ebesu
-Eberwein
-Ebeid
-Ebe
-Ebbing
-Eastlund
-Eary
-Earps
-Dzuro
-Dziuban
-Dysinger
-Dyner
-Dymek
-Dyll
-Dyl
-Dydell
-Dwelle
-Dwan
-Duvernois
-Dutson
-Dutro
-Dutchover
-Dusky
-Duskey
-Dusik
-Dushkin
-Dushane
-Durrani
-Duroseau
-Durnford
-Durk
-Durepo
-Duranceau
-Duprat
-Duplechin
-Duperry
-Dunscomb
-Dunkleberger
-Dung
-Dunegan
-Dundlow
-Dumpson
-Dumphy
-Dumpert
-Dumesnil
-Dullum
-Duldulao
-Dular
-Dukart
-Duhan
-Dugdale
-Dugat
-Duffney
-Duesing
-Duenow
-Duce
-Dubson
-Drzewicki
-Druetta
-Drube
-Drozdenko
-Drop
-Drohan
-Drivers
-Drinski
-Driever
-Drewer
-Dressen
-Drehmer
-Drawe
-Drapkin
-Draney
-Drahota
-Dowers
-Dowdall
-Dovenbarger
-Dousay
-Douin
-Doughan
-Doucett
-Douce
-Dorshimer
-Dorsaint
-Dorries
-Dorosky
-Dorl
-Dorich
-Dorenfeld
-Dorcelus
-Dool
-Donoso
-Donnick
-Donnely
-Donart
-Donalds
-Donaghey
-Donaghe
-Dominges
-Domebo
-Dollings
-Dolejsi
-Doggette
-Doell
-Dockwiller
-Dockal
-Dobosh
-Dobis
-Dobiesz
-Dluhy
-Dixons
-Divin
-Diventura
-Divenere
-Divelbiss
-Dittrick
-Ditommaso
-Dirosa
-Dircks
-Diogo
-Diodonet
-Dinning
-Dininno
-Dimodica
-Dimitroff
-Diminno
-Dimassimo
-Dillie
-Dilan
-Digsby
-Digrande
-Digmann
-Digirolomo
-Digian
-Digiacinto
-Dietzen
-Dietlin
-Dietert
-Diersen
-Dienst
-Dieffenbach
-Dicorcia
-Dickhaut
-Diberardino
-Diab
-Dhein
-Dhar
-Dhamer
-Dezan
-Dez
-Dewispelaere
-Dewhirst
-Devonish
-Devincenzo
-Devillez
-Devany
-Devalcourt
-Deubler
-Dettori
-Detone
-Detommaso
-Detoma
-Desue
-Destree
-Destephen
-Desso
-Desselle
-Desimoni
-Desadier
-Derham
-Derfler
-Dercole
-Derasmo
-Depugh
-Deporter
-Depolito
-Depa
-Deninno
-Deni
-Denenberg
-Denaro
-Denardis
-Demry
-Demro
-Demmel
-Demme
-Demiel
-Demeritte
-Demarzio
-Demaline
-Demaine
-Deluco
-Delton
-Delsordo
-Delosa
-Delongis
-Delois
-Deloff
-Delmuro
-Delmoro
-Delmonaco
-Delmage
-Dellen
-Dellaripa
-Dellamore
-Delhierro
-Delfuente
-Deleppo
-Delemos
-Delea
-Delcarmen
-Delaura
-Delanuez
-Delang
-Delamarter
-Delamare
-Delage
-Delacuesta
-Dekorte
-Dekenipp
-Dekany
-Deinhardt
-Deily
-Deierlein
-Degravelle
-Deglow
-Degler
-Degiulio
-Defoore
-Defonce
-Deflorio
-Defiore
-Defilippi
-Deed
-Dedeke
-Dedecker
-Dedaj
-Decost
-Decillis
-Dechellis
-Dechaine
-Decarr
-Decaprio
-Debutiaco
-Debski
-Debry
-Debruhl
-Debouse
-Deblase
-Debey
-Debenedetti
-Debacker
-Deang
-Deandrade
-Deadmond
-Deacy
-Daykin
-Dayhuff
-Dayal
-Davion
-Davidsen
-Dautremont
-Daughrity
-Daubs
-Datwyler
-Datko
-Dasmann
-Daruszka
-Darugar
-Darroch
-Daro
-Darkis
-Daricek
-Daras
-Dar
-Dapoz
-Dapinto
-Danuser
-Danoff
-Dankmeyer
-Danesi
-Danesh
-Daneker
-Dammen
-Damien
-Damberger
-Dalmoro
-Dallmier
-Daller
-Dalka
-Daliva
-Dahline
-Dahlhauser
-Daguerre
-Dagrella
-Dagraca
-Dagesse
-Dage
-Daehn
-Dado
-Dabbraccio
-Dabato
-Czolba
-Czepiel
-Czelusniak
-Czechowski
-Czarny
-Czar
-Czapski
-Cywinski
-Cyran
-Cypret
-Cwiek
-Cuzzort
-Cuzzi
-Cutty
-Cutrone
-Cuthrell
-Cuthill
-Cutbirth
-Custeau
-Cushingberry
-Curvey
-Curson
-Currell
-Curly
-Curll
-Curdy
-Curcuru
-Cupstid
-Cuoco
-Culverson
-Culnane
-Culliver
-Cullivan
-Culleton
-Cuddeback
-Cuckler
-Cubillo
-Cubias
-Cua
-Cryar
-Crutsinger
-Crusan
-Crupe
-Crummie
-Cruice
-Cruea
-Crowthers
-Crowers
-Crowdis
-Crovo
-Croson
-Crosno
-Crosdale
-Cronwell
-Cronon
-Crocetti
-Crnich
-Cristal
-Crisson
-Crismond
-Crighton
-Cridland
-Crickard
-Creten
-Cretella
-Crespino
-Cremins
-Cremers
-Creehan
-Creecy
-Credell
-Cranney
-Cranker
-Craker
-Craffey
-Cozzy
-Coyazo
-Coxum
-Cowdin
-Covino
-Coven
-Courtenay
-Course
-Courier
-Courchene
-Coup
-Couley
-Couchenour
-Cotugno
-Cottongim
-Cotti
-Cotillo
-Costine
-Costain
-Cosmo
-Coslan
-Cose
-Coryea
-Cortwright
-Corsoro
-Corrente
-Correl
-Cornford
-Corneluis
-Cornelious
-Corneau
-Corne
-Corkins
-Corippo
-Corgiat
-Coreil
-Cordwell
-Cordovano
-Cordill
-Cordano
-Corazza
-Coran
-Coppess
-Coonrad
-Coonfare
-Coomber
-Cooksley
-Cookis
-Coodey
-Contrino
-Contee
-Consorti
-Console
-Conorich
-Conole
-Connoly
-Connley
-Connington
-Connie
-Conness
-Conly
-Conkright
-Coner
-Conchas
-Comrie
-Compston
-Compagno
-Comnick
-Commiskey
-Commer
-Comiso
-Comish
-Comden
-Colondres
-Collica
-Colleen
-Colle
-Collaer
-Colinger
-Colford
-Colao
-Colanero
-Cohens
-Cofresi
-Coerver
-Cockriel
-Cockran
-Cockerell
-Cobham
-Cobert
-Cobern
-Cobell
-Clunie
-Clubs
-Clubbs
-Cloutman
-Clise
-Clippinger
-Clerkley
-Cler
-Clemmens
-Clemen
-Cleare
-Cleamons
-Claycamp
-Clawges
-Claverie
-Clarkston
-Clarity
-Clantz
-Clakley
-Clain
-Cizek
-Ciuffreda
-Citrone
-Ciraco
-Cinotto
-Cini
-Cinadr
-Cilento
-Cilano
-Cihon
-Ciganek
-Cieslinski
-Cicoria
-Cicco
-Cibula
-Ciarrocchi
-Ciak
-Ciafardoni
-Chubbs
-Chrzan
-Christophel
-Christoph
-Christoforou
-Christel
-Christan
-Chreene
-Chrabaszcz
-Chrabasz
-Chowhan
-Choules
-Chorney
-Chorley
-Cholico
-Cholewinski
-Cholakyan
-Chojnowski
-Chlebek
-Chittam
-Chiszar
-Chisam
-Chirafisi
-Chiprean
-Chinetti
-Chimes
-Chiera
-Chicon
-Chiarelli
-Chiaravalle
-Chiappetta
-Chesner
-Cheser
-Chesbrough
-Cherubino
-Cherrette
-Cherpak
-Chelf
-Cheesebrough
-Cheeney
-Cheely
-Chean
-Cheak
-Chavana
-Chauvette
-Chatt
-Chasser
-Chaskey
-Charriez
-Chappie
-Chappelear
-Chapparo
-Chapek
-Chanoine
-Chandley
-Challenger
-Challberg
-Challacombe
-Chaleun
-Chainey
-Chaffey
-Cetta
-Cerza
-Cervenak
-Certosimo
-Cerruti
-Cerqueira
-Cernohous
-Cereceres
-Ceovantes
-Ceo
-Centrich
-Centore
-Cellucci
-Ceglinski
-Ceconi
-Cecilio
-Cecchinato
-Cecchi
-Cazorla
-Cayne
-Cayabyab
-Cavill
-Cavicchia
-Cavez
-Cavener
-Cavasos
-Cavaness
-Cavalcante
-Caulk
-Caudel
-Cattano
-Catrett
-Catlow
-Catella
-Cataquet
-Catalino
-Cataline
-Catalanotto
-Catalanatto
-Cata
-Castenanos
-Castelo
-Cassiday
-Casparian
-Casillo
-Casewell
-Casarrubias
-Casalman
-Casal
-Carvalno
-Carskadon
-Carrus
-Carrison
-Carriker
-Carrazco
-Carratala
-Carpanini
-Carovski
-Caroli
-Carne
-Carmella
-Carlis
-Carfagno
-Carethers
-Carella
-Cardonia
-Cardno
-Carda
-Carcieri
-Carcano
-Carcana
-Carboneau
-Carbon
-Caravantes
-Carattini
-Caramanica
-Capriola
-Cappelluti
-Capossela
-Caponi
-Caperon
-Caper
-Capati
-Cantv
-Cantore
-Cantell
-Cantatore
-Cantarella
-Cantadore
-Canslor
-Canonico
-Cannonier
-Cannone
-Cannavo
-Cannatella
-Cangiano
-Campoli
-Campellone
-Campean
-Campanile
-Camera
-Camcam
-Cambel
-Calta
-Callsen
-Callarman
-Calicott
-Calhaun
-Calegari
-Calco
-Calciano
-Calabretta
-Cake
-Cairone
-Cahela
-Cagliostro
-Caflisch
-Cafferky
-Caetano
-Cadice
-Caddle
-Cadarette
-Cackowski
-Caccia
-Cabrena
-Cabotaje
-Caborn
-Caberto
-Bystrom
-Byndon
-Buzek
-Buysse
-Bux
-Buttrick
-Buttaro
-Butscher
-Butsch
-Butor
-Butman
-Buteux
-Butchee
-But
-Bustard
-Busta
-Bussy
-Busson
-Bussing
-Bussa
-Busi
-Buseman
-Buschner
-Buscaglia
-Burttram
-Burth
-Bursch
-Burnsworth
-Burland
-Burkowski
-Burglin
-Burgdorfer
-Burdman
-Burau
-Buran
-Burakowski
-Buquet
-Buonomo
-Buntyn
-Bungo
-Bunche
-Bunal
-Bult
-Bulliner
-Bullaro
-Bulkeley
-Bulcao
-Bula
-Buisson
-Buissereth
-Bugni
-Buetow
-Buesgens
-Budziszewski
-Budinich
-Buddington
-Buchtel
-Buchli
-Buchert
-Buchar
-Buben
-Brzuchalski
-Brummell
-Brull
-Brudnicki
-Brucz
-Bruchman
-Brubach
-Brownwood
-Browen
-Browe
-Brossett
-Brosco
-Brookshear
-Brookfield
-Bronstad
-Bronsky
-Bronaugh
-Bron
-Brohawn
-Brogna
-Brodzik
-Brodsho
-Brodowski
-Brodnicki
-Brodell
-Brod
-Brockney
-Broas
-Broadrick
-Briz
-Britschgi
-Brint
-Brinich
-Bringard
-Brindamour
-Brincat
-Brimfield
-Brillant
-Brilhante
-Brihon
-Brignoni
-Brightful
-Briggman
-Bried
-Brickle
-Brickel
-Brezeale
-Brewen
-Breutzman
-Bretado
-Brester
-Bresko
-Brennon
-Brennaman
-Breniser
-Brendon
-Brems
-Breisch
-Breidenstein
-Brechtel
-Brea
-Brazington
-Brazen
-Brayer
-Brawer
-Bravata
-Braune
-Braunbeck
-Braue
-Braucht
-Braseth
-Brantly
-Branter
-Branski
-Brandler
-Bramham
-Brahney
-Bradac
-Brackley
-Brackey
-Brackemyre
-Brach
-Boyarsky
-Bowlan
-Bowhall
-Bowdre
-Bovie
-Bouyea
-Boustead
-Bourgeault
-Bounthapanya
-Boultinghouse
-Bouillon
-Boudrie
-Boudinot
-Bottgenbach
-Bottari
-Botos
-Bothof
-Botha
-Bosten
-Bostelmann
-Bossley
-Bossick
-Bossen
-Bosquet
-Boscio
-Bosche
-Bosa
-Borski
-Borsh
-Borowik
-Borom
-Borke
-Borgerding
-Borgatti
-Bordwine
-Booser
-Bookbinder
-Bookard
-Boock
-Bonte
-Bonomi
-Bonning
-Bonito
-Bonillas
-Bondura
-Bombich
-Boltinghouse
-Bollozos
-Bolliger
-Bollie
-Bolka
-Bolitho
-Boldenow
-Bolch
-Bolay
-Boissoneault
-Boisjolie
-Boisclair
-Boie
-Bohrman
-Bohley
-Boglioli
-Boghosian
-Boggus
-Boggiano
-Bogden
-Boey
-Boesenhofer
-Boerst
-Boerma
-Boenisch
-Boemig
-Boebinger
-Boday
-Bodamer
-Bocklage
-Bocchini
-Bobseine
-Bobian
-Boberg
-Bobek
-Blyler
-Blumenstein
-Bloyer
-Blotter
-Blore
-Blomme
-Blomdahl
-Bliske
-Blinston
-Bliek
-Blessman
-Bleggi
-Bleeker
-Bledsaw
-Blauch
-Blaskovich
-Blankley
-Blankenberg
-Blanken
-Blakelock
-Blaida
-Bjorgen
-Biven
-Bitzel
-Bittman
-Bitonti
-Bissen
-Bisom
-Bisher
-Birman
-Birky
-Birkes
-Bippus
-Bintz
-Bintner
-Bintliff
-Binnie
-Binks
-Binkiewicz
-Binienda
-Bingley
-Bilotto
-Billheimer
-Billen
-Billeck
-Billeaudeau
-Bilinski
-Bilello
-Bild
-Bihari
-Bigda
-Biez
-Bierwirth
-Bierle
-Bierbower
-Bienenstock
-Biemer
-Bieler
-Bielak
-Bidle
-Biddleman
-Biddiscombe
-Bicknese
-Bickerton
-Bickelhaupt
-Bichsel
-Bibles
-Bibian
-Biase
-Biancuzzo
-Biancaniello
-Biamonte
-Bia
-Bhatnagar
-Bhardwaj
-Bhan
-Beyett
-Bewig
-Beuchat
-Better
-Betsill
-Bethey
-Betenbaugh
-Betance
-Betacourt
-Beske
-Besendorfer
-Besemer
-Besco
-Bery
-Bertran
-Bertling
-Bertie
-Bernson
-Bernosky
-Bernon
-Berninger
-Bernes
-Bernecker
-Bernasconi
-Bernardin
-Berlo
-Berliew
-Berky
-Berhe
-Berhalter
-Bergsjo
-Bergholm
-Bergener
-Bergeman
-Beraun
-Benward
-Benusa
-Bense
-Bennage
-Benischek
-Benion
-Beninato
-Bengel
-Benedek
-Bene
-Bendzus
-Bendler
-Bendit
-Benderman
-Benberry
-Benallie
-Bemrich
-Belyea
-Beltrain
-Belter
-Bellue
-Bellocchio
-Bellisle
-Bellipanni
-Bellion
-Bellessa
-Bellavia
-Belay
-Bejjani
-Beisser
-Beiriger
-Beik
-Beien
-Behymer
-Behrenwald
-Behanna
-Beed
-Beechum
-Beechner
-Bednarik
-Bednarek
-Bedenbaugh
-Becwar
-Beckton
-Beckom
-Bech
-Bebo
-Beatie
-Beat
-Bearman
-Beaner
-Beakley
-Beahan
-Beachamp
-Bazzi
-Bayman
-Bayardo
-Bayala
-Bawcum
-Bavier
-Bauswell
-Baures
-Baune
-Baumgarter
-Bault
-Baughey
-Baugatz
-Bauernfeind
-Bauerlein
-Bau
-Batun
-Battistone
-Batteen
-Batko
-Batistich
-Bater
-Batcheller
-Batarse
-Bastow
-Bassuk
-Bassolino
-Bassel
-Bason
-Basilone
-Basich
-Bascle
-Bascetta
-Bartush
-Bartrum
-Bartlet
-Barthelmes
-Bartberger
-Bartash
-Barsoum
-Barsanti
-Barrott
-Barrom
-Barriner
-Barnhurst
-Barnell
-Barkle
-Barkes
-Barillaro
-Bargerstock
-Barganier
-Baremore
-Bardney
-Barda
-Barbot
-Barbie
-Barayuga
-Barager
-Bantz
-Bandulin
-Banasiak
-Balzarini
-Balwin
-Balton
-Balsiger
-Balmos
-Balmir
-Ballestero
-Ballek
-Balick
-Balian
-Balestra
-Balensiefen
-Balduf
-Balckburn
-Balasa
-Balafoutas
-Baksi
-Bakowski
-Baklund
-Bakko
-Bakey
-Bakanauskas
-Baj
-Baio
-Bainard
-Baima
-Baillet
-Baich
-Bahrmasel
-Bahrke
-Bahoora
-Bagsby
-Bagger
-Badena
-Badders
-Backfisch
-Bacik
-Bachler
-Bachleda
-Bachhuber
-Bachert
-Babiracki
-Baatz
-Azzarito
-Azzarella
-Azulay
-Azotea
-Azeem
-Ayoob
-Ayola
-Ayles
-Ayersman
-Ayaia
-Axthelm
-Ax
-Awtry
-Avrett
-Avilar
-Aveni
-Avellino
-Aurelia
-Aumend
-Auletta
-Augustson
-Augustave
-Aughe
-Auerswald
-Aubrecht
-Athalone
-Atanacio
-Atamian
-Astrologo
-Astrella
-Aspinall
-Asman
-Ashlin
-Ashenfelter
-Aschenbrener
-Ascheman
-Ascenzo
-Asante
-Asa
-Arvayo
-Artmann
-Artice
-Art
-Arslan
-Arrott
-Arrojo
-Arrizola
-Arriano
-Arrendell
-Arps
-Aronstein
-Aronow
-Aronica
-Arntz
-Arnst
-Arnio
-Arne
-Armengol
-Armantrout
-Arlt
-Arkadie
-Arjune
-Arismendez
-Arimas
-Aries
-Ariel
-Argandona
-Arflack
-Areola
-Arenales
-Ardman
-Arciga
-Arciba
-Archacki
-Arcaro
-Arcano
-Arbogust
-Arauz
-Aranas
-Aquil
-Aquero
-Apresa
-Appiah
-Appert
-Apostal
-Apodace
-Apadoca
-Antrobus
-Antoniuk
-Antione
-Antinarelli
-Antich
-Anslow
-Ansbro
-Annicchiarico
-Angleberger
-Angelson
-Angello
-Andruzzi
-Androsky
-Androlewicz
-Andrion
-Andringa
-Andracki
-Andra
-Ancelet
-Anastas
-Anast
-Anagnost
-Amsley
-Amsdell
-Amsberry
-Amsbaugh
-Amoruso
-Amoa
-Amici
-Amesbury
-Ambrosia
-Ambrogi
-Amack
-Alvia
-Alvaro
-Alvanas
-Altrogge
-Altomare
-Altmire
-Altenbach
-Alsheimer
-Alquisira
-Alouf
-Aloisi
-Aloe
-Almiron
-Allford
-Allex
-Allery
-Allenbach
-Allegrucci
-Alig
-Alicuben
-Alfisi
-Alferez
-Alfandre
-Alf
-Alexion
-Alevras
-Alessandrini
-Alesi
-Alescio
-Alegre
-Alea
-Aldecoa
-Alcini
-Albrittain
-Albrashi
-Alawdi
-Ala
-Aksamit
-Akima
-Akel
-Akahi
-Ajose
-Ajayi
-Aivao
-Aiu
-Ainge
-Ailshire
-Aidt
-Aicklen
-Ahuja
-Ahr
-Aholt
-Agle
-Agamao
-Affeld
-Aeschbacher
-Aeling
-Adriance
-Adkin
-Adhami
-Adeyemo
-Ades
-Adelgren
-Addicks
-Adamitis
-Ada
-Acor
-Acimovic
-Accomando
-Accola
-Acampora
-Abuaita
-Abshear
-Abrantes
-Abramovich
-Abrachinsky
-Abilay
-Abellera
-Abeles
-Abdula
-Abdon
-Abbed
-Abati
-Abascal
-Aavang
-Aadland
-Zylka
-Zwolak
-Zwingman
-Zwerschke
-Zwack
-Zurin
-Zupp
-Zumbrunnen
-Zukoski
-Zukor
-Zukas
-Zuanich
-Zoumis
-Zoulek
-Zou
-Zorra
-Zorich
-Zomorodi
-Zolty
-Zolondek
-Zolnoske
-Zoldesy
-Zoldak
-Zocklein
-Zlotnik
-Ziraldo
-Zipf
-Zinsli
-Ziniewicz
-Zindell
-Zin
-Zimmerebner
-Zimmel
-Zimm
-Zills
-Zilla
-Zilka
-Zietz
-Zietlow
-Ziemski
-Zielesch
-Zieler
-Zieglen
-Ziegenbein
-Ziegelbauer
-Ziegel
-Ziech
-Zicker
-Zicherman
-Zich
-Ziccardi
-Zgoda
-Zeschke
-Zerko
-Zerhusen
-Zepka
-Zents
-Zeni
-Zeme
-Zematis
-Zema
-Zella
-Zelkin
-Zelenski
-Zeilinger
-Zeidan
-Zegarelli
-Zeanah
-Zdon
-Zbikowski
-Zazula
-Zavesky
-Zavasky
-Zaruba
-Zarrineh
-Zarrillo
-Zarraluqui
-Zarling
-Zaring
-Zaretsky
-Zarebski
-Zanini
-Zanin
-Zangl
-Zaner
-Zand
-Zampieri
-Zaltz
-Zaloudek
-Zall
-Zalk
-Zalar
-Zakowski
-Zajc
-Zahran
-Zahnen
-Zagroba
-Zagel
-Zagara
-Zagami
-Zaffuto
-Zachmann
-Zachariades
-Zaccagnino
-Zaccagnini
-Zaborski
-Zabloudil
-Zabarkes
-Yvon
-Yusef
-Yuricic
-Yuill
-Yuenger
-Yuasa
-Ysbrand
-Yourshaw
-Younkers
-Youngdahl
-Youngblut
-Youkers
-Youkanaa
-Yorkey
-Yoneyama
-Yonamine
-Yoeckel
-Yodis
-Yocius
-Yocham
-Yobst
-Yeubanks
-Yetto
-Yerigan
-Yerbic
-Yentsch
-Yennard
-Yemchuk
-Yax
-Yaun
-Yasurek
-Yasui
-Yaskiewicz
-Yantzer
-Yantz
-Yanosky
-Yanek
-Yandle
-Yance
-Yanagi
-Yambao
-Yamakawa
-Yagoda
-Yaekel
-Yackeren
-Yacavone
-Yacano
-Ximines
-Xaimoungkhoun
-Wysock
-Wyont
-Wynott
-Wynans
-Wylde
-Wyett
-Wydner
-Wurzbacher
-Wulfing
-Wruck
-Wroe
-Wrobliski
-Wrobbel
-Wrights
-Wraspir
-Wrape
-Woytowicz
-Woy
-Worthan
-Worstel
-Worsfold
-Worrel
-Worbington
-Wools
-Woollen
-Woolems
-Woodmancy
-Woodhull
-Woodgate
-Woodfield
-Woodcox
-Woock
-Wonsik
-Wolven
-Wolslegel
-Wolny
-Wolma
-Wollyung
-Wollin
-Wolley
-Wollan
-Wolkow
-Wolke
-Wolever
-Woleslagle
-Wolansky
-Wojnicki
-Wohner
-Wohlfahrt
-Wohler
-Wloch
-Wittlin
-Wittkopp
-Wittenborn
-Wittels
-Withiam
-Withfield
-Wisz
-Wissel
-Wisseh
-Wislocki
-Wiscombe
-Wischmeyer
-Wischman
-Wirebaugh
-Winzelberg
-Winterstein
-Wintersmith
-Winterroth
-Winrich
-Winograd
-Winlock
-Winley
-Winkley
-Wings
-Winfred
-Winebaugh
-Windover
-Windly
-Winarski
-Wimbs
-Wimber
-Wiltgen
-Willmschen
-Williver
-Willinghurst
-Williamston
-Willenbrock
-Willars
-Willamson
-Wileman
-Wileczek
-Wildenberg
-Wildeman
-Wilcutt
-Wilch
-Wilby
-Wilbers
-Wikstrom
-Wigman
-Wigle
-Wigelsworth
-Wietzel
-Wiesneski
-Wienert
-Wienecke
-Wienandt
-Wieloch
-Wielgosz
-Wiedmann
-Wieckowski
-Wiece
-Wieand
-Widmar
-Widhalm
-Widgeon
-Widerski
-Widdows
-Widdop
-Widdison
-Widby
-Wida
-Whyne
-Whyel
-Whybrew
-Whittman
-Whittall
-Whitler
-Whitinger
-Whitewater
-Whitescarver
-Whitemarsh
-Whitecloud
-Whit
-Whistlehunt
-Whinnery
-Whillock
-While
-Whilby
-Wheldon
-Wheatcroft
-Whapham
-Whaite
-Wettlaufer
-Wetterer
-Wettach
-Wetsel
-Wethern
-Westrum
-Westlie
-Westgaard
-Westerhof
-Westerfeld
-Westad
-Wesly
-Wesberry
-Werring
-Werre
-Wernz
-Wermter
-Werkmeister
-Werbelow
-Wentzlaff
-Weniger
-Wengreen
-Wendolski
-Wendelberger
-Wempa
-Weltzin
-Welti
-Weltch
-Wellnitz
-Wellenstein
-Wekenmann
-Weitze
-Weitman
-Weisholz
-Weishar
-Weisbaum
-Weinraub
-Weinbauer
-Weinbach
-Weidig
-Weiderhold
-Wehrwein
-Wehrs
-Wehrly
-Wehnes
-Wehn
-Wegge
-Weerts
-Weemhoff
-Weekey
-Wedman
-Weder
-Weckman
-Weckhorst
-Weaklend
-Wauters
-Wauer
-Waud
-Wattenberg
-Watte
-Watling
-Waszkiewicz
-Wasmus
-Wasilko
-Washor
-Wartchow
-Warshauer
-Warsham
-Warrender
-Warnstaff
-Warmuth
-Warmington
-Wardrup
-Wardhaugh
-Wardall
-Warchal
-Warboys
-Wanty
-Wanous
-Wanlass
-Wangstad
-Waneka
-Wandless
-Wandel
-Wanda
-Wamser
-Wamhoff
-Walvatne
-Waltemeyer
-Walsingham
-Walljasper
-Wallet
-Wallerich
-Walkling
-Walkers
-Walezak
-Waldroff
-Waldhoff
-Waldall
-Walbright
-Walat
-Wakita
-Waka
-Waisner
-Waiki
-Waiden
-Wagle
-Wagenblast
-Wadusky
-Wadden
-Waclawski
-Wackenhut
-Wackenheim
-Wachal
-Waananen
-Waack
-Vy
-Vukcevic
-Vreugdenhil
-Vreeman
-Vrazel
-Vranes
-Vranek
-Voytek
-Voves
-Vormelker
-Vorachek
-Vontungeln
-Vonniederhaus
-Vonner
-Vonhagen
-Vondrak
-Vondielingen
-Vonasek
-Vonallmen
-Voltaire
-Vollucci
-Vollick
-Vollenweider
-Volante
-Voitier
-Vogts
-Vocu
-Voci
-Voccia
-Vliet
-Vliem
-Vizarro
-Vizard
-Vittorini
-Vitro
-Vitolas
-Vititoe
-Viteo
-Visnic
-Visher
-Visel
-Viscia
-Viscera
-Vis
-Virrueta
-Virola
-Viren
-Vinz
-Vinke
-Vinger
-Vind
-Vinagre
-Viltz
-Villwock
-Villifana
-Villiard
-Villetas
-Villasana
-Villarin
-Villante
-Villacana
-Vile
-Vilcheck
-Vilardi
-Vigueras
-Vigoren
-Vignovich
-Vignaux
-Vignarath
-Vigier
-Vieweg
-Vietti
-Vietor
-Viegas
-Viebrock
-Vidals
-Victorin
-Vicsik
-Vicic
-Vicens
-Viapiano
-Vetsch
-Vetri
-Vertiz
-Versluis
-Verrilli
-Verrelli
-Verrecchia
-Verni
-Vernetti
-Vermeer
-Verling
-Verlato
-Verkler
-Verkamp
-Verghese
-Verducci
-Verant
-Venzeio
-Venturella
-Ventress
-Venton
-Venhorst
-Venerable
-Veneman
-Ven
-Velverton
-Velunza
-Velmontes
-Vellutini
-Vellekamp
-Veleta
-Veldkamp
-Velazques
-Veino
-Veigel
-Veeneman
-Vavro
-Vauters
-Vattes
-Vaszily
-Vastakis
-Vasiloff
-Vasilauskas
-Vasconcelos
-Vars
-Varos
-Varnon
-Varkey
-Vares
-Varenhorst
-Vardy
-Varcoe
-Vanwye
-Vanwoert
-Vanwieren
-Vanvickle
-Vantreese
-Vansyckle
-Vanstrander
-Vansteenburg
-Vanstee
-Vanslander
-Vanproosdy
-Vanpoucke
-Vanpoppelen
-Vanpatton
-Vanosdel
-Vannelli
-Vanmiddleswor
-Vanloh
-Vanlith
-Vankoten
-Vanisouvong
-Vanholland
-Vanhekken
-Vanharlingen
-Vanhandel
-Vangemert
-Vaneyck
-Vanert
-Vaneps
-Vanegdom
-Vandesteene
-Vanderschaege
-Vanderkam
-Vanderheiden
-Vandergriend
-Vanderark
-Vandeputte
-Vandenbergh
-Vandegraaff
-Vandebogart
-Vandamme
-Vandalsen
-Vandagriff
-Vanclief
-Vanboven
-Vanbecelaere
-Vanartsdalen
-Vanaller
-Vanakin
-Vanabel
-Valrie
-Valrey
-Valotta
-Vallangeon
-Valladolid
-Valaitis
-Vala
-Vair
-Vaidya
-Vaid
-Vagt
-Vagle
-Uyeno
-Uson
-Us
-Urwin
-Urtado
-Ursino
-Urry
-Urquiza
-Urps
-Urmeneta
-Urlaub
-Uribazo
-Urhahn
-Ure
-Urch
-Urbanic
-Urata
-Urankar
-Ur
-Uppinghouse
-Unthank
-Unland
-Unikel
-Ungvarsky
-Ungerleider
-Ungerecht
-Underkoffler
-Umlauf
-Umbdenstock
-Ulrick
-Uliano
-Uldrich
-Ulch
-Ulberg
-Uknown
-Ukena
-Uk
-Uhri
-Uhde
-Udley
-Uboldi
-Tzeremes
-Tysor
-Tyrus
-Tyrol
-Tyl
-Tyksinski
-Tycer
-Tyberg
-Twitt
-Tweden
-Tuy
-Tuton
-Tuter
-Tustison
-Tuschhoff
-Turso
-Turrigiano
-Turowski
-Turnbo
-Turnball
-Turlich
-Turli
-Turla
-Turkin
-Turke
-Turi
-Tuong
-Tulk
-Tulip
-Tugman
-Tuggles
-Tufano
-Tucknott
-Tuccillo
-Tubeszewski
-Tuason
-Tsuzuki
-Tsunoda
-Tschannen
-Trytten
-Trybala
-Truskowski
-Trueba
-Trueax
-Truden
-Trucchi
-Trotti
-Trongone
-Tromble
-Tromblay
-Trokey
-Troiani
-Troglin
-Trodden
-Troccoli
-Tritz
-Tritch
-Trischitta
-Trisch
-Trippet
-Triplette
-Trinca
-Trimmell
-Trilling
-Trieger
-Treworgy
-Trevorrow
-Trevillion
-Trevigne
-Trevett
-Tretter
-Treston
-Trepagnier
-Trentinella
-Trenkle
-Trenh
-Trenbeath
-Tremelling
-Treider
-Treib
-Treftz
-Tredennick
-Trecroci
-Trebil
-Traves
-Traversa
-Tratar
-Traster
-Trasport
-Trank
-Trampe
-Trammer
-Trame
-Trachte
-Toyoshima
-Towley
-Tovias
-Touvell
-Tout
-Toussant
-Tourikis
-Toten
-Tosten
-Tosic
-Tosches
-Tortoriello
-Tortorice
-Torstrick
-Torset
-Torrijos
-Torrie
-Torress
-Torred
-Torra
-Torma
-Torkildsen
-Toppi
-Toporek
-Topolosky
-Topick
-Topez
-Toper
-Toncrey
-Tompsett
-Tompkin
-Tomory
-Tommolino
-Tomjack
-Tombs
-Tombrello
-Tomaszycki
-Tomaski
-Tolzmann
-Tolston
-Tolosky
-Toldness
-Tokuoka
-Tokihiro
-Tokay
-Tok
-Tojo
-Tointon
-Tohill
-Togni
-Tognazzini
-Todeschi
-Tobola
-Tobeck
-Toala
-Toadvine
-Tllo
-Tkacz
-Titchener
-Titch
-Tissot
-Tiso
-Tirri
-Tipka
-Tintle
-Tinneberg
-Tinius
-Tinelli
-Tin
-Timmreck
-Timmerberg
-Timinsky
-Timi
-Timchak
-Tillberry
-Tilgner
-Tiff
-Tieszen
-Tiemeyer
-Tiemens
-Tiell
-Tiehen
-Tidey
-Tick
-Ticas
-Tiboni
-Tiberio
-Tibbert
-Thyne
-Thurton
-Thurau
-Thune
-Thrune
-Threets
-Thorngren
-Thornbrugh
-Thorin
-Thongdy
-Thommarson
-Thoene
-Thoben
-Thoams
-Thixton
-Thistlethwait
-Thingvold
-Thiesfeld
-Thierauf
-Thielbar
-Thiebeault
-Thiara
-Thews
-Theophilus
-Theodoratos
-Thenhaus
-Theam
-Thay
-Thalmann
-Thake
-Thady
-Tevlin
-Tevebaugh
-Testen
-Tesseneer
-Tervort
-Terri
-Terrey
-Terres
-Terrasas
-Terney
-Termeer
-Terlecki
-Terheggen
-Terhark
-Terhar
-Terepka
-Terault
-Terando
-Teppo
-Tepler
-Teper
-Tent
-Tenpas
-Tennill
-Tennett
-Tenley
-Templer
-Tempe
-Temp
-Teltschik
-Telschow
-Telle
-Tekippe
-Teitsort
-Teitenberg
-Tei
-Tegarden
-Teffeteller
-Tefera
-Teesdale
-Teemer
-Teekasingh
-Teddick
-Tebay
-Tebar
-Teats
-Teano
-Teagues
-Teachman
-Teabo
-Tchakian
-Tazzara
-Tayor
-Tavorn
-Tavira
-Taverna
-Tave
-Tautuiaki
-Tatters
-Tatevosian
-Tassey
-Taschereau
-Tarzia
-Tarring
-Tarrien
-Tarras
-Tarkenton
-Tariq
-Tardio
-Tarascio
-Tara
-Tappeiner
-Tannen
-Tankersly
-Tanious
-Tangren
-Tangredi
-Tangert
-Tamulis
-Tamburrino
-Tambasco
-Tamargo
-Tamanaha
-Talluto
-Taki
-Takeshita
-Takemura
-Takaoka
-Tajiri
-Taintor
-Tahu
-Tags
-Taglieri
-Tafel
-Tadiello
-Tacket
-Taborda
-Tabolt
-Tabisola
-Tabian
-Taback
-Szymansky
-Szwejbka
-Szweda
-Szufat
-Szubinski
-Szerlong
-Szekula
-Szczygiel
-Szczepanek
-Szalay
-Szafryk
-Syrek
-Syphard
-Synan
-Symmonds
-Sydner
-Swirsky
-Swires
-Swietoniowski
-Swickheimer
-Swets
-Swetland
-Swenk
-Sweetin
-Swavely
-Swatt
-Swatsworth
-Swatski
-Swartzmiller
-Swartzbeck
-Swartzbaugh
-Swansen
-Swalley
-Swaisgood
-Swails
-Swaggert
-Svrcek
-Svinth
-Svetz
-Svetlik
-Sutulovich
-Suttell
-Susswein
-Sussex
-Susor
-Susoev
-Susich
-Susana
-Surwillo
-Suran
-Sunn
-Sunkel
-Sundling
-Sundholm
-Sumsion
-Sump
-Summar
-Sumlar
-Suminski
-Sumi
-Sumas
-Sulzman
-Sultana
-Sullinger
-Suleski
-Sulcer
-Sul
-Sukeforth
-Suing
-Suglia
-Sugiki
-Suggett
-Sueltenfuss
-Suders
-Sudar
-Suchecki
-Sucharzewski
-Suchanek
-Subler
-Suben
-Subasic
-Styborski
-Stvil
-Stumme
-Stulick
-Studyvin
-Stubson
-Stuble
-Stubits
-Stubenrauch
-Strysko
-Struggs
-Strudwick
-Strowd
-Stroub
-Stroth
-Stropko
-Stroinski
-Strnad
-Stritzke
-Stritzinger
-Strittmater
-Strieker
-Strickert
-Strength
-Stremlow
-Stremel
-Strejcek
-Streitmatter
-Streif
-Streb
-Streams
-Straws
-Strausberg
-Strathy
-Strathman
-Strater
-Straseskie
-Strapp
-Stranger
-Strande
-Stramiello
-Strakbein
-Strachn
-Stoyer
-Stoyanoff
-Stowman
-Stowbridge
-Stove
-Stoutt
-Stoutenburg
-Stouer
-Stouder
-Store
-Stoppkotte
-Stopa
-Stolts
-Stolinski
-Stolecki
-Stole
-Stojanovic
-Stofsky
-Stoffregen
-Stoffels
-Stoffa
-Stoesz
-Stodolski
-Stockett
-Stittsworth
-Stipek
-Stinett
-Stillion
-Stillinger
-Stiel
-Stiehl
-Stiegler
-Stieg
-Stickrod
-Sticht
-Stibbins
-Stevener
-Steudeman
-Stetzel
-Sterr
-Sternal
-Sterback
-Stephco
-Stenman
-Stemmerman
-Stemme
-Stemarie
-Stelting
-Stellings
-Steir
-Steinlicht
-Steiniger
-Steinbrenner
-Steidinger
-Stehney
-Stehly
-Stefka
-Steffel
-Stefanovich
-Steeno
-Steeneck
-Steenburgh
-Steckline
-Steckelberg
-Stazenski
-Stavis
-Staum
-Stauffacher
-Stauder
-Staude
-Statzer
-Stasinos
-Starwalt
-Starrs
-Starnauld
-Starek
-Stapleford
-Stapf
-Stapels
-Stansifer
-Stanojevic
-Stanick
-Standring
-Standrew
-Standke
-Standford
-Stancle
-Stanciel
-Stamnos
-Stamison
-Stallons
-Stallion
-Stallbaumer
-Stailey
-Staie
-Staiano
-Stahnke
-Stahle
-Stageman
-Stacken
-Stachecki
-Stableford
-Stabb
-Sramek
-Squines
-Spurzem
-Sprock
-Springate
-Spreng
-Spratte
-Sprang
-Sprake
-Spotwood
-Splain
-Spiwak
-Spitznogle
-Spirito
-Spirek
-Spingola
-Spincic
-Spillett
-Spika
-Spigelman
-Spielmann
-Spetter
-Sperl
-Spenard
-Speilman
-Speigel
-Speice
-Speach
-Spaugh
-Spatafore
-Spatafora
-Spar
-Spanski
-Spannaus
-Spanish
-Spanfellner
-Spalinger
-Spagnolia
-Spadea
-Spadafore
-Spadaccini
-Spachtholz
-Spach
-Spacek
-Sozzi
-Sowels
-Soulasinh
-Souffront
-Soucier
-Sotolo
-Soteros
-Sotero
-Soter
-Sossaman
-Soshnik
-Sorrick
-Soron
-Soroa
-Sornsen
-Sorgente
-Sordahl
-Sonza
-Sontheimer
-Sonstroem
-Sonoski
-Sonnenfeld
-Sonderup
-Somani
-Soman
-Somalski
-Solymani
-Solton
-Soloveichik
-Solmonson
-Sollberger
-Solkowitz
-Solimini
-Soleman
-Solders
-Soldavini
-Solanki
-Sohm
-Sodek
-Sode
-Socks
-Sockalosky
-Sochan
-Sobilo
-Soapes
-Snyders
-Snowman
-Snowdy
-Sniffin
-Snetting
-Snellman
-Snellenberger
-Snellen
-Snellbaker
-Sneathen
-Sneath
-Smyrl
-Smull
-Smolko
-Smithheart
-Smiht
-Smestad
-Sluter
-Slupe
-Slomkowski
-Slomka
-Slomba
-Sliz
-Slipp
-Slim
-Slightam
-Sleper
-Sledz
-Slechta
-Slaughterbeck
-Slaughenhoupt
-Slaight
-Sladick
-Slader
-Skye
-Skupski
-Skroch
-Skripko
-Skrine
-Skreen
-Skradski
-Skorski
-Skornik
-Skokowski
-Skok
-Skocilich
-Skinnen
-Skillington
-Skemp
-Skay
-Skattebo
-Skagerberg
-Siwik
-Sivik
-Sitar
-Sitaca
-Sission
-Sissac
-Sisney
-Siruta
-Sirmon
-Sirkoch
-Siriano
-Siracuse
-Sipler
-Sipho
-Sinkovich
-Sinkey
-Sinistore
-Singo
-Sinclaire
-Simunovich
-Simuel
-Simril
-Simpton
-Simpliciano
-Simoson
-Simonis
-Simoncini
-Simister
-Simison
-Simenez
-Simco
-Simcheck
-Silvi
-Silveri
-Silvano
-Silletto
-Sillavan
-Siles
-Silbernagel
-Sigwart
-Sigona
-Signs
-Signaigo
-Sigmond
-Sigars
-Siemek
-Siem
-Sieloff
-Sieligowski
-Siefke
-Siebeneck
-Siebenberg
-Siderman
-Siderine
-Sidberry
-Sicilia
-Sichta
-Sibrel
-Sibell
-Sibayan
-Shyu
-Shvey
-Shuter
-Shumski
-Shulund
-Shulte
-Shuker
-Shugars
-Shufford
-Shubrick
-Shub
-Shouldice
-Shotton
-Shotkoski
-Shost
-Shortsleeve
-Shorette
-Shopen
-Shont
-Shonerd
-Shone
-Shomin
-Shomer
-Sholl
-Shoger
-Shirts
-Shirota
-Shinholster
-Shindle
-Shinaberry
-Shimura
-Shimsky
-Shimo
-Shillinger
-Shilleh
-Shihadeh
-Shierling
-Shewbridge
-Shevitz
-Sheumaker
-Shettle
-Shers
-Sherren
-Shern
-Sherling
-Sherle
-Sheridon
-Sherdon
-Shelter
-Shelmon
-Shelling
-Shelko
-Sheline
-Shelhamer
-Shekey
-Shekarchi
-Sheinberg
-Shehata
-Sheffo
-Shebchuk
-Shearing
-Sheaks
-Shazier
-Shayne
-Shawnee
-Shawhan
-Shaud
-Shastri
-Sharr
-Sharlin
-Shark
-Sharits
-Sharf
-Share
-Shapskinsky
-Shape
-Shankland
-Shames
-Shalhoup
-Shaftic
-Shadiack
-Shackle
-Shabala
-Sevick
-Sevedge
-Seurer
-Sette
-Servan
-Serva
-Serrett
-Serrand
-Serisky
-Sering
-Serie
-Serianni
-Sereda
-Sequin
-Senti
-Senosk
-Senno
-Senner
-Senna
-Senerchia
-Sendro
-Sencabaugh
-Semonick
-Semetara
-Sembler
-Selvaggio
-Seltzen
-Selser
-Sellek
-Sellberg
-Selking
-Seliba
-Selfe
-Seki
-Seifarth
-Seielstad
-Sehorn
-Sehl
-Segur
-Segrave
-Sefcovic
-Seeton
-Seek
-Seecharan
-Seeberger
-Sedman
-Sedano
-Secunda
-Seburg
-Sebold
-Sebastion
-Seate
-Seashore
-Seard
-Seang
-Seaney
-Seace
-Seabert
-Sczygiel
-Scurti
-Scullen
-Scroggy
-Scripter
-Scowden
-Scorsone
-Scoleri
-Scocca
-Scire
-Sciotti
-Sciera
-Scibilia
-Sciabica
-Schwisow
-Schwier
-Schweinert
-Schweinberg
-Schweiker
-Schweigart
-Schweickert
-Schwass
-Schwarzenbach
-Schwarts
-Schwarm
-Schwamberger
-Schwalenberg
-Schwabenbauer
-Schwabauer
-Schuttler
-Schutjer
-Schuring
-Schure
-Schuppert
-Schuner
-Schulthess
-Schulteis
-Schulle
-Schuhmacher
-Schuermann
-Schuepfer
-Schuele
-Schrott
-Schrope
-Schrauder
-Schrandt
-Schouviller
-Schonert
-Schonack
-Scholzen
-Scholnick
-Schoffstall
-Schoenthal
-Schoenstein
-Schoenhut
-Schoenhard
-Schoeneman
-Schoemer
-Schoborg
-Schnicke
-Schneidtmille
-Schneiders
-Schmunk
-Schmoyer
-Schmeider
-Schmale
-Schlottman
-Schlitzer
-Schlipp
-Schlink
-Schliesser
-Schlieper
-Schlesselman
-Schlensker
-Schleis
-Schlein
-Schleck
-Schlabaugh
-Schiver
-Schirpke
-Schindel
-Schimler
-Schiltz
-Schillings
-Schiffelbein
-Schiebel
-Schiaffino
-Schettig
-Schetrompf
-Schessler
-Scherler
-Scheppe
-Schepens
-Schellman
-Schellhammer
-Scheirman
-Scheibelhut
-Schei
-Schech
-Scheaffer
-Schattner
-Schatt
-Scharte
-Schappell
-Schanding
-Schanbacher
-Schan
-Schaming
-Schamburek
-Schaeffler
-Schadle
-Schadegg
-Schabot
-Schaberg
-Schaadt
-Scerra
-Scercy
-Scattergood
-Scarset
-Scarrow
-Scarritt
-Scarpaci
-Scarles
-Scarce
-Scanlin
-Scalice
-Scali
-Scahill
-Sazama
-Saysithideth
-Sayres
-Sayavong
-Sawlivich
-Sawczyszyn
-Savo
-Savina
-Savilla
-Savela
-Savasta
-Saurel
-Saupe
-Sauberan
-Satunas
-Sattley
-Satterley
-Satiago
-Satchel
-Saska
-Sarvey
-Saroukos
-Sarnowski
-Sarnoff
-Sarli
-Sarley
-Sarelas
-Sardi
-Sarconi
-Sarbacher
-Saragusa
-Saraceno
-Sar
-Sappenfield
-Sanzotta
-Santy
-Santorella
-Santopolo
-Santin
-Santiesteban
-Santhuff
-Santell
-Sansburn
-Sanpaolo
-Sanocki
-Sannon
-Sannella
-Sanlucas
-Sanjabi
-Sangrey
-Sangi
-Sanghvi
-Sangh
-Sanfiorenzo
-Sandrowicz
-Sandoual
-Sandora
-Sandlian
-Sandi
-Sandholm
-Samuelsen
-Samu
-Sampedro
-Samorano
-Samok
-Samide
-Samber
-Samain
-Saltzgaber
-Saltonstall
-Saltern
-Salte
-Salonia
-Salmond
-Sallas
-Saliva
-Saler
-Salek
-Saldibar
-Salabarria
-Sakon
-Sakelaris
-Sake
-Sajorda
-Sajor
-Sahni
-Sagoes
-Saglimbeni
-Sagehorn
-Sagayaga
-Safdeye
-Safa
-Sadlon
-Sadbury
-Sadahiro
-Sache
-Sacavage
-Sacarello
-Sables
-Sabean
-Sabates
-Sabataso
-Saager
-Saa
-Rzucidlo
-Rzeszutko
-Ryther
-Rylant
-Ryks
-Ryherd
-Ryhal
-Rygalski
-Rybacki
-Rviz
-Ruys
-Ruuska
-Ruttman
-Ruttinger
-Ruts
-Ruter
-Rutana
-Rusten
-Russnak
-Rusinko
-Rusi
-Rushiti
-Rushia
-Rushdan
-Ruscetti
-Rusboldt
-Ruppenthal
-Rupke
-Rundahl
-Rund
-Rummer
-Rummans
-Rumler
-Ruminski
-Rumfola
-Rull
-Ruise
-Ruggle
-Ruescher
-Ruegsegger
-Ruegger
-Rudzik
-Rudney
-Rudisail
-Rudis
-Rudduck
-Rucky
-Ruckdeschel
-Rubins
-Rubenzer
-Rozo
-Rox
-Rowzee
-Rownd
-Rowey
-Rowcliffe
-Rovinsky
-Roup
-Rottner
-Rothmiller
-Rothgery
-Rothbart
-Rotenberg
-Rotando
-Roswick
-Rosu
-Rossum
-Rossetto
-Rosseter
-Rosselli
-Roskos
-Roskopf
-Rosenholm
-Rosencranz
-Rosenbrook
-Rosella
-Rosebaugh
-Rosbough
-Rosan
-Roofe
-Ronson
-Ronhaar
-Rones
-Ronchetto
-Romeno
-Rombs
-Romanoski
-Romanini
-Romanick
-Roloson
-Rollock
-Rollheiser
-Rollans
-Rold
-Rolark
-Rokisky
-Roja
-Roik
-Rohaley
-Rognstad
-Rofkahr
-Roethel
-Roessner
-Roesser
-Roehrman
-Roehrenbeck
-Roegge
-Roefaro
-Rody
-Rodrigo
-Rodricks
-Rodino
-Rodillas
-Rodia
-Rodenbaugh
-Rodell
-Rodeiguez
-Rodarta
-Rockenbach
-Robley
-Robes
-Robertello
-Robello
-Robella
-Robak
-Roarx
-Rivlin
-Rivira
-Rivena
-Ritzert
-Ritell
-Ritcheson
-Riska
-Risberg
-Ripke
-Rinkel
-Riniker
-Ringman
-Ringlein
-Ringelheim
-Ringbloom
-Rinde
-Rincones
-Rimson
-Rimar
-Riliford
-Rihn
-Rihanek
-Rigoni
-Riggott
-Riffon
-Rievley
-Rieve
-Riesenweber
-Rieg
-Rieff
-Riedell
-Riechers
-Rieber
-Rieben
-Riebeling
-Ridpath
-Ridler
-Riddock
-Rickson
-Rickmon
-Rickley
-Rickie
-Richrdson
-Ribot
-Riblet
-Rhyme
-Rhoney
-Rhed
-Rhead
-Rezek
-Reynvaan
-Reynoza
-Reye
-Rexwinkle
-Revord
-Reven
-Reveal
-Reutlinger
-Reuland
-Reuer
-Retzler
-Rettke
-Retterbush
-Retort
-Reth
-Resureccion
-Restifo
-Resnikoff
-Rerko
-Repsher
-Repress
-Reppell
-Repinski
-Repenning
-Renze
-Rennix
-Renning
-Renney
-Rennell
-Renfer
-Rener
-Rendino
-Renaker
-Remmen
-Rementer
-Remenaric
-Relkin
-Reiterman
-Reist
-Reisser
-Reisling
-Reisert
-Reise
-Reio
-Reinmiller
-Reine
-Reill
-Reigner
-Reifler
-Reifel
-Reidenbach
-Rehnquist
-Rehler
-Rehfield
-Rehfeldt
-Rehberger
-Regler
-Regel
-Regehr
-Refsell
-Reen
-Reem
-Reeher
-Reech
-Reeber
-Redstone
-Redo
-Redish
-Redhage
-Redenz
-Redell
-Reddrick
-Redder
-Reckley
-Reckleben
-Recine
-Rebusi
-Rebuldela
-Rebera
-Rebell
-Rebeles
-Reavley
-Reau
-Reatherford
-Reaney
-Reaid
-Reagans
-Reado
-Razinger
-Razey
-Raza
-Rayside
-Raymos
-Raygosa
-Rawding
-Raw
-Ravens
-Ravenhorst
-Rav
-Rauzman
-Rautenberg
-Rausin
-Rauner
-Raudebaugh
-Rattner
-Ratleff
-Rathmell
-Rathgeb
-Ratermann
-Rataczak
-Rasher
-Rashdi
-Rashada
-Rasbery
-Rarang
-Rapose
-Rapa
-Ransick
-Ranos
-Rankhorn
-Raniero
-Rang
-Randzin
-Rancher
-Rances
-Rancatti
-Ramoutar
-Ramnarase
-Ramlakhan
-Ramiro
-Ramiriz
-Ramez
-Rameriez
-Rambus
-Ramaswamy
-Ramagos
-Ramadanovic
-Ramadan
-Ralko
-Ralat
-Rakel
-Raju
-Rajtar
-Raja
-Rairdon
-Raimo
-Raif
-Raiche
-Raheja
-Raheem
-Rahall
-Raguso
-Rafanan
-Rafalko
-Raes
-Radzavich
-Radune
-Radulescu
-Raduenz
-Radsek
-Radom
-Radell
-Rackett
-Racilis
-Rachi
-Rach
-Racedo
-Rabold
-Rabner
-Rabern
-Rabenstein
-Rabelo
-Quintas
-Quinlisk
-Quine
-Quincey
-Quilantang
-Quicksey
-Quereto
-Quelette
-Quaresma
-Quann
-Quall
-Quails
-Quaas
-Qadir
-Pytlovany
-Pybus
-Putaski
-Purwin
-Purter
-Purple
-Purol
-Purkiss
-Pummel
-Pults
-Pultorak
-Pullian
-Puller
-Pulham
-Puletasi
-Puidokas
-Puhuyaoma
-Puffinburger
-Puesey
-Puelo
-Puddephatt
-Pucillo
-Puc
-Przepiora
-Prys
-Pruzansky
-Pruyn
-Prust
-Prusinski
-Prus
-Pruette
-Provis
-Provine
-Proue
-Protz
-Prosonic
-Prophett
-Pronto
-Pronovost
-Proksch
-Prok
-Proietto
-Proia
-Proenza
-Probus
-Prizzi
-Privalsky
-Prisock
-Printy
-Primozich
-Priefert
-Pridham
-Preus
-Prettner
-Prester
-Pressel
-Preskar
-Premer
-Premeaux
-Preisinger
-Preisendorf
-Prehm
-Pregeant
-Preedom
-Pralle
-Prag
-Pradel
-Prabhakar
-Poyser
-Poupard
-Potterson
-Pottebaum
-Potolsky
-Poto
-Potes
-Postlethwaite
-Postin
-Pospishil
-Poskus
-Posik
-Portsche
-Portolese
-Porrini
-Poro
-Porietis
-Poppenhagen
-Poppen
-Poppel
-Pontonio
-Ponting
-Pono
-Pomposo
-Pomponio
-Pomplun
-Pomo
-Pomeranz
-Pomella
-Pomberg
-Pomares
-Polucha
-Polselli
-Polnau
-Pollins
-Pollara
-Polisky
-Polio
-Policz
-Policar
-Polchinski
-Polashek
-Polakowski
-Polaco
-Poitevin
-Poister
-Pointon
-Poinson
-Poinsett
-Pogar
-Poetter
-Podmore
-Poczobut
-Pockette
-Pocasangre
-Pobre
-Plys
-Plunket
-Plumpton
-Pluemer
-Plover
-Ploetz
-Ploense
-Plocek
-Plikerd
-Pleet
-Pleasure
-Plazza
-Plaxico
-Platko
-Platania
-Plassmann
-Plantier
-Plantenga
-Plancarte
-Plakke
-Pladson
-Pizzano
-Pivin
-Pittsinger
-Pittmann
-Pitsenbarger
-Pitonyak
-Pitmon
-Pitfield
-Pitek
-Pitassi
-Pistulka
-Pistole
-Piske
-Pishko
-Pisegna
-Pirnie
-Pirkey
-Pippitt
-Piorkowski
-Pinna
-Pinkton
-Pinks
-Pinkerman
-Pinchbeck
-Pimpare
-Pilloud
-Pillitteri
-Pilakowski
-Pikus
-Pikula
-Pikkarainen
-Pijanowski
-Pigao
-Piette
-Pietrzykowski
-Pietryga
-Pietropaolo
-Pies
-Piersaul
-Pieri
-Piepenbrink
-Pieloch
-Pieffer
-Picucci
-Pickl
-Pickhardt
-Picini
-Picerni
-Picaro
-Piatak
-Pianalto
-Piacquadio
-Phoun
-Phonharath
-Phomsoukha
-Phommaseng
-Phinazee
-Phillippy
-Phillians
-Philavong
-Phernetton
-Pheonix
-Phenes
-Pfotenhauer
-Pfleiderer
-Pfleider
-Pflanz
-Pfieffer
-Pfeiff
-Pfautz
-Pezzica
-Pevez
-Pevehouse
-Petrunger
-Petrullo
-Petrucco
-Petrson
-Petrilla
-Petrides
-Petrauskas
-Petkus
-Petiet
-Petgrave
-Peterschick
-Petaway
-Pesner
-Pesiri
-Pesin
-Pesa
-Pervine
-Pertubal
-Perschall
-Perrucci
-Perow
-Peroddy
-Perocho
-Perno
-Perloff
-Peria
-Pergerson
-Pereyda
-Pereria
-Pereiro
-Perdzock
-Perchinski
-Peraro
-Peques
-Pepito
-Pentek
-Pentaris
-Pennison
-Pennewell
-Pennacchio
-Penington
-Peninger
-Pengelly
-Penegar
-Pencek
-Penale
-Penaherrera
-Pembrook
-Pelyo
-Pelligra
-Pele
-Pekala
-Peine
-Peightal
-Peers
-Peerbolt
-Pedaci
-Ped
-Pectol
-Pecot
-Pecos
-Pecorelli
-Pechart
-Pebbles
-Peatry
-Pearle
-Peard
-Peakes
-Peaches
-Paywa
-Paysinger
-Payes
-Pawelczyk
-Pavoni
-Pavlovic
-Pavelec
-Pavan
-Paullus
-Pauldo
-Patuto
-Patruno
-Patoine
-Patock
-Patka
-Pata
-Pastiva
-Pastick
-Passwater
-Passineau
-Passi
-Pasquino
-Pasquel
-Pasquarelli
-Pason
-Paskert
-Pashley
-Pashia
-Partis
-Partido
-Parsi
-Parrill
-Parolari
-Parisio
-Pariser
-Parents
-Parduhn
-Parden
-Parcel
-Parbo
-Paray
-Papson
-Pappa
-Papillion
-Papik
-Paparella
-Papai
-Paoletto
-Pantone
-Pannhoff
-Pankowski
-Pangelina
-Pangallo
-Panda
-Panciera
-Panchana
-Panasci
-Panarella
-Paltanavage
-Palsgrove
-Palovick
-Paloma
-Palmiotto
-Palmiero
-Palmerton
-Palmerin
-Pallet
-Pallesen
-Pallazzo
-Palitti
-Palischak
-Paliotta
-Palifka
-Palenik
-Palecek
-Palczewski
-Palasik
-Palacious
-Pala
-Pahnke
-Pahls
-Paguirigan
-Pagnozzi
-Pagliarini
-Paduano
-Paddison
-Padavano
-Pacubas
-Packingham
-Packebush
-Pacius
-Paci
-Pacey
-Pacas
-Pac
-Ozolins
-Ozog
-Ozminkowski
-Oyuela
-Owston
-Ovsanik
-Overlie
-Overbo
-Oven
-Ovard
-Ourso
-Ouderkirk
-Ottis
-Otterholt
-Otomo
-Otley
-Osuch
-Ostling
-Ostlie
-Ostheimer
-Osterstuck
-Osterdyk
-Ostenson
-Osten
-Ossowski
-Osso
-Osmon
-Osle
-Oskins
-Osendorf
-Osburne
-Osawa
-Ortic
-Ortenzio
-Orrantia
-Orrala
-Orouke
-Orone
-Orofino
-Orkwis
-Orizetti
-Oris
-Orines
-Orgovan
-Orgain
-Orendorff
-Orendain
-Oree
-Orea
-Ordner
-Ordas
-Orbeck
-Oravec
-Opray
-Ophus
-Opela
-Opatrny
-Opara
-Oosterhof
-Onusko
-Onstead
-Onorata
-Onitsuka
-Onishea
-Oneel
-Ondrusek
-Omundson
-Omoyosi
-Omdahl
-Oltz
-Olton
-Olrich
-Olquin
-Olp
-Olmscheid
-Olm
-Olivio
-Oliverson
-Oliven
-Olis
-Oline
-Olexa
-Olesnevich
-Olesky
-Oleksiak
-Oldani
-Olcus
-Oksen
-Okolo
-Okojie
-Okerblom
-Okajima
-Ohrenich
-Ohms
-Ohmann
-Ohland
-Oguinn
-Ogiba
-Ogeen
-Oge
-Oganyan
-Offenbacker
-Oesterreich
-Oerther
-Oelschlager
-Odore
-Odonal
-Odonahue
-Odiase
-Odenwald
-Odens
-Odear
-Octave
-Ockey
-Ochwat
-Ochotorena
-Ochiltree
-Och
-Ocejo
-Ocano
-Obstfeld
-Obleness
-Obiesie
-Oberloh
-Oberfell
-Obannion
-Oakleaf
-Oak
-Nyswonger
-Nyseth
-Ny
-Nuvallie
-Nusom
-Nush
-Nurnberger
-Nunziata
-Nunev
-Nudelman
-Nucklos
-Nuce
-Novik
-Noury
-Notik
-Notari
-Nosis
-Nosel
-Northcraft
-Northcote
-Norskog
-Norrid
-Norquest
-Normann
-Norma
-Norlund
-Norley
-Norcott
-Norbeck
-Noonon
-Nooney
-Nonaka
-Nollora
-Nollman
-Nolda
-Nolau
-Nol
-Nogueras
-Nogowski
-Nogosek
-Noftsger
-Noeldner
-Nocum
-Nocket
-Nocar
-Noaks
-Niverson
-Nittinger
-Nitterhouse
-Nitkowski
-Niten
-Nitchals
-Nissila
-Nishiguchi
-Nippert
-Nippe
-Ninos
-Nine
-Nimocks
-Nimmer
-Nilsby
-Nill
-Nikolas
-Nikirk
-Niimi
-Nii
-Niheu
-Nihei
-Nigg
-Niforos
-Niezgoda
-Nieva
-Niethamer
-Niesman
-Nienow
-Niedermayer
-Niedecken
-Nied
-Niebyl
-Nie
-Nicotera
-Nicolet
-Nicolaisen
-Nickolls
-Nickol
-Nickleson
-Nickelston
-Nichois
-Nicewarner
-Niceswander
-Nicarry
-Nicar
-Nhep
-Ngueyn
-Nguen
-Ngov
-Nghe
-Newsted
-Newnum
-Newer
-Newburg
-Newall
-Nevland
-Neugin
-Neuenfeldt
-Neuby
-Nestel
-Nesseth
-Nervis
-Nerpio
-Nenninger
-Nemzek
-Nemoede
-Nemer
-Nelmark
-Nellem
-Neithercutt
-Neiswander
-Neisius
-Neish
-Neihart
-Neiderhiser
-Nehmer
-Negrisor
-Negrette
-Nefzger
-Neeper
-Neelon
-Needels
-Needam
-Nealley
-Nealen
-Nealeigh
-Nayee
-Nawn
-Navone
-Navejas
-Navedo
-Navar
-Naud
-Natiello
-Nathoo
-Nasson
-Naselli
-Nase
-Naschke
-Narez
-Nares
-Nappier
-Napoletano
-Napihaa
-Naone
-Nannini
-Nannie
-Nania
-Nanda
-Nampel
-Nalepka
-Najjar
-Nahass
-Naeve
-Naecker
-Nadell
-Myrum
-Myint
-Myhr
-Myerscough
-Muterspaw
-Mutana
-Muszar
-Mustafaa
-Must
-Mussenden
-Mussen
-Mushett
-Musetti
-Musemeche
-Musel
-Muscaro
-Murrock
-Murrie
-Murrain
-Murilla
-Murelli
-Murayama
-Murai
-Munzell
-Munteanu
-Munt
-Munshower
-Munlin
-Muni
-Munding
-Munda
-Mulvehill
-Mulry
-Mulliner
-Mullice
-Mullaly
-Muhr
-Muhn
-Mugica
-Muether
-Muehlberger
-Muehlbach
-Muccia
-Mrowka
-Mrotz
-Mrochek
-Mracek
-Moznett
-Moyse
-Moxham
-Mowris
-Moutoux
-Moussette
-Mousley
-Moun
-Moulinos
-Mostrom
-Mostert
-Mosses
-Moskovitz
-Mosinski
-Mosgrove
-Mosebach
-Moschetto
-Morway
-Morthland
-Morta
-Morsbach
-Morreau
-Morowski
-Moroles
-Morlas
-Morgenstein
-Morasch
-Moranda
-Moralis
-Moraitis
-Moraites
-Moote
-Moorcroft
-Montier
-Montie
-Montesa
-Monteros
-Montefusco
-Montecalvo
-Montazami
-Montaya
-Monsky
-Monsegur
-Monnet
-Monjaras
-Moniot
-Monholland
-Monet
-Monestine
-Monds
-Mondry
-Mondo
-Mondino
-Momsen
-Momaya
-Molski
-Mollins
-Molitoris
-Mokbel
-Moistner
-Moilien
-Mohring
-Mohrbacher
-Mogro
-Moerman
-Moellman
-Modero
-Moczo
-Mocco
-Mocarski
-Mobus
-Mizukami
-Miyares
-Miyahara
-Miyagishima
-Mittendorf
-Mittelstadt
-Mitsakos
-Mith
-Mita
-Misura
-Missler
-Misrahi
-Misnick
-Misemer
-Miscovich
-Miscavage
-Misasi
-Mirich
-Miravalle
-Miras
-Miramon
-Mioduszewski
-Mio
-Minster
-Minnier
-Minneweather
-Minnehan
-Minkel
-Miners
-Mineah
-Mincher
-Minatra
-Minato
-Minari
-Minardo
-Milush
-Miltner
-Milster
-Milovich
-Milman
-Millraney
-Millot
-Millisor
-Milliren
-Millimaki
-Millich
-Milland
-Milkovich
-Militano
-Mileti
-Milek
-Mildren
-Milder
-Milch
-Milbert
-Milbauer
-Milanowski
-Milanese
-Mikulecky
-Mikulak
-Mikita
-Mikelsen
-Mihlfeld
-Mihatsch
-Mihalkovic
-Mihalko
-Mignogna
-Migl
-Miessner
-Mieras
-Midcap
-Mickleberry
-Michocki
-Michelman
-Michales
-Michalenko
-Mias
-Mhoon
-Mezza
-Mezquita
-Mezera
-Meyette
-Meyerhoffer
-Meyerhofer
-Meury
-Meuller
-Mettle
-Metter
-Mettee
-Metta
-Metroka
-Metevier
-Metaxas
-Mestrovich
-Messa
-Mesidor
-Meschino
-Meryman
-Merrett
-Merrbach
-Merone
-Merkling
-Merickel
-Mercante
-Meo
-Mensinger
-Menist
-Menino
-Menhennett
-Mengarelli
-Menez
-Menesez
-Mendelowitz
-Mencl
-Men
-Mellors
-Mellom
-Mellencamp
-Mellekas
-Melkonian
-Melish
-Meleski
-Melero
-Melchin
-Melbert
-Melandez
-Melander
-Meisels
-Meighen
-Mehtala
-Mehserle
-Meholick
-Mehalic
-Megna
-Meginnis
-Meggitt
-Meggers
-Meger
-Meeter
-Meeske
-Meeder
-Medows
-Mednick
-Medich
-Mediate
-Median
-Medez
-Medbery
-Medak
-Mebus
-Meason
-Meanor
-Meager
-Mcwethy
-Mcvean
-Mcthune
-Mcsweeny
-Mcspedon
-Mcsharry
-Mcravin
-Mcraven
-Mcquistion
-Mcquilkin
-Mcquaide
-Mcquage
-Mcpherren
-Mcpeck
-Mcnaney
-Mcmindes
-Mcmilliam
-Mcmenomy
-Mcmarlin
-Mcmahill
-Mcloy
-Mcloone
-Mclear
-Mclaughlan
-Mckoan
-Mckerley
-Mckerchie
-Mckeone
-Mckennie
-Mckellan
-Mckaig
-Mcinally
-Mchendry
-Mcgwier
-Mcguirt
-Mcgugin
-Mcgready
-Mcgraff
-Mcgrade
-Mcgorry
-Mcglothian
-Mcglory
-Mcgavisk
-Mcgarrigle
-Mcever
-Mcelmurry
-Mcelheny
-Mcelhattan
-Mcdaries
-Mcdargh
-Mccumiskey
-Mccredie
-Mccraven
-Mccoyle
-Mccoppin
-Mccombie
-Mccloughan
-Mccleve
-Mcclenty
-Mcclennan
-Mcclees
-Mccleer
-Mcclearen
-Mccaskin
-Mccartin
-Mccamy
-Mccammack
-Mccaman
-Mccalop
-Mccaffity
-Mcburrows
-Mcburrough
-Mcbrady
-Mcalphin
-Mcalhaney
-Mcaboy
-Mazikowski
-Mazar
-Mayzes
-Maymon
-Mayeski
-Maycumber
-Mayala
-Maxin
-Maute
-Mauss
-Mauritz
-Maurey
-Maulin
-Matuszeski
-Matusik
-Matuseski
-Mattu
-Mattier
-Matthys
-Matteucci
-Matsuhara
-Matsen
-Matrejek
-Matlick
-Mathewes
-Mathal
-Matey
-Matesic
-Materna
-Matelic
-Matarese
-Matalavage
-Mataalii
-Mastrocovi
-Mastrobuono
-Mastoris
-Mastera
-Mastenbrook
-Mastella
-Massaglia
-Maslyn
-Masley
-Masin
-Masiclat
-Mashiah
-Mashek
-Mascot
-Maschke
-Maschio
-Masch
-Marzinske
-Marxen
-Marville
-Marushia
-Marungo
-Maruffo
-Maruca
-Martinz
-Martinetto
-Martinetti
-Martinea
-Martincic
-Martig
-Marske
-Marshalsea
-Marsette
-Marroguin
-Marreo
-Marquena
-Marona
-Marola
-Marmie
-Markstrom
-Marksbury
-Markrof
-Markovitz
-Markevich
-Markette
-Marius
-Maritt
-Marionneaux
-Marinos
-Marinese
-Maricich
-Marhoefer
-Margiotta
-Maren
-Marecki
-Marcone
-Marcoline
-Marcolina
-Marchuk
-Marcelynas
-Marcaida
-Marbus
-Marazzi
-Marazas
-Marashio
-Maranville
-Marani
-Marandi
-Marander
-Marade
-Mapalo
-Manza
-Manylath
-Manvelyan
-Manusyants
-Mantuano
-Mantsch
-Mantell
-Mantano
-Mansmann
-Manship
-Manozca
-Mannie
-Mannes
-Manliguis
-Manigold
-Maniatis
-Mania
-Mangon
-Manginelli
-Mangicavallo
-Mangiaracina
-Mangas
-Mangaoang
-Manford
-Mandiola
-Manchini
-Mamoran
-Mammucari
-Mamer
-Malys
-Malvin
-Malvaez
-Malusky
-Maltie
-Maltbie
-Malphurs
-Malotte
-Malloch
-Malkasian
-Malit
-Malis
-Malinski
-Malinchalk
-Malicote
-Malich
-Maletz
-Malesky
-Maler
-Malekzadeh
-Maleh
-Malech
-Malbaurn
-Malara
-Malakan
-Malakai
-Malafronte
-Malady
-Makley
-Makekau
-Majmundar
-Majersky
-Maiten
-Mainiero
-Mainello
-Mailes
-Maigret
-Mahusay
-Maharg
-Mahany
-Maguet
-Magowan
-Magone
-Magnall
-Magleby
-Maglaya
-Maginn
-Magin
-Magil
-Maggs
-Maggie
-Magelssen
-Magaw
-Magario
-Magallanez
-Maeweather
-Madura
-Madrueno
-Madinger
-Madho
-Maderas
-Maddry
-Madaris
-Maczko
-Macugay
-Macrowski
-Macomb
-Macnab
-Maclaurin
-Maclauchlan
-Mackynen
-Macksoud
-Macks
-Mackney
-Mackintosh
-Mackinder
-Maciej
-Macie
-Machowski
-Machol
-Machinsky
-Machalek
-Macchione
-Macall
-Macafee
-Mabus
-Mabins
-Mabane
-Maassen
-Lysen
-Lynaugh
-Lykens
-Luvian
-Luttenegger
-Lutkins
-Lutchman
-Lutao
-Luskin
-Luskey
-Lungren
-Lundburg
-Lumm
-Lulic
-Lulewicz
-Lukaszewicz
-Luiso
-Luhnow
-Lugg
-Lugardo
-Lufsey
-Luetmer
-Luepke
-Ludtke
-Luczkowiak
-Luckhardt
-Luckenbaugh
-Lucken
-Luchenbill
-Lubke
-Lubell
-Lube
-Lubbock
-Lozon
-Loze
-Lozaya
-Loynd
-Loxley
-Lowthorp
-Lowek
-Loviska
-Lovig
-Lovgren
-Loverink
-Lovensheimer
-Lounsbery
-Loukota
-Loughnan
-Loughborough
-Loudenslager
-Lotson
-Lothspeich
-Lotan
-Lossa
-Losolla
-Losier
-Lorna
-Lorimor
-Lori
-Lorett
-Lorens
-Loreg
-Loreaux
-Lorandeau
-Loque
-Lopus
-Lopriore
-Lootens
-Lookadoo
-Lonneman
-Lonn
-Longiotti
-Longhini
-Longendyke
-Longbotham
-Londre
-Londagin
-Lonabaugh
-Lomu
-Lominy
-Lomboy
-Lomartire
-Lollie
-Lokker
-Loia
-Loi
-Logrono
-Logosso
-Loggains
-Loflen
-Lofink
-Lofgreen
-Loewenthal
-Loeurm
-Loerzel
-Loeppke
-Loepp
-Loegering
-Lodholz
-Lockey
-Lockbaum
-Lochte
-Lochan
-Lobur
-Loban
-Llorca
-Lloid
-Llewlyn
-Llanez
-Liwanag
-Livernoche
-Litzenberg
-Litano
-Lissard
-Lisko
-Liscio
-Lipskar
-Lipscombe
-Lipschutz
-Lipphardt
-Lipinsky
-Lipani
-Lions
-Linnertz
-Links
-Linkowski
-Linko
-Lingafelter
-Lingafelt
-Lindzy
-Lindman
-Lindert
-Lindersmith
-Linders
-Linderholm
-Lindburg
-Lindaman
-Lincicome
-Linberg
-Linamen
-Limke
-Lilyquist
-Liloia
-Lillpop
-Lillick
-Lillich
-Lilien
-Lighter
-Liggin
-Lifton
-Lifsey
-Lifford
-Lifer
-Liest
-Liem
-Lidke
-Liddiard
-Lick
-Lichtenwalner
-Lichtenfeld
-Lichak
-Licerio
-Licausi
-Licause
-Libman
-Libera
-Liaw
-Leya
-Lewitt
-Lewandoski
-Levoy
-Levitin
-Leviston
-Leventer
-Levenhagen
-Leveillee
-Leve
-Lettre
-Letsche
-Lesiak
-Leshinsky
-Leriche
-Leri
-Lepri
-Leppke
-Lepping
-Lepp
-Lepo
-Leonhard
-Leonello
-Leona
-Leofsky
-Lensing
-Lenoci
-Lennington
-Lennihan
-Lenn
-Lenkiewicz
-Lenis
-Lenertz
-Lenehan
-Lenci
-Lenarz
-Lemucchi
-Lemick
-Lelah
-Lelacheur
-Lejenne
-Leitman
-Leithoff
-Leistiko
-Leipert
-Leibert
-Leibe
-Lehnertz
-Leheny
-Lehar
-Lehane
-Legorreta
-Legoff
-Legleu
-Legions
-Leggat
-Leggans
-Legaard
-Left
-Leesmann
-Leemaster
-Leemans
-Ledwig
-Ledlie
-Lederhos
-Lecorchick
-Leclear
-Leclare
-Leckman
-Leckbee
-Lebrecque
-Lebahn
-Leavenworth
-Leatherberry
-Leamer
-Leady
-Lazzeri
-Lazarini
-Lazarine
-Laza
-Layng
-Lawshe
-Lawman
-Lawer
-Laware
-Lavista
-Lavis
-Laviola
-Lavinder
-Lavern
-Lavene
-Lavelett
-Lavanway
-Lavanchy
-Lavalette
-Lavala
-Lavadie
-Lava
-Lautzenheiser
-Lautt
-Lauser
-Laurimore
-Lauridsen
-Laurey
-Laurenti
-Laurente
-Laurenitis
-Laurelli
-Laukitis
-Laud
-Lattrell
-Lattner
-Latterell
-Latten
-Lattari
-Lattanzi
-Latif
-Lastufka
-Lasswell
-Lasseson
-Lassa
-Laslo
-Laski
-Lashute
-Lashmet
-Larrieu
-Larrier
-Larribeau
-Laronda
-Larney
-Larita
-Lariccia
-Largin
-Larez
-Lardin
-Larch
-Lapusnak
-Laprete
-Lapre
-Lapradd
-Lapore
-Lapinsky
-Lapid
-Laperriere
-Laos
-Lantto
-Lantaff
-Lanson
-Lanois
-Lanius
-Lanini
-Languirand
-Languell
-Langstraat
-Langreck
-Langkabel
-Langill
-Langeness
-Langefels
-Langarica
-Langager
-Lanfranco
-Lanfear
-Lanfair
-Landvatter
-Landolfi
-Landborg
-Lanagan
-Lampson
-Lampshire
-Lamoreux
-Lambrukos
-Lambrakis
-Lamborne
-Lambing
-Lamax
-Lamarch
-Lallave
-Lalka
-Lais
-Lairy
-Laiben
-Lahren
-Lahn
-Lahmers
-Lah
-Lagory
-Laforrest
-Laflore
-Lafkas
-Lafield
-Lafay
-Laduc
-Laderer
-Ladell
-Ladakakos
-Lacoy
-Lacki
-Lacio
-Lacinski
-Lachowsky
-Lacerda
-Lace
-Lacasa
-Labruzzo
-Labre
-Labove
-Laberpool
-Labbadia
-Labarba
-Labady
-Kytle
-Kym
-Ky
-Kwasnicki
-Kwapniewski
-Kwang
-Kuzminski
-Kuzel
-Kuwahara
-Kut
-Kusko
-Kusick
-Kuruvilla
-Kurtulus
-Kurtis
-Kurtich
-Kurkowski
-Kurkeyerian
-Kuritz
-Kurelko
-Kurcaba
-Kuralt
-Kuprewicz
-Kupetz
-Kuntzman
-Kunishige
-Kundtz
-Kulwicki
-Kulow
-Kulis
-Kuhlmey
-Kufel
-Kues
-Kuehnel
-Kudrick
-Kudlacik
-Kudej
-Kuchel
-Kuchan
-Kucha
-Kuboushek
-Kubishta
-Kubilus
-Kubert
-Kubeika
-Kubasik
-Kuakini
-Krzyston
-Krzeczkowski
-Kryzak
-Krygier
-Kry
-Krupski
-Krupke
-Krupansky
-Krumvieda
-Krumholz
-Krumbholz
-Krudop
-Krstic
-Krovious
-Krommes
-Kromm
-Krolak
-Kroes
-Kroening
-Kroener
-Kritter
-Kristy
-Krisman
-Kriege
-Kridel
-Kreul
-Kretsinger
-Kretlow
-Kresal
-Krejsa
-Kreines
-Kreig
-Krefft
-Krauskopf
-Kratt
-Krassow
-Krasnecky
-Krance
-Krajcik
-Krail
-Kraham
-Krack
-Kozloff
-Kozlak
-Kozera
-Kozee
-Koyama
-Kowalowski
-Kowalchuk
-Kovalovsky
-Kovalcheck
-Koutz
-Kotts
-Kostyk
-Kosty
-Kostohryz
-Kostiuk
-Kostis
-Kostick
-Kosofsky
-Kosman
-Kosin
-Kosier
-Kosen
-Kosco
-Koschnitzki
-Kosbab
-Kosack
-Korzep
-Korvin
-Kortkamp
-Kornrumpf
-Korfhage
-Kordus
-Korchnak
-Koppinger
-Kopinski
-Kopald
-Kooyman
-Koopmans
-Koonz
-Kooker
-Kooch
-Konzal
-Konye
-Kontogiannis
-Konruff
-Konowal
-Konopnicki
-Konopacky
-Konopacki
-Konig
-Konicki
-Konecni
-Kondel
-Konakowitz
-Komlos
-Kombe
-Komatz
-Kolm
-Kollmeyer
-Kollasch
-Kolin
-Kolden
-Kolbo
-Kolata
-Kolaga
-Kokocinski
-Koko
-Koinzan
-Kohrman
-Kohnz
-Kogler
-Koets
-Koerwitz
-Koep
-Koenecke
-Koehly
-Kockler
-Kocka
-Kociolek
-Kobie
-Knudsuig
-Knoten
-Knotek
-Knole
-Knochel
-Knobbe
-Knightstep
-Knigge
-Knife
-Kniess
-Knickelbein
-Kneisler
-Kneedler
-Knedler
-Knall
-Knable
-Klym
-Klussmann
-Kluever
-Kludt
-Klouda
-Klotzbach
-Klosowski
-Klockars
-Klinker
-Klingshirn
-Klingelhoets
-Klingelhoefer
-Klena
-Klempa
-Klemisch
-Klemens
-Klemencic
-Klemen
-Kleinhenz
-Klecha
-Klebanow
-Klebanoff
-Klave
-Klang
-Klammer
-Klamet
-Klaers
-Klacic
-Kjar
-Kivisto
-Kivel
-Kitzrow
-Kitzerow
-Kitz
-Kiszka
-Kistenmacher
-Kisicki
-Kisak
-Kirylo
-Kirson
-Kirschke
-Kirmer
-Kirakosyan
-Kinton
-Kint
-Kinsland
-Kinlock
-Kini
-Kingsolver
-Kingdon
-Kindschuh
-Kindlimann
-Kindl
-Kindberg
-Kinas
-Kinaj
-Kimberl
-Killoy
-Killette
-Killer
-Killary
-Kilgor
-Kildoo
-Kilborne
-Kilbert
-Kil
-Kijek
-Kiewiet
-Kiever
-Kiesz
-Kiessling
-Kielar
-Kiehn
-Khosravi
-Kholodivker
-Kho
-Khatib
-Khatcherian
-Keyworth
-Keylor
-Kewanwytewa
-Kettman
-Kettlewell
-Kettl
-Kettelle
-Kethcart
-Ketay
-Keslar
-Kesby
-Kerne
-Kerk
-Kercy
-Kerchal
-Kerbel
-Kenrick
-Kennis
-Kennin
-Kennemuth
-Kennelty
-Kenkel
-Kemmerling
-Kemfort
-Kelstrom
-Kellow
-Kellom
-Kelk
-Keliiholokai
-Kelcourse
-Kekua
-Keiger
-Keglovic
-Keesecker
-Keehne
-Keedah
-Keding
-Keavney
-Keanu
-Keagy
-Keaffaber
-Keadle
-Kazemi
-Kazanowski
-Kazanjian
-Kazan
-Kawelo
-Kavanah
-Kautzer
-Kaukola
-Kaufusi
-Kauffeld
-Katowicz
-Katos
-Katheder
-Kately
-Kata
-Kastor
-Kastl
-Kassouf
-Kassler
-Kassam
-Kaskey
-Kasimis
-Kasdon
-Kaschmitter
-Kaschel
-Karratti
-Karpinen
-Karpen
-Karmann
-Karlovich
-Karlen
-Karkut
-Karin
-Kariger
-Karaffa
-Kapsos
-Kapps
-Kapnick
-Kanoa
-Kanney
-Kannas
-Kanduth
-Kampman
-Kamimura
-Kamens
-Kamemoto
-Kalvaitis
-Kaltenhauser
-Kalloch
-Kaller
-Kallenberg
-Kaliszuk
-Kalinoski
-Kalinger
-Kalich
-Kalfus
-Kalfayan
-Kalert
-Kalenkoski
-Kalen
-Kaleiwahea
-Kaleel
-Kaldas
-Kalawe
-Kalathas
-Kakos
-Kaiserman
-Kais
-Kailiponi
-Kaighn
-Kahuhu
-Kahoun
-Kahen
-Kahaleua
-Kah
-Kagy
-Kager
-Kagarise
-Kaffka
-Kaempfer
-Kaemmerer
-Kaelker
-Kady
-Kadner
-Kadlubowski
-Kadakia
-Kacynski
-Kacic
-Kach
-Kabrick
-Justman
-Justine
-Jurina
-Jurik
-Jurcik
-Junius
-Jumalon
-Julca
-Jui
-Jugan
-Juart
-Jove
-Journeay
-Joung
-Jou
-Josilowsky
-Josephsen
-Josephpauline
-Jorde
-Joor
-Jonte
-Jolie
-Johnke
-Johanningmeie
-Joerg
-Jochems
-Jilk
-Ji
-Jhonston
-Jez
-Jethva
-Jethro
-Jest
-Jesko
-Jerrel
-Jerich
-Jentsch
-Jensvold
-Jennrich
-Jenious
-Jenck
-Jemenez
-Jelle
-Jelinski
-Jeleniewski
-Jelen
-Jeffrie
-Jefford
-Jedik
-Jebbett
-Jayes
-Javarone
-Jauss
-Jaus
-Jaskolski
-Jasionowski
-Jasin
-Jarzynka
-Jarva
-Jaruis
-Jaross
-Jaret
-Jaquess
-Janovich
-Jannusch
-Jann
-Jankins
-Janitz
-Janicke
-Jangula
-Jamon
-Jammer
-Jamie
-Jameel
-Jakupcak
-Jakubczak
-Jakowich
-Jakeman
-Jagneaux
-Jagher
-Jaekel
-Jadin
-Jacobowitz
-Jackstadt
-Jackowiak
-Jackiewicz
-Jackels
-Jabour
-Izsak
-Izarraras
-Iwasa
-Iwanyszyn
-Iulo
-Iuliucci
-Iturbide
-Itkin
-Isby
-Isam
-Isales
-Isackson
-Irizarri
-Iribarren
-Irani
-Iracheta
-Iott
-Ioli
-Iodice
-Ioannidis
-Intriago
-Interrante
-Intermill
-Insco
-Inloes
-Ingrim
-Inglin
-Inglese
-Ingala
-Infield
-Inestroza
-Ineson
-Indest
-Incorvaia
-Inacio
-Imparato
-Imm
-Imfeld
-Imaizumi
-Illescas
-Ikuta
-Iino
-Ignasiak
-Igler
-Igel
-Iffert
-Idris
-Idema
-Ichinotsubo
-Ichinose
-Iburg
-Iarossi
-Iannaccone
-Iams
-Iacovissi
-Hytros
-Hyten
-Hysinger
-Hylle
-Hylinski
-Hvizdos
-Huyghe
-Huus
-Hutsler
-Hutchen
-Hustus
-Huso
-Husni
-Huslander
-Huska
-Hush
-Huschle
-Husayko
-Husanini
-Hurtis
-Hurter
-Hurrington
-Hurrigan
-Hurl
-Hurban
-Hunten
-Hundemer
-Humerickhouse
-Humbel
-Hulstine
-Hulm
-Huitzacua
-Hughlett
-Huger
-Huewe
-Huels
-Hudrick
-Hudek
-Huckeby
-Hubright
-Hubric
-Hubel
-Hsi
-Hryniewich
-Hrovat
-Hronick
-Hribar
-Hozempa
-Hoxworth
-Howryla
-Howison
-Howieson
-Howdeshell
-Hoving
-Hovi
-Hovelson
-Hovell
-Houten
-Housten
-Housekeeper
-Houpe
-Houp
-Houman
-Houghland
-Hougas
-Hothan
-Hotchkin
-Hoste
-Hosie
-Hosendove
-Hoseman
-Hoseck
-Hoschouer
-Horwood
-Horuath
-Hortillosa
-Horth
-Horsfield
-Horniak
-Hornby
-Hormander
-Horii
-Hores
-Horaney
-Horal
-Hopskins
-Hoppesch
-Hoopengardner
-Hoomana
-Hoolihan
-Hoof
-Honzel
-Honse
-Honohan
-Hongo
-Hongerholt
-Homola
-Homerding
-Homchick
-Holy
-Holvey
-Holsing
-Holshue
-Hollenberg
-Hollemon
-Holla
-Holka
-Holifeild
-Holets
-Holdt
-Holdness
-Holdiness
-Holda
-Holcey
-Holbein
-Hoium
-Hoisl
-Hohstadt
-Hohowski
-Hoh
-Hogy
-Hogsten
-Hogsette
-Hoggins
-Hofler
-Hoffstot
-Hoffschneider
-Hoffee
-Hoevel
-Hoernemann
-Hoeper
-Hoener
-Hoene
-Hoeke
-Hoeg
-Hoeflich
-Hoeffner
-Hoeffliger
-Hoecker
-Hoeck
-Hoe
-Hodgen
-Hodan
-Hockema
-Hochschild
-Hobkirk
-Hnatow
-Hledik
-Hjalmarson
-Hitzler
-Hittman
-Hisman
-Hirstein
-Hirschhorn
-Hirsche
-Hirkaler
-Hiraoka
-Hiraki
-Hipwell
-Hippo
-Hinsey
-Hinkey
-Hinish
-Hingst
-Hingle
-Hindin
-Hinahon
-Himelstein
-Hillburg
-Hillaire
-Hilgert
-Hildred
-Hildahl
-Hilcher
-Higueros
-Higle
-Higinbotham
-Hieserich
-Hidvegi
-Hidrogo
-Hickton
-Hickonbottom
-Hickert
-Hibl
-Heyveld
-Heydel
-Hevner
-Hevesy
-Heverley
-Heverin
-Heusley
-Heuberger
-Hettwer
-Hett
-Heter
-Hesters
-Hessong
-Hessing
-Hessenthaler
-Hessell
-Hessee
-Hesby
-Herzberger
-Herwood
-Herting
-Herscher
-Herschel
-Herrling
-Herrig
-Herriage
-Herrel
-Herre
-Herpolsheimer
-Hernanders
-Hermosura
-Hermie
-Hermens
-Herklotz
-Herkert
-Herby
-Herbster
-Herbison
-Herbers
-Herbein
-Heppeard
-Henrick
-Henrey
-Henretta
-Henneberg
-Hennagin
-Henington
-Henifin
-Heney
-Henesey
-Henehan
-Hendy
-Henderosn
-Hender
-Hendee
-Henby
-Henaire
-Hemrich
-Hemmie
-Hemmes
-Hemlepp
-Heminover
-Hemauer
-Helvy
-Helsing
-Helmy
-Helmstetler
-Helmink
-Helmcamp
-Hellar
-Hellams
-Helker
-Helgesen
-Helfritz
-Helena
-Hele
-Hektner
-Hejl
-Heitschmidt
-Heitger
-Heinzmann
-Heinzen
-Heininger
-Heineken
-Heimrich
-Heimbaugh
-Heiermann
-Hehr
-Hegre
-Hegmann
-Hefler
-Hefflinger
-Heese
-Heeney
-Heemstra
-Hedrich
-Hedgespeth
-Hedemann
-Hedegore
-Heddlesten
-Heckenberg
-Hebig
-Hebden
-Hebda
-Heatly
-Heathershaw
-Hearson
-Heally
-Healan
-Heads
-Hazleton
-Hazarika
-Hayhoe
-Haydal
-Hayburn
-Hawthrone
-Hawman
-Hawkey
-Hawf
-Havice
-Havercroft
-Hautamaki
-Hauskins
-Haulter
-Haugrud
-Hauan
-Hatzenbuhler
-Hatzenbuehler
-Hattub
-Hattier
-Hatteyer
-Hatstat
-Hathway
-Hataway
-Hassick
-Hassian
-Hasselman
-Hasselbarth
-Hasper
-Haspel
-Haske
-Hasgill
-Hasen
-Harviston
-Harvilla
-Harvilicz
-Harver
-Hartzer
-Hartup
-Hartsough
-Hartsch
-Hartly
-Hartlep
-Hartlein
-Hartkopf
-Harthun
-Hartfiel
-Hartery
-Hartert
-Hartage
-Harsey
-Harrey
-Harrett
-Harral
-Haroutunian
-Harmeyer
-Harlowe
-Harloff
-Hardyman
-Hards
-Hardrict
-Hardmon
-Hardigree
-Hardenburg
-Hardell
-Hardebeck
-Hardaman
-Hardaker
-Harcey
-Harbick
-Harajli
-Happer
-Hapgood
-Hanstein
-Hansbury
-Hanold
-Hanohano
-Hano
-Hanns
-Hannifan
-Hannes
-Hanko
-Hanis
-Hanenkrat
-Hanemann
-Hanek
-Handzel
-Handwerker
-Handwerk
-Handsaker
-Handrick
-Handelsman
-Handal
-Hancin
-Hanbury
-Hanaway
-Hanahan
-Hams
-Hammerly
-Hammeren
-Hammatt
-Hammarlund
-Hamling
-Hamiss
-Hamiel
-Hamelinck
-Hambrecht
-Halo
-Hallinger
-Hallick
-Halifax
-Halgrimson
-Halfmann
-Halder
-Hald
-Halburnt
-Halberstam
-Halaby
-Haker
-Haken
-Haine
-Hagos
-Hagmaier
-Hagenson
-Hagene
-Hagenbrok
-Hagenbaugh
-Hafter
-Haffling
-Haeger
-Haegele
-Hade
-Hadder
-Hadcock
-Haczynski
-Hackle
-Hachigian
-Hachez
-Habrock
-Habowski
-Habina
-Haberkamp
-Habben
-Habash
-Haaby
-Gyatso
-Gwalthney
-Guziec
-Guziak
-Guys
-Guynup
-Gutzwiller
-Guttmann
-Gutting
-Gutteridge
-Guterrez
-Guszak
-Gusky
-Gusciora
-Gurry
-Gurrieri
-Guritz
-Gunst
-Gundry
-Gundert
-Gulsvig
-Gulisano
-Gulinson
-Guittar
-Guitard
-Guisti
-Guiski
-Guinto
-Guinther
-Guinnip
-Guilliam
-Guillerault
-Guilfoil
-Guijarro
-Guidetti
-Guiberteau
-Guger
-Guevera
-Guetersloh
-Guerini
-Guella
-Guedea
-Guecho
-Gudis
-Guckin
-Guberman
-Guardipee
-Guanio
-Guagliardo
-Grzegorek
-Grybel
-Grunst
-Grunlien
-Grundmeier
-Grundhoefer
-Grun
-Grumer
-Grum
-Gruhn
-Gruger
-Grudt
-Growney
-Grotts
-Groton
-Grotelueschen
-Grotberg
-Grosswiler
-Gronowski
-Gronosky
-Gronewald
-Gronert
-Groholski
-Groetken
-Groeschel
-Groene
-Grodecki
-Groceman
-Griswell
-Griseta
-Grinkley
-Grinie
-Grinberg
-Grimmius
-Grieme
-Greytak
-Grett
-Grenke
-Grenda
-Greinke
-Greeves
-Greever
-Greet
-Greenlun
-Greenler
-Greenham
-Grebin
-Grboyan
-Grawburg
-Grattelo
-Grassham
-Granvold
-Granthan
-Gransky
-Grandolfo
-Grandmaison
-Grandchild
-Granbois
-Gramolini
-Grammatica
-Gramc
-Grajek
-Grahe
-Gragson
-Gragert
-Grage
-Grafenstein
-Graetz
-Gracely
-Graceffo
-Grabarczyk
-Gouzalez
-Gouse
-Gourdin
-Goudelock
-Goud
-Gottlob
-Gottke
-Gotthelf
-Gotthard
-Gotter
-Gotsche
-Gotschall
-Gosz
-Goston
-Gossack
-Gosdin
-Gorz
-Gorrill
-Gornto
-Gornie
-Gorenberg
-Gorelli
-Gordinier
-Gora
-Gopin
-Gopie
-Goolman
-Goolden
-Goodsite
-Goodmanson
-Goodly
-Goodkin
-Goodiel
-Gonzolas
-Gonsior
-Gonseth
-Gonez
-Gonchoff
-Gonales
-Gomzales
-Gomora
-Golly
-Gollihar
-Gollhofer
-Golka
-Golinski
-Golen
-Golembeski
-Golemba
-Goldwater
-Goldstock
-Goldklang
-Goldbeck
-Golda
-Gojmerac
-Goich
-Gohlke
-Goger
-Gogel
-Goga
-Gofton
-Goffe
-Goetting
-Goeser
-Goerner
-Goerke
-Goerdel
-Goeppner
-Godsman
-Godert
-Godel
-Gobeli
-Gnas
-Glucksman
-Glotzbecker
-Gloeckner
-Glockner
-Glish
-Glickson
-Glicken
-Glew
-Glessing
-Gleichman
-Glazener
-Glave
-Glausier
-Glatzel
-Glassett
-Glasbrenner
-Gladu
-Glab
-Glaab
-Giza
-Gittler
-Gittleman
-Gittinger
-Gitting
-Gitthens
-Gissel
-Gischer
-Girst
-Girsch
-Girona
-Girillo
-Gire
-Gira
-Giovanetti
-Gionest
-Gingles
-Gingery
-Ging
-Gillstrap
-Gillson
-Gillotti
-Gillmor
-Gilliss
-Gillig
-Gillert
-Gillcrest
-Gilgour
-Gilgore
-Gilding
-Gilderman
-Gilcreast
-Gieseman
-Gieselman
-Gieringer
-Gick
-Giangrosso
-Giangregorio
-Giambra
-Giambattista
-Ghibaudy
-Ghianni
-Ghelfi
-Ghaziani
-Ghantt
-Ghant
-Ghaemmaghami
-Gey
-Getler
-Getchius
-Gesualdo
-Gesmondi
-Gerweck
-Gerwe
-Gerula
-Gertsen
-Gershey
-Gershen
-Gers
-Gerritsen
-Gerdsen
-Gerczak
-Gerbatz
-Gerba
-Gerache
-Georgl
-Georgiadis
-Georgelis
-Georgalas
-Genualdo
-Gentery
-Gennock
-Gennett
-Genett
-Gendernalik
-Genas
-Gena
-Gemmen
-Gelston
-Gellman
-Gelfo
-Gelen
-Gelbowitz
-Geibig
-Gehlhausen
-Geffre
-Geesaman
-Geel
-Gedman
-Geckles
-Gebbie
-Gearwar
-Gearlds
-Gayne
-Gayfield
-Gawlas
-Gauwain
-Gaufin
-Gauani
-Gastley
-Gastello
-Gassoway
-Gasparino
-Gaskey
-Gaser
-Gascot
-Garuti
-Garrington
-Garreh
-Garnand
-Garlits
-Garity
-Garitty
-Gariety
-Garia
-Gari
-Garetson
-Garelik
-Garding
-Garb
-Garasha
-Ganzer
-Gantert
-Ganotisi
-Ganner
-Ganison
-Ganie
-Gangell
-Gangel
-Ganesh
-Gandrud
-Ganas
-Gamby
-Gambles
-Galyan
-Galuski
-Galper
-Gallwas
-Galluzzi
-Gallups
-Gallosa
-Gallipeau
-Gallet
-Gallerani
-Gallegly
-Gallaty
-Gallaspy
-Gallander
-Galioto
-Galicinao
-Galer
-Galdon
-Galardi
-Galamay
-Galabeas
-Gala
-Gaitor
-Gagg
-Gagan
-Gaerlan
-Gadley
-Gacke
-Gacia
-Gach
-Gabrelcik
-Gabay
-Gabard
-Fylnn
-Fydenkevez
-Futter
-Fuse
-Fuscaldo
-Furstenberg
-Furmanik
-Furlone
-Furia
-Furer
-Furci
-Furbish
-Funt
-Fulker
-Fukano
-Fujino
-Fuhrmeister
-Fugo
-Fuerman
-Frymyer
-Fryling
-Frontz
-Froncek
-Fronce
-Frolich
-Froio
-Froid
-Froehle
-Frischman
-Friou
-Friot
-Frieze
-Friesz
-Friemering
-Frieman
-Friedrick
-Friedle
-Frickson
-Frickel
-Frichette
-Fricano
-Fribley
-Frewing
-Frever
-Freudenstein
-Frerking
-Frenger
-Freisner
-Fregeau
-Freedle
-Frease
-Frazey
-Frascone
-Franzmann
-Franzetti
-Frankforter
-Francy
-Franckowiak
-Francies
-Franchette
-Fralin
-Fraleigh
-Fraint
-Fragozo
-Fracchia
-Frabizzio
-Fousek
-Fouraker
-Foucault
-Fosson
-Fossati
-Fosnough
-Forts
-Forthman
-Forsting
-Forstedt
-Forshay
-Forshaw
-Forsha
-Forro
-Forno
-Forlivio
-Forkosh
-Forkan
-Forcello
-Foradori
-Fontane
-Fonger
-Foney
-Fondy
-Fondow
-Folta
-Follin
-Folliard
-Folley
-Folken
-Foiles
-Fohn
-Foggs
-Foesch
-Foertsch
-Foecking
-Fodness
-Foat
-Flot
-Flosi
-Florenz
-Florens
-Florencio
-Florea
-Florczak
-Flodin
-Flocke
-Flo
-Flentroy
-Flenard
-Fleisner
-Flecther
-Flaks
-Flagstad
-Flagel
-Fjetland
-Fixico
-Fiume
-Fitterer
-Fisette
-Firlit
-Firestein
-Fiotodimitrak
-Fioto
-Finner
-Finnefrock
-Fingado
-Finely
-Fincel
-Finau
-Fimbrez
-Filoteo
-Fillpot
-Fillare
-Filipski
-Filippo
-Filipovic
-Filipelli
-Filimaua
-Filhiol
-Filgo
-Fileds
-Filbert
-Figuera
-Figliola
-Figart
-Fietsam
-Fieselman
-Fiene
-Fieldhouse
-Fiebig
-Fidel
-Fida
-Fickert
-Fiato
-Fevold
-Feuerborn
-Fetchko
-Fesh
-Feser
-Ferruso
-Ferriolo
-Ferriola
-Ferrence
-Ferrar
-Ferran
-Ferraiz
-Feroz
-Ferone
-Fernstrom
-Fernstaedt
-Fernow
-Ferkovich
-Fergen
-Ferdolage
-Ferdinandsen
-Ferbrache
-Fennewald
-Fenk
-Fenix
-Fendler
-Fenchel
-Felske
-Fellinger
-Felicetti
-Feldpausch
-Feighan
-Feichter
-Fehrle
-Fehringer
-Fegaro
-Feener
-Feeler
-Fedorchak
-Federowicz
-Fedd
-Feauto
-Feagen
-Feaganes
-Fazzina
-Fazzi
-Faykosh
-Fayard
-Favuzza
-Favolise
-Fausset
-Fauske
-Fausel
-Fauscett
-Faulknen
-Faulkenburg
-Fatica
-Fastlaben
-Fastic
-Farzan
-Farstvedt
-Farin
-Farguharson
-Fargnoli
-Farfalla
-Farese
-Farer
-Faraldo
-Faraj
-Fara
-Fanzo
-Fanton
-Fanney
-Fanizzi
-Fanion
-Fanelle
-Falterman
-Falsetti
-Fallone
-Falkiewicz
-Falconio
-Fake
-Fairleigh
-Fahringer
-Fahrenkrug
-Faerber
-Fadley
-Fadeley
-Facundo
-Fack
-Face
-Faby
-Fabrizius
-Fabozzi
-Fabiszewski
-Fabin
-Ezpeleta
-Ezparza
-Eyrich
-Eyerman
-Ewoldt
-Ewards
-Evasco
-Evanich
-Evangelo
-Eustace
-Eugley
-Euertz
-Etulain
-Etchells
-Esson
-Esskew
-Essery
-Esselink
-Espinol
-Espenoza
-Espelien
-Espeland
-Espadas
-Esler
-Eske
-Eska
-Escuriex
-Escovar
-Escort
-Eschrich
-Eschette
-Eschen
-Eschbaugh
-Escalon
-Escalero
-Esbrandt
-Esary
-Ertman
-Eroh
-Ernesto
-Erlenbusch
-Erle
-Erke
-Erichsen
-Eric
-Erholm
-Erbstein
-Erbst
-Eppolito
-Eppihimer
-Eppich
-Entin
-Enslinger
-Enslen
-Enockson
-Ennenga
-Enman
-Englett
-Engleson
-Englerth
-Engl
-Engholm
-Engelken
-Engelkemier
-Engelhaupt
-Engelbach
-Endries
-Endow
-Endito
-Enderby
-Encallado
-Emziah
-Embt
-Embs
-Embelton
-Emard
-Elwonger
-Elvsaas
-Elumbaugh
-Elstner
-Elsmore
-Elskamp
-Elshant
-Elmblad
-Ellson
-Ellias
-Elletson
-Ellestad
-Ellert
-Ellermann
-Ellerbrock
-Elleman
-Ellars
-Elland
-Eliezrie
-Eldib
-Eldert
-Elbe
-Ekwall
-Ekholm
-Eken
-Eitnier
-Eitniear
-Eisenzimmer
-Eisenstadt
-Eisensmith
-Eiselman
-Eisbach
-Eisaman
-Eiken
-Eibell
-Ehrke
-Ehrismann
-Ehrenfeld
-Ehlman
-Egizi
-Egitto
-Eggeman
-Effron
-Ednie
-Edelbrock
-Edde
-Edd
-Economos
-Eckols
-Eckloff
-Echegoyen
-Ebia
-Eberlin
-Ebbers
-Easterbrook
-Earney
-Earleywine
-Eanni
-Eadens
-Dyron
-Dykhoff
-Dyers
-Dyda
-Dybala
-Dwane
-Dwaileebe
-Duverne
-Duve
-Dusen
-Dusatko
-Dusablon
-Durrette
-Durphey
-Durnin
-Durkes
-Durette
-Durdy
-Durch
-Duracher
-Dupray
-Dupoux
-Duponte
-Duperclay
-Dupass
-Dupar
-Dunwiddie
-Dunsing
-Dunnaville
-Duncomb
-Duncklee
-Dunay
-Dunakin
-Dumpe
-Dumes
-Dumdei
-Dumay
-Dulkis
-Dukich
-Dukas
-Duin
-Dugo
-Duewall
-Duemmel
-Duelm
-Dueber
-Dudman
-Dudak
-Duckhorn
-Duchscherer
-Ducat
-Ducas
-Dubyk
-Dubill
-Dubiansky
-Dubaldi
-Dua
-Dspain
-Drzazgowski
-Drymon
-Drylie
-Druvenga
-Druschel
-Drungo
-Droze
-Drouse
-Drott
-Drosick
-Droneburg
-Droessler
-Droesch
-Drobny
-Drizin
-Dripps
-Drinkley
-Drillock
-Driesbach
-Dretzka
-Dresner
-Drentlaw
-Drenon
-Drehs
-Drehobl
-Drda
-Draxler
-Drath
-Drapeaux
-Dragula
-Drafts
-Draft
-Dozer
-Doxtater
-Doxie
-Dowst
-Dowson
-Downton
-Dowlen
-Dowey
-Dowery
-Douty
-Doughtry
-Doughtery
-Dotzler
-Dotterer
-Dothard
-Dosher
-Dosal
-Dorso
-Dorsette
-Doro
-Dornfeld
-Dorkin
-Dorka
-Dorge
-Dorchy
-Dorame
-Dopler
-Dopico
-Doore
-Dooms
-Donnie
-Donnelley
-Donnel
-Donayre
-Donatello
-Donachie
-Dominiguez
-Domingos
-Dominga
-Dominey
-Domenget
-Dolores
-Dollyhigh
-Dollen
-Dollak
-Doleac
-Dolch
-Dolbeare
-Dokka
-Dokes
-Doire
-Doing
-Dohring
-Dohogne
-Dohnal
-Dohan
-Doerle
-Doerhoff
-Doemelt
-Doehring
-Doegg
-Dodsworth
-Dodoo
-Dodier
-Dockendorf
-Docken
-Dobrowski
-Dobrin
-Dobine
-Doberstein
-Dizer
-Dixey
-Divita
-Diven
-Divalerio
-Dituri
-Ditton
-Disspain
-Disparte
-Dismore
-Disilvestro
-Dishong
-Dishian
-Diseth
-Discenza
-Dirkson
-Dirkse
-Dirker
-Dirk
-Dipippo
-Dipinto
-Dipierro
-Dinnocenzo
-Dinizio
-Dinis
-Dingivan
-Dingfelder
-Dincher
-Dimucci
-Dimpson
-Dimpfl
-Dimitrov
-Dimarzo
-Dils
-Dilisio
-Diliberto
-Diliberti
-Diles
-Dileonardo
-Dilena
-Dijulio
-Diiulio
-Digiuseppe
-Diga
-Difillippo
-Difebbo
-Dieng
-Diekman
-Didyk
-Didriksen
-Dickus
-Dickow
-Dickeson
-Dicastro
-Dibenedetti
-Dhaliwal
-Dezenzo
-Dewyse
-Dewinter
-Dewaters
-Dewaele
-Devoto
-Devor
-Devoogd
-Deviva
-Devitis
-Devit
-Deveyra
-Devericks
-Devenuto
-Deveja
-Devaughan
-Deutschendorf
-Deuink
-Deubner
-Detzler
-Detullio
-Detore
-Dethlefsen
-Dethlefs
-Detamble
-Desrevisseau
-Desotel
-Deso
-Desmeules
-Desmaris
-Desilvio
-Deshpande
-Deschambault
-Descamps
-Desatnik
-Desamito
-Desalle
-Desak
-Derwin
-Derting
-Derrah
-Deroven
-Derosso
-Deromer
-Dermott
-Deringer
-Derico
-Derga
-Derflinger
-Derezinski
-Derck
-Derbacher
-Deranick
-Depuydt
-Depung
-Depree
-Deppert
-Depierre
-Dephillips
-Deojay
-Denzin
-Denten
-Dentel
-Dennies
-Denina
-Denger
-Deneke
-Denegre
-Denboer
-Denapoli
-Demsky
-Demsey
-Demotta
-Demmons
-Demman
-Demendonca
-Demeester
-Dembowski
-Demarce
-Deman
-Demallie
-Demaire
-Delwiche
-Delphia
-Delore
-Dellenbaugh
-Dellbringge
-Dellaratta
-Dellaporta
-Dellapenna
-Dellacioppa
-Deliberto
-Delibertis
-Delgenio
-Delcueto
-Delaurie
-Delauder
-Delatrinidad
-Delash
-Delaet
-Del
-Dekrey
-Dejoie
-Deiters
-Deimund
-Degrenier
-Degre
-Degrand
-Degon
-Degeston
-Degelbeck
-Degaust
-Degasparre
-Defreece
-Defenderfer
-Defee
-Deeken
-Dedon
-Dedinas
-Dedicke
-Dedic
-Decristofaro
-Decoud
-Decos
-Deconti
-Deckers
-Decio
-Decenzo
-Debroux
-Debrot
-Debray
-Deboef
-Debiasio
-Debettignies
-Debenedittis
-Debbins
-Debaecke
-Dearson
-Dearo
-Deardon
-Deaquino
-Deacetis
-Dayne
-Dayem
-Dax
-Dawoud
-Davitt
-Davito
-Davidoff
-Dauterman
-Daughterty
-Daugaard
-Daudelin
-Daubendiek
-Dattilio
-Datcher
-Dasovich
-Daso
-Dasilua
-Dashem
-Darou
-Darke
-Dargin
-Darga
-Darco
-Darcey
-Dapas
-Dantos
-Danson
-Danny
-Danielian
-Danchetz
-Danby
-Damrow
-Damours
-Damboise
-Dambakly
-Dambach
-Damasco
-Damann
-Dallmeyer
-Dallesandro
-Dalfonso
-Dakins
-Dakes
-Daire
-Dahill
-Daguio
-Dagis
-Dabdoub
-Czerkies
-Czarnota
-Czachor
-Czach
-Cypress
-Cynthia
-Cylkowski
-Cyfers
-Cwiakala
-Cvetkovic
-Cuzman
-Cuzick
-Cuttler
-Cutt
-Cuti
-Cutforth
-Cutchins
-Cutchall
-Cushwa
-Curo
-Curbeam
-Cunnick
-Cuneio
-Cundick
-Cumbaa
-Cultice
-Cullity
-Cullip
-Cullifer
-Cucvas
-Cuculich
-Cucino
-Cubeta
-Cser
-Crupper
-Crunkilton
-Cruden
-Crover
-Crouter
-Crough
-Crouchet
-Crosthwaite
-Croon
-Cronshaw
-Cronenberg
-Crome
-Croman
-Crognale
-Crogan
-Croasmun
-Cristofori
-Cristiano
-Crisan
-Cringle
-Crincoli
-Crill
-Crieghton
-Cridge
-Criblez
-Crellin
-Cregeen
-Creeks
-Creath
-Creacy
-Crazier
-Crawmer
-Crawhorn
-Cratin
-Crapser
-Crapse
-Cranmore
-Cramm
-Cramblit
-Cramblet
-Cragin
-Cracas
-Cozzone
-Coyco
-Coxey
-Cowper
-Cowett
-Covone
-Covill
-Coverton
-Councilman
-Coultrap
-Coulas
-Coughenour
-Cough
-Cotty
-Cotherman
-Cother
-Costantini
-Cossell
-Cossano
-Cosley
-Coslett
-Coskey
-Cosgray
-Corza
-Corvi
-Corvan
-Corsetti
-Corscadden
-Corsa
-Corrow
-Corrice
-Correro
-Correale
-Corre
-Corna
-Corke
-Corid
-Corelli
-Cordonnier
-Cordona
-Corak
-Coppler
-Copelan
-Coore
-Coonradt
-Coones
-Cookus
-Conveniencia
-Contrerras
-Contrenas
-Contorno
-Constantini
-Constantineau
-Consolver
-Conrath
-Connet
-Connerly
-Conliffe
-Conforto
-Conda
-Conca
-Conales
-Compono
-Compau
-Commendatore
-Comings
-Comboy
-Combass
-Coltrin
-Colpetzer
-Colonel
-Colombini
-Cologie
-Colla
-Colbeth
-Colbaugh
-Colasuonno
-Colapinto
-Colamarino
-Colaluca
-Colaianni
-Colafrancesco
-Colace
-Colabella
-Coggsdale
-Coffill
-Codispoti
-Codell
-Cocoros
-Cocopoti
-Cocola
-Cockley
-Cockey
-Cochron
-Coch
-Cobden
-Coatsworth
-Coarsey
-Coar
-Clymore
-Clumpner
-Clougher
-Clolinger
-Clinkingbeard
-Clineman
-Clewes
-Clemments
-Claypole
-Clayburg
-Claybron
-Claybon
-Claughton
-Clase
-Clarenbach
-Clankscales
-Clampett
-Claessens
-Claburn
-Citrin
-Cisney
-Cirri
-Cipro
-Cipkowski
-Cione
-Cinquanti
-Cink
-Cimiano
-Ciervo
-Ciers
-Cicora
-Ciciora
-Cicione
-Cicerelli
-Ciccolini
-Ciccarone
-Cicarella
-Ciarletta
-Ciaccio
-Chuta
-Chustz
-Churan
-Chumbler
-Chuba
-Chruch
-Christler
-Christinsen
-Christinat
-Christello
-Chrispin
-Chrismer
-Chrislip
-Chrisjohn
-Chrestman
-Choute
-Chough
-Chorlton
-Chomka
-Chmelicek
-Chiulli
-Chislom
-Chiras
-Chinzi
-Chinnery
-Chinick
-Chim
-Chilvers
-Chilo
-Chiarmonte
-Chiarenza
-Chiapetti
-Chhuon
-Chhour
-Chheang
-Chetram
-Chessher
-Cherrier
-Cherepy
-Cherenfant
-Chenot
-Cheli
-Checa
-Cheathan
-Chears
-Chauvaux
-Chaudoin
-Chauarria
-Chatters
-Chatlos
-Chatley
-Chasey
-Charves
-Charsky
-Charania
-Chaplen
-Chaple
-Channer
-Chander
-Champey
-Champeau
-Challen
-Chall
-Chalkley
-Chalet
-Chalcraft
-Chaix
-Chadick
-Chadbourn
-Chaban
-Cesari
-Cervoni
-Cervin
-Certalich
-Cerni
-Cerney
-Cereo
-Cerce
-Ceravolo
-Ceparano
-Centrella
-Centner
-Centano
-Cenat
-Celmer
-Celenza
-Celadon
-Cefaratti
-Cefalo
-Cedillos
-Cecilia
-Cechini
-Cecala
-Cease
-Cearns
-Cazeau
-Cayson
-Cayanan
-Cavallario
-Cauthron
-Cattrell
-Catterson
-Catrone
-Catone
-Catoggio
-Caterino
-Catching
-Catalani
-Castrataro
-Castoe
-Castles
-Castillanos
-Castellonese
-Castelhano
-Cassman
-Cassius
-Cassisse
-Cassem
-Cassani
-Cassandra
-Casola
-Caselli
-Cascone
-Casburn
-Casbeer
-Casbarro
-Carrin
-Carreker
-Carrea
-Carre
-Carrauza
-Carranzo
-Carpinello
-Carolin
-Carmolli
-Carmena
-Carmell
-Carmain
-Carlye
-Carlsten
-Carlough
-Carlone
-Caringi
-Carine
-Carin
-Carela
-Cardono
-Cardle
-Cardinali
-Cardi
-Cardera
-Carback
-Capuzzi
-Capracotta
-Cappo
-Cappleman
-Capparelli
-Caponera
-Caplener
-Capanna
-Caoili
-Caoile
-Canzio
-Cantoran
-Cantillo
-Canta
-Canonica
-Cannington
-Canniff
-Cangas
-Canevazzi
-Canes
-Caneles
-Candido
-Canders
-Cance
-Canaway
-Canarte
-Canario
-Canan
-Camren
-Campusano
-Campman
-Camm
-Caminos
-Camferdam
-Camerena
-Camell
-Camak
-Camaj
-Calway
-Calvino
-Calvetti
-Calvani
-Caltabiano
-Calnimptewa
-Calnick
-Calnen
-Calmese
-Callander
-Callabrass
-Caliz
-Calija
-Calger
-Calendine
-Calderara
-Calcara
-Calamity
-Cailler
-Caho
-Caguimbal
-Cadoff
-Caddick
-Cadavieco
-Cabos
-Cabiltes
-Cabibbo
-Cabellero
-Cabasso
-Caballes
-Cabading
-Caal
-Byra
-Byod
-Bynon
-Byner
-Bynam
-Byker
-Buzzi
-Buzzeo
-Butzen
-Buttz
-Butteris
-Butkiewicz
-Buteaux
-Bustad
-Bussone
-Busman
-Bushmaker
-Busche
-Burwinkel
-Burum
-Burtless
-Bursi
-Burrup
-Burross
-Burries
-Burrichter
-Burrelli
-Buron
-Buro
-Burnstein
-Burnaugh
-Burnap
-Burkdoll
-Buris
-Burington
-Burgun
-Burgie
-Burghard
-Burgh
-Burgas
-Burgardt
-Burga
-Burdess
-Burcin
-Burchfiel
-Burchess
-Burandt
-Buonanno
-Buonamici
-Buntjer
-Bungert
-Bundschuh
-Bumps
-Buman
-Bulosan
-Bullocks
-Bullie
-Bularz
-Buland
-Bujarski
-Buhmann
-Buhman
-Bugna
-Buglisi
-Buggy
-Buemi
-Budke
-Buder
-Budds
-Buddie
-Buczak
-Buckwald
-Buckovitch
-Buckholtz
-Buckhanan
-Buchetto
-Buchauer
-Bucciarelli
-Buccheri
-Bucaram
-Bubis
-Bubash
-Bubak
-Brzostek
-Brzezowski
-Bryton
-Brusuelas
-Brussell
-Bruschi
-Brundrett
-Brundin
-Brumet
-Bruley
-Bruk
-Brug
-Bruestle
-Brudner
-Bruccoleri
-Brozie
-Broxterman
-Brox
-Browy
-Brownle
-Browm
-Broward
-Brouwers
-Brousard
-Brought
-Brotherson
-Brotemarkle
-Brossoit
-Broscious
-Brooms
-Broomhall
-Brookshaw
-Brookhouse
-Bronchetti
-Broks
-Broida
-Brohl
-Broglie
-Brofft
-Broermann
-Broenneke
-Brodnex
-Brodka
-Brodish
-Brockelmeyer
-Brockberg
-Broch
-Broccoli
-Brobeck
-Broadstone
-Brittman
-Brislan
-Brisk
-Brisentine
-Bringhurst
-Brindel
-Brinda
-Brincks
-Brimeyer
-Brihm
-Brignolo
-Briglia
-Brighi
-Brient
-Bridenbaker
-Briddell
-Briante
-Brians
-Briagas
-Brevo
-Breu
-Bretto
-Bretthauer
-Breslauer
-Bresemann
-Brentari
-Brenning
-Brenhaug
-Brengettey
-Brenek
-Brendal
-Brenagh
-Breiling
-Breidenbaugh
-Brehant
-Bregel
-Bredeweg
-Bredehoft
-Breceda
-Braylock
-Brause
-Brauning
-Braulio
-Braukus
-Braucher
-Bratchett
-Brasseur
-Brasser
-Branstutter
-Branstad
-Branscombe
-Brannick
-Brandolini
-Brandly
-Brandenberg
-Brandeis
-Brandal
-Branciforte
-Brancheau
-Brancati
-Bramlette
-Bramlet
-Brakhage
-Braitman
-Braisted
-Bradfute
-Bracks
-Bracket
-Braccia
-Braam
-Bozzone
-Bozenski
-Bozard
-Boyson
-Boylston
-Boxwell
-Bowlen
-Bowdle
-Bowdich
-Boward
-Bovia
-Bovey
-Boven
-Bouza
-Bouwman
-Bouwkamp
-Boutiette
-Boursaw
-Bourret
-Bourgoyne
-Bounleut
-Bound
-Bouma
-Bouleris
-Bouler
-Boughman
-Boughamer
-Boudoin
-Boudewyns
-Botwinick
-Bottone
-Bottino
-Botticello
-Botten
-Bottaro
-Bottalico
-Bostel
-Boshes
-Boshard
-Bosell
-Boscarello
-Bory
-Borsari
-Borok
-Borodec
-Bornmann
-Bormuth
-Bormet
-Borling
-Borlace
-Borkin
-Borkenhagen
-Boreen
-Bordin
-Borcherding
-Boote
-Booras
-Boody
-Bonton
-Bontemps
-Bonomini
-Bonina
-Bonifer
-Bongartz
-Boness
-Bonefont
-Bonefield
-Bonder
-Bonde
-Bondanza
-Bonavia
-Bonamo
-Bonadurer
-Bomkamp
-Bolognia
-Bollich
-Bollacker
-Bolinsky
-Boldosser
-Boldon
-Bolda
-Bolado
-Boken
-Bok
-Boisselle
-Boisen
-Bois
-Bohs
-Bohnenblust
-Bohlig
-Bohinc
-Bogumil
-Bogie
-Boggioni
-Boggi
-Bogenschneide
-Bogema
-Boge
-Bogdanski
-Bogdanovich
-Boettner
-Boesiger
-Boesel
-Boensch
-Boele
-Boeken
-Boehning
-Boehlar
-Bodwell
-Bodreau
-Bodovsky
-Boda
-Boczar
-Boclair
-Bockemehl
-Bochenski
-Bochat
-Boch
-Boccio
-Bocchicchio
-Boccanfuso
-Bobzien
-Bobson
-Bobino
-Bobier
-Bobeck
-Bobak
-Boarts
-Boardwine
-Boaldin
-Boakye
-Boady
-Blunden
-Blumenstock
-Blovin
-Blouir
-Bloschichak
-Bloome
-Bloodough
-Blonder
-Blommer
-Blok
-Bloeser
-Blinks
-Blinka
-Bline
-Blickem
-Bleyl
-Blews
-Bless
-Blenner
-Bleimehl
-Blecker
-Bleasdale
-Bleakney
-Blatnick
-Blaski
-Blare
-Blanzy
-Blankumsee
-Blancett
-Blaich
-Blada
-Blackbum
-Bjorseth
-Bjorlin
-Bizzaro
-Bivin
-Bitetto
-Bisso
-Biskup
-Biskach
-Bisio
-Bisi
-Bishard
-Bisesi
-Bisaccia
-Birtcher
-Birrittella
-Birkhimer
-Birkey
-Biringer
-Biren
-Birdette
-Birak
-Bio
-Binker
-Bink
-Bingler
-Bingert
-Bingamon
-Bindas
-Bilson
-Billow
-Billon
-Billo
-Bille
-Bilis
-Bilich
-Biler
-Bilek
-Bilden
-Bilazzo
-Bila
-Bigus
-Biggart
-Biggar
-Bigaud
-Biesheuvel
-Biernacki
-Bierley
-Bierlein
-Bielefeldt
-Biedermann
-Biedenbender
-Biddulph
-Bicksler
-Bickes
-Bicek
-Bica
-Bibiano
-Biangone
-Bi
-Bezzo
-Bezdicek
-Beyt
-Beydler
-Bevelacqua
-Beuther
-Beucke
-Betzold
-Bettman
-Bettino
-Betterley
-Betancourth
-Bessel
-Beska
-Beschorner
-Berwald
-Berum
-Bertotti
-Bertorelli
-Bertoldo
-Bertolami
-Bertley
-Berteotti
-Bertaina
-Berstler
-Berniard
-Berndsen
-Bernadette
-Berlinski
-Berkstresser
-Berks
-Berkovich
-Berkoff
-Berkhimer
-Berkery
-Bergmark
-Berga
-Berfield
-Bereznak
-Beresky
-Berenger
-Berendzen
-Berendt
-Berczel
-Berch
-Berbes
-Berardinelli
-Beppu
-Benziger
-Benzie
-Benzango
-Benthall
-Bentancourt
-Bensberg
-Benno
-Bennin
-Bennes
-Benken
-Benike
-Benigni
-Benestad
-Bendtsen
-Bendis
-Bendig
-Bendetti
-Bendele
-Benasher
-Benack
-Bemben
-Belts
-Belrose
-Belnas
-Bellusci
-Belloso
-Bellizzi
-Bellinghausen
-Belliard
-Belletto
-Bellettiere
-Belko
-Belitz
-Belfanti
-Beldon
-Bekis
-Bejcek
-Beitler
-Beiser
-Beine
-Beiley
-Beierschmitt
-Behrle
-Behran
-Behlmer
-Behlke
-Beguelin
-Beghtol
-Beger
-Begeal
-Beezley
-Beesmer
-Beerer
-Beere
-Beerbohm
-Beenel
-Beelby
-Beecken
-Bedor
-Bede
-Beddows
-Beddow
-Beddia
-Becky
-Beckius
-Beckfield
-Beckem
-Becena
-Beavis
-Beaumonte
-Beauman
-Beauharnois
-Beaudine
-Beasly
-Beales
-Be
-Bazylewicz
-Bazner
-Bazel
-Baytos
-Bayton
-Bayt
-Baylock
-Bayird
-Baygents
-Baxa
-Bawner
-Bawden
-Bavelas
-Bauske
-Baumberger
-Baul
-Battuello
-Battig
-Batterman
-Battani
-Battaglino
-Batimon
-Bathke
-Baters
-Batch
-Batas
-Batara
-Batala
-Bastine
-Bassani
-Bassali
-Baskind
-Baseman
-Basehore
-Basara
-Barze
-Barwell
-Barut
-Baruffa
-Bartlome
-Bartin
-Barthol
-Barthell
-Barters
-Barswell
-Barshaw
-Barrigan
-Barria
-Barrasa
-Barraco
-Barnthouse
-Barnt
-Barmes
-Barkhimer
-Barios
-Bario
-Barino
-Barie
-Barick
-Barfuss
-Barfknecht
-Barer
-Bareford
-Bardis
-Barcley
-Barchick
-Barcena
-Barbur
-Barbor
-Barbin
-Barben
-Barbella
-Barbaglia
-Baransky
-Baragan
-Baquiran
-Banzhaf
-Banter
-Bankowski
-Banet
-Bandt
-Banaszek
-Banana
-Balque
-Balowski
-Ballog
-Ballina
-Ballensky
-Ballato
-Baliga
-Baldomero
-Balden
-Balde
-Baldassare
-Balbontin
-Balbas
-Balassi
-Balandran
-Bakkala
-Bakhshian
-Bakerville
-Bakaler
-Bajaj
-Baites
-Baisten
-Bairam
-Bailard
-Baierl
-Baichan
-Bai
-Bahrs
-Bagozzi
-Bagni
-Bagnato
-Baglione
-Baggio
-Baggesen
-Baggenstoss
-Bagan
-Baessler
-Baerman
-Baerlocher
-Badgero
-Baddour
-Badami
-Baculpo
-Bacio
-Bacigalupo
-Bachta
-Bachar
-Bacchi
-Babrow
-Babonis
-Babish
-Babicke
-Babeu
-Baab
-Azzopardi
-Azore
-Azen
-Aykroid
-Axon
-Axelrad
-Awkard
-Awender
-Avon
-Avirett
-Averitte
-Averbeck
-Avellano
-Avary
-Auwaerter
-Autrano
-Auteri
-Austgen
-Ausdemore
-Aurich
-Aumen
-Auler
-Augustyniak
-Augliano
-Aughtman
-Aue
-Auduong
-Aucter
-Attianese
-Atiles
-Athas
-Asturias
-Astrup
-Astley
-Assante
-Aspden
-Aspacio
-Asley
-Asleson
-Askvig
-Askegren
-Askam
-Ashmen
-Ashauer
-Asfour
-Aschoff
-Aschim
-Aschan
-Asal
-Arzo
-Arvesen
-Arrow
-Arrocha
-Arris
-Arribas
-Arquitt
-Arone
-Aroche
-Arnt
-Arnoux
-Arnoldi
-Arning
-Arnholt
-Arndorfer
-Armson
-Arment
-Arlotta
-Arlinghaus
-Arlia
-Arkema
-Arizaga
-Arisumi
-Aristide
-Aris
-Arif
-Ariano
-Arguilez
-Argudo
-Argrow
-Argiro
-Argetsinger
-Arfman
-Arenburg
-Aredondo
-Area
-Ardry
-Ardner
-Ardizone
-Arcudi
-Arcizo
-Arcila
-Archilla
-Archangel
-Arcega
-Arbucci
-Arato
-Arano
-Aran
-Aragan
-Apostol
-Apolito
-Apland
-Apkin
-Aperges
-Apalategui
-Apaez
-Anzora
-Antonsen
-Antolos
-Antolini
-Antman
-Anter
-Anspaugh
-Anselm
-Annonio
-Annichiarico
-Annibale
-Annarumo
-Anliker
-Ankrapp
-Ankenman
-Anhorn
-Angton
-Angrisano
-Angon
-Angolo
-Angleton
-Anglebrandt
-Anglea
-Anglade
-Angilletta
-Angeron
-Angelotti
-Angelbeck
-Angela
-Anez
-Andueza
-Andrulis
-Andronis
-Andreu
-Andreoni
-Andert
-Anderlik
-Anauo
-Anastasiades
-Ananias
-Anand
-Amuso
-Amrich
-Amr
-Amour
-Amoss
-Amorosi
-Amoako
-Amoah
-Ammirato
-Ammar
-Amirian
-Amiot
-Amidi
-Ameduri
-Amderson
-Ambuehl
-Amass
-Amanza
-Amadio
-Alwang
-Alwan
-Alvine
-Alvarran
-Alvarracin
-Alvanez
-Aluqdah
-Altshuler
-Altonen
-Altmiller
-Altken
-Altiery
-Althiser
-Altaras
-Alstrom
-Alstad
-Alsbury
-Alsberry
-Alquijay
-Alpha
-Alonza
-Aloia
-Alnas
-Almerico
-Almenar
-Almen
-Allwood
-Allstott
-Allridge
-Alleva
-Allenson
-Allenbaugh
-Allegretta
-Allegra
-Allbritten
-Allara
-Allamon
-Alken
-Alizadeh
-Alirez
-Alires
-Aline
-Alim
-Algire
-Algier
-Algien
-Alfonsi
-Alexy
-Alexnder
-Alessandroni
-Alert
-Alemany
-Aleksey
-Alderton
-Alderfer
-Aldava
-Aldapa
-Alconcel
-Albornoz
-Albini
-Albergotti
-Alben
-Albea
-Albang
-Alario
-Alamilla
-Alalem
-Akoni
-Akles
-Akande
-Akamine
-Ajasin
-Aiyer
-Aihara
-Ahrendes
-Aherns
-Aharoni
-Agunos
-Aguliar
-Aguillar
-Agudo
-Agoras
-Agnor
-Agni
-Agers
-Agel
-Aery
-Aerts
-Adon
-Adessa
-Aderson
-Aderman
-Adema
-Adelsberg
-Adelblue
-Adel
-Addiego
-Adas
-Adamcik
-Acquilla
-Ackmann
-Achterhof
-Achane
-Abuhl
-Abrial
-Abreau
-Aboulahoud
-Aboudi
-Ablao
-Abilez
-Abete
-Aberson
-Abelman
-Abelardo
-Abedelah
-Abdulmateen
-Abato
-Aas
-Aarestad
-Aanenson
-Zymowski
-Zyla
-Zybia
-Zwolski
-Zwigart
-Zuwkowski
-Zurovec
-Zurkuhlen
-Zuppa
-Zunich
-Zumpfe
-Zumalt
-Zulkowski
-Zulfer
-Zugg
-Zuerlein
-Zuehls
-Zuckerberg
-Zuchelkowski
-Zucchetto
-Zucca
-Zubrowski
-Zubizarreta
-Zsadanyi
-Zrake
-Zotti
-Zosel
-Zoltek
-Zolla
-Zogopoulos
-Zogby
-Zmek
-Zitzmann
-Zitzelberger
-Zirker
-Zinzow
-Zimick
-Zimerman
-Zilk
-Zigomalas
-Ziesman
-Ziernicki
-Zierke
-Zierk
-Zierenberg
-Zierden
-Ziems
-Zieger
-Ziebert
-Zicafoose
-Zic
-Zibell
-Ziada
-Ziad
-Zhen
-Zetzer
-Zetino
-Zerphey
-Zercher
-Zeran
-Zephyr
-Zelonis
-Zellinger
-Zelko
-Zeliff
-Zeleznik
-Zekria
-Zeidman
-Zehrer
-Zehrbach
-Zeherquist
-Zehender
-Zegar
-Zega
-Zechiel
-Zeccardi
-Zebracki
-Zeavala
-Zbierski
-Zaza
-Zayicek
-Zawistowski
-Zawasky
-Zavitz
-Zaverl
-Zavcedo
-Zavattieri
-Zavacky
-Zausch
-Zatorski
-Zarrabi
-Zarlingo
-Zarin
-Zarillo
-Zaren
-Zapel
-Zapatero
-Zantow
-Zant
-Zannini
-Zangger
-Zanfardino
-Zanardi
-Zan
-Zampella
-Zamoro
-Zamborano
-Zambelli
-Zalamea
-Zajdel
-Zais
-Zahourek
-Zaharek
-Zagulski
-Zagacki
-Zadina
-Zaczek
-Zachter
-Zachariah
-Zacchini
-Zabenko
-Zabbo
-Yuska
-Yuscak
-Yurovic
-Yurek
-Yunes
-Yumas
-Yuk
-Yudell
-Ysaguirre
-Yray
-Yozzo
-Yovan
-Youssefi
-Yousko
-Younghans
-Youmon
-Youla
-Yotter
-Yoshi
-Yoseph
-Yorck
-Yono
-Yoneoka
-Yonashiro
-Yomes
-Yokel
-Yoest
-Ynocencio
-Yewell
-Yetzer
-Yetsko
-Yerty
-Yeropoli
-Yerka
-Yergin
-Yenor
-Yem
-Yeley
-Yearego
-Yeakel
-Yazzle
-Yazzi
-Yazdani
-Yaws
-Yasika
-Yarwood
-Yarris
-Yaroch
-Yarmitsky
-Yara
-Yantzi
-Yannucci
-Yannayon
-Yannantuono
-Yankovski
-Yankovitch
-Yandow
-Yanchik
-Yanagihara
-Yanagida
-Yanacek
-Yamanoha
-Yamaki
-Yalon
-Yaklin
-Yake
-Yaiva
-Yaish
-Yahne
-Yafuso
-Yafaie
-Yacullo
-Yacovone
-Yacoub
-Xyong
-Xayasith
-Wyze
-Wyrostek
-Wynes
-Wyker
-Wygal
-Wybenga
-Wurz
-Wung
-Wueste
-Wubnig
-Wubbena
-Wubben
-Wrzesien
-Wrynn
-Wrightington
-Wride
-Wreyford
-Woytowich
-Woytek
-Wosick
-Workowski
-Worell
-Wordlow
-Worchester
-Wooward
-Woolhiser
-Woodlin
-Woodka
-Woodbeck
-Woodal
-Wondoloski
-Wonderling
-Wolsdorf
-Wolper
-Wollert
-Wollenburg
-Woline
-Wolfing
-Wolfensperger
-Wolbrecht
-Wojnowski
-Wojewoda
-Wojdak
-Wohlfeil
-Wohlert
-Woge
-Woelfl
-Wodicka
-Wobser
-Wobbe
-Wnukowski
-Wnorowski
-Wmith
-Wlodarek
-Wiza
-Witucki
-Wittrup
-Wittnebel
-Witthoeft
-Wittenbrink
-Wittbrodt
-Witkowsky
-Wisnowski
-Wisely
-Wirtzfeld
-Wirfs
-Wipfli
-Winterberg
-Winslette
-Winscott
-Winnicki
-Winnen
-Winik
-Wingeier
-Windsheimer
-Windrow
-Windhorst
-Windfield
-Windauer
-Wincapaw
-Win
-Wimbrow
-Wimble
-Wilund
-Wilshusen
-Wilsen
-Willock
-Willmert
-Willies
-Williemae
-Williamis
-Willia
-Willi
-Willeto
-Willborn
-Wilkus
-Wilkson
-Wilkoff
-Wildridge
-Wilczak
-Wilcut
-Wiklund
-Wiggan
-Wigand
-Wig
-Wiesemann
-Wieseman
-Wiersteiner
-Wienberg
-Wielock
-Wielgasz
-Wiegard
-Wiedrich
-Wiederholt
-Wieben
-Widjaja
-Widera
-Wide
-Wicklin
-Wickersheim
-Wiborg
-Wiatrowski
-Why
-Whittum
-Whittinghill
-Whittenbeck
-Whitiker
-Whitey
-Whiter
-Whitelightnin
-Whitcome
-Whisted
-Whirlow
-Whiles
-Whilden
-Whetzell
-Whelihan
-Wheeldon
-Wheater
-Whaltey
-Weynand
-Weyker
-Weydert
-Weuve
-Wetzstein
-Wetzell
-Westler
-Westermeier
-Westermark
-Westermann
-Westerhoff
-Westbrooke
-Weske
-Weser
-Werst
-Werremeyer
-Wernsman
-Wernex
-Wern
-Werme
-Werline
-Werk
-Wergin
-Werdlow
-Werderman
-Went
-Wensman
-Wenske
-Wendorff
-Welzel
-Weltha
-Wellinghoff
-Welding
-Weit
-Weissenbach
-Weispfenning
-Weismantle
-Weisbecker
-Weirauch
-Weinzierl
-Weinrib
-Weinland
-Weinfurter
-Weinburg
-Weiher
-Weig
-Weidower
-Weicht
-Weibe
-Wehking
-Weglage
-Wegiel
-Wedige
-Weckwerth
-Weatherington
-Weasel
-Weant
-Wealer
-Weagraff
-Weader
-Wayts
-Wayson
-Waymon
-Waygood
-Wayford
-Waychowsky
-Waverly
-Wattigny
-Watsky
-Watry
-Wates
-Watah
-Wasurick
-Wassam
-Waskom
-Waskin
-Washum
-Washpun
-Washler
-Waser
-Warzybok
-Warstler
-Warrilow
-Warran
-Waroway
-Warntz
-Warnberg
-Warmka
-Warmbrod
-Warlow
-Warlock
-Warde
-War
-Wapp
-Wantuck
-Wannlund
-Wannarka
-Wanko
-Wandell
-Walund
-Waltos
-Waltho
-Walstrum
-Walrod
-Walper
-Waln
-Wallwork
-Wallo
-Wallman
-Walliser
-Wallie
-Wallenbrock
-Wallau
-Walka
-Walizer
-Walgren
-Waley
-Walen
-Waldroop
-Walderon
-Wal
-Wakeford
-Waitz
-Waiss
-Waisanen
-Wais
-Wainkrantz
-Wahn
-Wahdan
-Wahba
-Wagnor
-Waggy
-Wagemann
-Wagatsuma
-Waffenschmidt
-Waegner
-Waddups
-Waddles
-Wadas
-Wacht
-Waas
-Waaga
-Vuoso
-Vukelj
-Vriens
-Vredeveld
-Vrbas
-Vranicar
-Vovak
-Votsmier
-Vostal
-Vorsburgh
-Vornes
-Vopava
-Vonseeger
-Vonschriltz
-Vonholt
-Vongsamphanh
-Vongkhamphanh
-Vongkhamchanh
-Vonfelden
-Voner
-Vondrasek
-Vondracek
-Vonderhaar
-Vonderahe
-Vonbank
-Volpone
-Volmar
-Vollmers
-Vollette
-Volinsky
-Volek
-Volbert
-Vojna
-Voigtlander
-Vogelzang
-Voeltz
-Voelkerding
-Vocelka
-Vljeric
-Vleming
-Vlchek
-Vizzi
-Vixayack
-Vixay
-Vivyan
-Vivion
-Vitrano
-Vitez
-Vitellaro
-Visounnaraj
-Visick
-Viscosi
-Virostko
-Virgile
-Virgadamo
-Virant
-Vintila
-Vinti
-Vint
-Vilven
-Vilt
-Villnave
-Villescaz
-Ville
-Villasis
-Villaplana
-Villao
-Villanveua
-Villanvera
-Villandry
-Villamayor
-Villamarin
-Villaluz
-Villaluazo
-Villaire
-Villacrusis
-Vilegas
-Vildosola
-Viker
-Vijil
-Vijayan
-Vigneau
-Vigilo
-Vigiano
-Vieu
-Vietzke
-Vierk
-Viengxay
-Vieau
-Vidas
-Vidaca
-Vicuna
-Vicueroa
-Vicenteno
-Vias
-Viard
-Viano
-Viale
-Viafara
-Vezza
-Vevea
-Vetterkind
-Vetterick
-Veto
-Vessar
-Vesperas
-Vesley
-Verwers
-Verunza
-Verso
-Versage
-Verrue
-Verrone
-Verrastro
-Verplanck
-Verone
-Vernazza
-Verlinden
-Verlin
-Verkuilen
-Verfaillie
-Venzor
-Venturelli
-Venskoske
-Venning
-Venneman
-Veneri
-Vendig
-Vence
-Veltkamp
-Velthuis
-Velovic
-Veller
-Velky
-Velega
-Velardes
-Veksler
-Veitinger
-Vehrenkamp
-Vegerano
-Vedovelli
-Veasman
-Vbiles
-Vautier
-Vaulet
-Vatterott
-Vasudevan
-Vasos
-Vasek
-Vasallo
-Varquez
-Varquera
-Varoz
-Varone
-Varisco
-Varieur
-Varanda
-Vanzie
-Vanwyck
-Vanwhy
-Vanweerd
-Vanwechel
-Vanvuren
-Vanvorst
-Vanveldhuize
-Vanuden
-Vantuyle
-Vantull
-Vansteenhuyse
-Vansteenberg
-Vanson
-Vansise
-Vanschoor
-Vanschoiack
-Vanrossum
-Vanosdol
-Vanos
-Vanorsouw
-Vanoni
-Vannuck
-Vanlinden
-Vanlier
-Vanlaere
-Vaninetti
-Vanhove
-Vanhoutte
-Vanhoecke
-Vanheusen
-Vanhamme
-Vanham
-Vangordon
-Vaneekelen
-Vandonsel
-Vandevanter
-Vandesande
-Vandernoot
-Vanderjagt
-Vanderiet
-Vanderhurst
-Vanderbie
-Vandawalker
-Vandaele
-Vanblaricum
-Vanbeveren
-Vanamerongen
-Vanamburgh
-Vanalstin
-Valtas
-Valme
-Vallow
-Vallotton
-Valliant
-Vallegos
-Vallar
-Valladores
-Valerino
-Valeriani
-Valela
-Valdo
-Valant
-Valado
-Vajnar
-Vais
-Vagnier
-Vadlamudi
-Vactor
-Vaccarello
-Vacarro
-Uzzo
-Uutela
-Utzig
-Useted
-Urtz
-Urtiz
-Urtiaga
-Urteaga
-Urquides
-Urmston
-Urmos
-Urbany
-Urbaez
-Uptmor
-Upole
-Uphold
-Uoy
-Unverzagt
-Unvarsky
-Unterseher
-Unterman
-Unglesbee
-Underdue
-Uncapher
-Umeh
-Ulven
-Ulvan
-Ulshafer
-Ulsamer
-Uljevic
-Ulbricht
-Ulabarro
-Ujano
-Uimari
-Uihlein
-Ugolini
-Uglum
-Ufford
-Ueckert
-Udani
-Uchiyama
-Ubl
-Ubaldo
-Tyrie
-Tyndal
-Tyms
-Tylwalk
-Tyeryar
-Twilligear
-Twidwell
-Twardy
-Tuzzio
-Tutterow
-Tutaj
-Turziano
-Turzak
-Turtura
-Turtle
-Turrietta
-Turns
-Turnell
-Turneer
-Turnbill
-Turello
-Turbacuski
-Tupaj
-Tupacyupanqui
-Tuomi
-Tuomala
-Tuohey
-Tuning
-Tumolo
-Tuman
-Tullar
-Tulino
-Tuggerson
-Tuckerson
-Tucke
-Tuchy
-Tucek
-Tucciarone
-Tuamoheloa
-Tuai
-Tua
-Tsu
-Tsironis
-Tsing
-Tsiatsos
-Tsemetzis
-Tscrious
-Tsau
-Tsasie
-Tsakonas
-Trypaluk
-Trygg
-Truxell
-Truver
-Trusso
-Trush
-Trusello
-Truocchio
-Truncellito
-Trumps
-Trumper
-Trumbley
-Trulli
-Truhe
-Truglia
-Trufin
-Trudnowski
-Trudics
-Trudgeon
-Trucks
-Trucker
-Troyano
-Troyani
-Trouser
-Trotty
-Tronaas
-Tromley
-Tromburg
-Troller
-Trojecki
-Trojahn
-Troike
-Troidl
-Troge
-Trofholz
-Trochesset
-Trish
-Trio
-Trinkley
-Trinkl
-Tringham
-Trindle
-Trimnell
-Trilli
-Trill
-Triguro
-Trigueros
-Triece
-Trider
-Trexel
-Trewin
-Trewhitt
-Treuter
-Treutel
-Trettin
-Trett
-Treso
-Trenton
-Trentini
-Trenholme
-Tremel
-Trell
-Tregan
-Trecarichi
-Trbovich
-Traverse
-Traunfeld
-Trapanese
-Tramp
-Tramm
-Trajillo
-Trahin
-Traher
-Tradup
-Toyne
-Toyama
-Townzen
-Towber
-Toussiant
-Tousom
-Tourtelotte
-Touma
-Toulmin
-Touhy
-Tottingham
-Totter
-Tott
-Totosz
-Toti
-Tota
-Tostanoski
-Toso
-Tory
-Torreson
-Torreon
-Torrell
-Torralva
-Torno
-Torngren
-Tornese
-Tordsen
-Torbit
-Torbeck
-Toppins
-Toppen
-Toppah
-Topolinski
-Toplk
-Topliss
-Toplin
-Topinka
-Topi
-Toomsen
-Tools
-Toof
-Too
-Tonic
-Toniatti
-Toni
-Tongren
-Tonche
-Tonas
-Tomsick
-Tomsche
-Tomopoulos
-Tomkowicz
-Tomasko
-Toliongco
-Toleston
-Tokunaga
-Tokita
-Tohonnie
-Tognetti
-Toevs
-Todora
-Todahl
-Tod
-Tocher
-Tocchio
-Tobosa
-Tobiason
-Tjepkema
-Tizon
-Tixier
-Tiwald
-Tittl
-Tisue
-Tisinger
-Tisa
-Tirona
-Tiro
-Tirk
-Tirino
-Tiotuico
-Tinnea
-Tinin
-Timone
-Timber
-Tilleman
-Tille
-Tiley
-Tijing
-Tigg
-Tiffner
-Tietjens
-Tieger
-Tidrington
-Tidrick
-Tibwell
-Tibolla
-Tibbit
-Tiangco
-Tian
-Thyfault
-Thurstonson
-Thundercloud
-Thuman
-Thrun
-Thrill
-Thorsten
-Thornquist
-Thorner
-Thormina
-Thormer
-Thoran
-Thomspon
-Thoeny
-Thoennes
-Thoele
-Thoby
-Thillet
-Thiesse
-Thibedeau
-Theuner
-Thessing
-Therurer
-Thero
-Theo
-Themot
-Them
-Thein
-Theim
-Theiling
-Theesfeld
-Theaker
-Thaniel
-Thamphia
-Thammorongsa
-Thalheimer
-Thain
-Thaemert
-Thackxton
-Thackrey
-Thackery
-Teyler
-Tewmey
-Tevada
-Tetz
-Tetteh
-Tetro
-Tetreau
-Testman
-Tessner
-Tesoriero
-Tesnow
-Tesauro
-Tersteeg
-Terrett
-Terrero
-Terrence
-Terrall
-Terr
-Terkelsen
-Terbush
-Teranishi
-Tepperberg
-Tentler
-Tenor
-Tenharmsel
-Tengwall
-Tenerowicz
-Tenebruso
-Tendick
-Tencer
-Ten
-Temoshenka
-Telman
-Tellinghuisen
-Telega
-Telchik
-Tejeiro
-Teitel
-Teichrow
-Teichmiller
-Tegtmeier
-Tegenkamp
-Teet
-Teeples
-Teepe
-Tebow
-Tebbetts
-Tebbe
-Tease
-Teach
-Tayo
-Taymon
-Taylan
-Taydus
-Tavolario
-Taves
-Tauteoli
-Tatu
-Tatsak
-Tatnall
-Tates
-Tasto
-Tasse
-Tashman
-Tartar
-Tarsis
-Tarris
-Tarricone
-Tarran
-Tarner
-Tarbor
-Tarbet
-Tarasuik
-Taraschke
-Taps
-Tappis
-Tapio
-Tapat
-Tapales
-Tapaha
-Taomoto
-Tanzosch
-Tanzman
-Tanweer
-Tanoue
-Tanori
-Tanon
-Tannazzo
-Tanker
-Tanke
-Tango
-Tanen
-Tandon
-Tandetzke
-Tancer
-Tamminen
-Tamiya
-Tameron
-Talladino
-Taliulu
-Talburt
-Talboti
-Talat
-Talamas
-Takiguchi
-Takenaka
-Tak
-Tahir
-Tagliente
-Taglialatela
-Tagge
-Tagami
-Tafuri
-Tafreshi
-Tacderen
-Taccariello
-Tacata
-Tacadina
-Tablada
-Tabet
-Taberski
-Tabbaa
-Taake
-Szypowski
-Szynkowicz
-Szymula
-Szychowski
-Szwarc
-Szuszkiewicz
-Szumny
-Szumilas
-Szumiesz
-Szuch
-Szuba
-Sznejkowski
-Szmidt
-Szlosek
-Szigethy
-Szenasi
-Szczurek
-Szczesniak
-Szalankiewicz
-Szalai
-Szal
-Szaflarski
-Syrstad
-Syrop
-Synowiec
-Synakowski
-Symore
-Symon
-Syddall
-Sybounheuan
-Swonke
-Swisshelm
-Swiller
-Swenton
-Swell
-Sweley
-Sweger
-Swefford
-Sweere
-Swee
-Swedeen
-Sweazey
-Swearngen
-Swaynos
-Swatloski
-Swatek
-Swary
-Swartley
-Swarr
-Swarn
-Swarb
-Swarat
-Swanzy
-Swantner
-Swantko
-Swanteck
-Swanick
-Swaine
-Swadling
-Svob
-Svensen
-Sutt
-Suto
-Sutherburg
-Susmilch
-Susla
-Susko
-Susan
-Surridge
-Surran
-Surkamer
-Suon
-Suominen
-Suneson
-Sundman
-Sumstad
-Sumruld
-Sumey
-Sumbera
-Sumaran
-Sultaire
-Sully
-Sulloway
-Sulkowski
-Sulc
-Sukut
-Sukup
-Sukovich
-Suihkonen
-Suga
-Suffern
-Sueyoshi
-Suet
-Suennen
-Suellentrop
-Sueda
-Suddath
-Succop
-Sub
-Sualevai
-Styler
-Stvictor
-Stuzman
-Stusse
-Sturwold
-Sturino
-Sturiale
-Sturdnant
-Stupke
-Stumm
-Stumb
-Stukel
-Stufflebean
-Stuever
-Stuessy
-Stuedemann
-Stueckrath
-Stueck
-Studwell
-Stubler
-Stubbert
-Strzyzewski
-Strzelczyk
-Strutynski
-Struckmann
-Struber
-Strow
-Stropus
-Strople
-Stroot
-Strohecker
-String
-Strimel
-Stright
-Striffler
-Stridiron
-Stricklan
-Strem
-Streller
-Strekas
-Strek
-Streitz
-Streitenberge
-Strech
-Streat
-Strazzullo
-Strawberry
-Stratter
-Strathmann
-Strassell
-Strassberg
-Strangstalien
-Stoyanov
-Stouten
-Stoutamyer
-Stotelmyer
-Stoskopf
-Storton
-Storbeck
-Stoppenbach
-Stoot
-Stoor
-Stonewall
-Stonefield
-Stolzenberg
-Stollsteimer
-Stokel
-Stohs
-Stohrer
-Stofferahn
-Stoermer
-Stoen
-Stoecklin
-Stockhoff
-Stockburger
-Stoakley
-Stoa
-Stlucien
-Stitz
-Stittgen
-Stitch
-Stires
-Stippich
-Stinser
-Stinemetz
-Stinde
-Stinar
-Stimus
-Stiliner
-Stilgenbauer
-Stifflemire
-Stickfort
-Sticher
-Stibb
-Stewardson
-Stevison
-Steube
-Sternod
-Sterger
-Steptore
-Steppig
-Stepleton
-Stephanski
-Stephano
-Stepchinski
-Stepanik
-Stepaniak
-Stenslien
-Stenslie
-Stengle
-Stengele
-Stendal
-Stempert
-Steman
-Stelmach
-Steitzer
-Steinworth
-Steinway
-Steins
-Steinour
-Steinmiller
-Steinhouse
-Steinhour
-Steinger
-Steindorf
-Steinau
-Steinacker
-Stegmann
-Steff
-Stefansky
-Steensland
-Steenrod
-Steenland
-Steeby
-Stech
-Stealy
-Steagell
-Steadings
-Steach
-Stawasz
-Stavsvick
-Stavrides
-Stavish
-Stathes
-State
-Stassinos
-Stasser
-Stasio
-Stasa
-Starzynski
-Starritt
-Starring
-Starnold
-Starchman
-Starch
-Starace
-Stapelton
-Stanuszek
-Stanovich
-Stankovic
-Stankey
-Stanislaw
-Staniforth
-Stanier
-Stangarone
-Stanganelli
-Standlee
-Standerwick
-Standback
-Stancombe
-Stancer
-Stancato
-Stammel
-Stambough
-Stallones
-Stakelin
-Stagnitto
-Stafiej
-Staffon
-Staffieri
-Staffen
-Stade
-Stachniw
-Stachnik
-Stacer
-Staber
-Stabell
-Staback
-Staadt
-Spunt
-Spueler
-Spruit
-Spruel
-Spriggins
-Spratlen
-Sprain
-Sprafka
-Sportsman
-Sports
-Sporle
-Spoerl
-Spoerer
-Splonskowski
-Splinter
-Splane
-Spizzirri
-Spinoso
-Spinka
-Spiney
-Spine
-Spindola
-Spindle
-Spinas
-Spilski
-Spielmaker
-Spiegle
-Spevacek
-Sperrey
-Sperger
-Sperduti
-Speranza
-Sperandeo
-Spender
-Spena
-Spella
-Speith
-Speis
-Speiden
-Speidell
-Speese
-Specter
-Speake
-Speagle
-Spaun
-Spara
-Spanton
-Spanswick
-Spannbauer
-Spana
-Spaide
-Spadlin
-Sowash
-Sovey
-Sovak
-Souvannavong
-Souvannarith
-Souvannakhiry
-Souser
-Soulek
-Soukkhavong
-Soucek
-Sottosanti
-Sotlar
-Sotak
-Sossong
-Sosso
-Sosinsky
-Soscia
-Sorotzkin
-Sorokin
-Sorman
-Sorgatz
-Soren
-Soravilla
-Sor
-Soprych
-Sopata
-Soorus
-Sookoo
-Sonnenburg
-Sonkens
-Sondrini
-Sondelski
-Somsana
-Sommerdorf
-Sommella
-Solverson
-Soltren
-Soltes
-Solonika
-Solomons
-Sollock
-Sollman
-Solle
-Solimeno
-Soliece
-Solgovic
-Soldow
-Solas
-Solarz
-Sokorai
-Sokolik
-Soisson
-Sohrabi
-Soho
-Sogol
-Soga
-Sofka
-Sodomka
-Sodachanh
-Sochocki
-Socci
-Sobrowski
-Sobrino
-Soboleski
-Soberano
-Sobba
-Sobania
-Soans
-Snuffer
-Snowdon
-Snowdeal
-Snoderly
-Snock
-Snitker
-Snith
-Sniff
-Snedeger
-Snearly
-Snachez
-Smurthwaite
-Smolski
-Smithmyer
-Smithen
-Smithberger
-Smisek
-Smily
-Smiglewski
-Smietana
-Smialowski
-Smeltz
-Smelko
-Smeenk
-Smedsrud
-Smayda
-Smaw
-Smarsh
-Smalt
-Smalarz
-Slutzky
-Sluis
-Sloup
-Slotkin
-Slosek
-Sloon
-Slomski
-Slocombe
-Slockbower
-Slisz
-Slinsky
-Slicer
-Sleek
-Slayman
-Slavis
-Slatin
-Slanina
-Slagel
-Sladky
-Sladek
-Skyberg
-Skwara
-Skursky
-Skurski
-Skura
-Skrobacki
-Skretowicz
-Skorepa
-Skomo
-Sknerski
-Skinsacos
-Skillom
-Skillen
-Skibosh
-Skibisky
-Skewis
-Skene
-Skender
-Skalecki
-Skafec
-Sixon
-Sivia
-Sivert
-Sitto
-Sita
-Sissman
-Sisneroz
-Siskey
-Sischo
-Sirwet
-Sirucek
-Sirrine
-Sirnio
-Siriani
-Sirek
-Sippial
-Sionesini
-Sioma
-Sinkiewicz
-Sininger
-Singuefield
-Sings
-Singhisen
-Singeltary
-Singco
-Siner
-Sindt
-Sindorf
-Sindoni
-Sindel
-Simzer
-Simunek
-Simplot
-Simpelo
-Simonetta
-Simonett
-Simoneavd
-Simmelink
-Simlick
-Simkowitz
-Simino
-Simers
-Simer
-Simcic
-Simank
-Silverwood
-Silverhorn
-Silquero
-Sillitti
-Sillery
-Silla
-Silker
-Silerio
-Silagy
-Silago
-Sikorra
-Sikkila
-Sikel
-Sikat
-Sikander
-Sigworth
-Signorino
-Sigafoos
-Siewers
-Sievel
-Sierzenga
-Sierer
-Siepker
-Siena
-Sien
-Siegfreid
-Siegers
-Siefkes
-Siefferman
-Siebel
-Sidles
-Side
-Siddiq
-Sida
-Sickmeir
-Sickendick
-Sichler
-Sicheneder
-Sichel
-Siangco
-Siad
-Shymske
-Shutte
-Shutes
-Shurkus
-Shumay
-Shukert
-Shuhi
-Shuga
-Shuckhart
-Shryer
-Shroeder
-Shrimplin
-Shrier
-Shrefler
-Shrake
-Shoyer
-Showden
-Shouts
-Shoto
-Shonts
-Shoeman
-Shoddie
-Shirilla
-Shird
-Shirai
-Shipwash
-Shiplet
-Shipler
-Shintani
-Shinney
-Shinko
-Shindorf
-Shimonishi
-Shimanuki
-Shiller
-Shiiba
-Shigemitsu
-Shigematsu
-Shifley
-Shifflette
-Shiever
-Shido
-Shidemantle
-Shidel
-Shibahara
-Shey
-Shevenell
-Shetz
-Sheskey
-Sherratt
-Sherif
-Sherfy
-Sherbo
-Shepp
-Shenberger
-Shenassa
-Shemper
-Sheltrown
-Shellum
-Shellnut
-Shellhorn
-Shellgren
-Shelenberger
-Sheive
-Sheasby
-Shearier
-Shearhart
-Shawler
-Shawaiki
-Shaull
-Shau
-Shatt
-Sharratt
-Sharrai
-Sharpsteen
-Sharpey
-Sharley
-Shariff
-Shariat
-Sharar
-Shapin
-Shansky
-Shannonhouse
-Shangraw
-Shammaa
-Shamapande
-Shalam
-Shaker
-Shahinian
-Shaginaw
-Shaggy
-Shafto
-Shafi
-Shaer
-Shae
-Shadix
-Shadburn
-Sfera
-Sfatcu
-Seymoure
-Sey
-Sewester
-Severyn
-Seutter
-Seuss
-Seufer
-Settecase
-Sespinosa
-Servey
-Servano
-Serum
-Sertuche
-Sert
-Serro
-Serret
-Serre
-Sermon
-Sermania
-Sergovia
-Seremet
-Serabia
-Ser
-Sephton
-Sep
-Senta
-Sensenbach
-Senneker
-Senk
-Senion
-Senemounnarat
-Seneker
-Semo
-Semenick
-Seltrecht
-Sellar
-Seliski
-Selis
-Seligmann
-Selia
-Selestewa
-Selem
-Sele
-Selca
-Selbert
-Selbe
-Sekerak
-Sejkora
-Seiz
-Seiver
-Seirer
-Seilhymer
-Seiley
-Seiger
-Seigart
-Seifts
-Seiffert
-Seidle
-Seide
-Seiberlich
-Segota
-Segobia
-Seewald
-Seepersaud
-Seen
-Sedy
-Sedtal
-Sedotal
-Sedler
-Sedlachek
-Secreto
-Secora
-Secky
-Seckington
-Sebestyen
-Sebers
-Searchwell
-Searchfield
-Searcey
-Seanor
-Sean
-Seamen
-Sealander
-Seaford
-Scullion
-Scrudato
-Scronce
-Scrobola
-Scribellito
-Scozzari
-Scoresby
-Scolnik
-Scoh
-Scoble
-Sclavi
-Sciuto
-Scisco
-Scigliano
-Scieszka
-Scierka
-Scibetta
-Sciavillo
-Sciarini
-Sciancalepore
-Schwuchow
-Schwoyer
-Schwoerer
-Schwien
-Schwetz
-Schwertfager
-Schwentker
-Schwent
-Schwendinger
-Schwemm
-Schweiner
-Schwarzenberg
-Schwartzer
-Schwarten
-Schwanebeck
-Schwanbeck
-Schwallie
-Schwald
-Schuyleman
-Schustrich
-Schurer
-Schuppenhauer
-Schumucker
-Schumans
-Schuiling
-Schueth
-Schuckert
-Schuchmann
-Schuble
-Schub
-Schroy
-Schromen
-Schroeppel
-Schroedel
-Schreur
-Schreimann
-Schrecker
-Schouweiler
-Schou
-Schornick
-Schoreplum
-Schooling
-School
-Schoo
-Schontz
-Schoninger
-Schoneck
-Schone
-Schonaerts
-Schomberg
-Schollmeier
-Schoepflin
-Schoenegge
-Schoeneck
-Schoeller
-Schoebel
-Schnitman
-Schnetter
-Schnelzer
-Schneidmiller
-Schnair
-Schnabl
-Schmuff
-Schmoldt
-Schmider
-Schmeer
-Schlussel
-Schlissel
-Schlett
-Schlesner
-Schlesener
-Schlepphorst
-Schlepp
-Schlechten
-Schlaack
-Schiveley
-Schirm
-Schimanski
-Schilmoeller
-Schille
-Schilawski
-Schiffner
-Schiffert
-Schiedler
-Schickler
-Schiappa
-Scheuring
-Scheule
-Schepker
-Schenz
-Schenkelberg
-Schembri
-Schembra
-Schellhorn
-Schellenberge
-Schelle
-Scheitlin
-Scheidecker
-Scheibner
-Scheiblich
-Schehl
-Schefers
-Schee
-Schearer
-Schaubert
-Schattschneid
-Scharich
-Schares
-Scharber
-Schappach
-Schaneman
-Schamberger
-Schak
-Schaetzle
-Schaecher
-Scerbo
-Scelba
-Scavona
-Scatton
-Scarsdale
-Scarr
-Scarpone
-Scarlata
-Scariano
-Scandurra
-Scandura
-Scandalis
-Scammahorn
-Scafuto
-Scaffe
-Scachette
-Sayyed
-Sayko
-Sayco
-Sayasane
-Sayaphon
-Sawney
-Sawdo
-Sawatzke
-Sawallich
-Savko
-Savka
-Savitts
-Saviola
-Savio
-Savine
-Savich
-Savells
-Saulpaugh
-Saulino
-Sauler
-Saugis
-Sauber
-Sau
-Saturnio
-Sattel
-Satomba
-Saterfield
-Satava
-Sasseville
-Sasahara
-Sarzynski
-Sartorius
-Sartore
-Sartell
-Sarsour
-Sarson
-Sarp
-Sarnosky
-Sarni
-Sarlinas
-Sarka
-Sarinsky
-Sarin
-Sardo
-Sarden
-Sarchett
-Sarault
-Sarate
-Sarao
-Sarantakis
-Saralegui
-Sapper
-Sappah
-Sapinski
-Sapardanis
-Sapara
-Sanyaro
-Santwire
-Santrmire
-Santoriella
-Santor
-Santomassimo
-Santisteban
-Santillanez
-Santamarina
-Sansotta
-Sanpson
-Sannutti
-Sankoh
-Sangasy
-Sanfelix
-Sandvill
-Sandus
-Sandstede
-Sandling
-Sandland
-Sandhop
-Sandeen
-Sandblom
-Sanday
-Sandager
-Sancrant
-Sancken
-Sanchirico
-Sancher
-Sances
-Sanberg
-Sanacore
-Samyn
-Samul
-Samrov
-Samrah
-Sampere
-Sampang
-Samland
-Samii
-Samiento
-Sames
-Sambrook
-Samborski
-Samberg
-Samaroo
-Salzl
-Salvio
-Salvati
-Salvadge
-Saluan
-Saltzberg
-Saltus
-Saltman
-Salstrom
-Salotti
-Salmonsen
-Sallmen
-Salle
-Sallach
-Salines
-Salesky
-Saleme
-Saleha
-Saldano
-Salb
-Salazak
-Salasar
-Salado
-Salach
-Sakumoto
-Sakamaki
-Sajovic
-Sajous
-Sainte
-Sainliere
-Sainato
-Sails
-Saik
-Saieva
-Saice
-Sahe
-Sahady
-Sago
-Saft
-Safier
-Saffo
-Safer
-Saether
-Saens
-Saeler
-Saelens
-Sadvary
-Sadoski
-Sadorra
-Sadolsky
-Sadin
-Sadik
-Sadeghi
-Sadat
-Sacramed
-Sachetti
-Sacchi
-Sacca
-Saberi
-Saarela
-Saadat
-Saabatmand
-Rzeczycki
-Rysz
-Rynkowski
-Rynerson
-Ryneer
-Rymut
-Rymes
-Rymasz
-Rylaarsdam
-Rykaczewski
-Ryen
-Ryea
-Rydin
-Rydelek
-Rydel
-Rydeen
-Rybinski
-Ruvalcava
-Rutski
-Rutske
-Rutman
-Rutkin
-Ruths
-Ruthman
-Ruthers
-Rutheford
-Rutgers
-Rutenberg
-Rutar
-Russwurm
-Russomano
-Russomanno
-Russer
-Russello
-Rushanan
-Rusen
-Ruschmeyer
-Rusaw
-Rupnick
-Rupley
-Rupinski
-Ruopoli
-Rumps
-Rumbach
-Rulapaugh
-Ruivo
-Ruiter
-Ruhoff
-Ruhn
-Ruhman
-Ruggirello
-Ruffell
-Ruffel
-Ruezga
-Ruesga
-Ruelar
-Ruehter
-Ruehling
-Ruehlen
-Ruedas
-Rued
-Rueck
-Rudoy
-Rudio
-Rudh
-Rudell
-Rudat
-Rudack
-Ruckey
-Ruckel
-Ruckdaschel
-Rubsam
-Rubie
-Rubick
-Ruberti
-Rubeo
-Rubenfield
-Rubenfeld
-Rubash
-Rubalcave
-Rozzelle
-Rozon
-Royle
-Roxbury
-Rowlison
-Rowels
-Rowbotham
-Rovell
-Rouw
-Routzen
-Routzahn
-Routte
-Rousso
-Rousell
-Rous
-Rounsville
-Rouly
-Roulhac
-Roulette
-Roule
-Rouhoff
-Roughen
-Rouch
-Rottinghous
-Rottier
-Rotruck
-Rotkowski
-Rotkovecz
-Rothfeld
-Rotherham
-Rotch
-Rotanelli
-Rosul
-Rossie
-Rossen
-Rosseel
-Rosky
-Rosian
-Rosher
-Rosewall
-Roseum
-Roseth
-Rosenwinkel
-Rosentrater
-Rosenlof
-Rosenhagen
-Rosengren
-Rosendorf
-Rosendale
-Rosenbush
-Rosemore
-Rosek
-Rosebur
-Roscup
-Rosca
-Rosboril
-Rosazza
-Rosane
-Rorabacher
-Ropka
-Roofner
-Ronsini
-Ronnie
-Ronnfeldt
-Ronn
-Ronero
-Roner
-Ronayne
-Rona
-Ron
-Romprey
-Rommelfanger
-Romkema
-Romiro
-Romay
-Romanowicz
-Romanov
-Romanoff
-Romaniszyn
-Romanek
-Romane
-Rollf
-Rollag
-Rolfson
-Rolack
-Rokicki
-Rohrdanz
-Rohdenburg
-Rohal
-Rogowicz
-Rogish
-Rogian
-Rogens
-Rogado
-Roesslein
-Roesing
-Roerig
-Roenigk
-Roelle
-Roehler
-Rodvold
-Rodrigres
-Rodregues
-Rodolph
-Rodkin
-Rodiquez
-Rodina
-Rodero
-Roderman
-Roderiquez
-Rodenizer
-Rodenbough
-Rodebush
-Rodde
-Rocle
-Rochlitz
-Rochkes
-Rocheford
-Robyn
-Robusto
-Roberston
-Robbie
-Robbert
-Robberson
-Robair
-Roam
-Roadruck
-Roades
-Roaden
-Roadarmel
-Rizzardi
-Rivinius
-Riveras
-Rivello
-Rivelli
-Rivadulla
-Rittinger
-Rittie
-Rittichier
-Ritthaler
-Ritmiller
-Riskin
-Risien
-Rishor
-Risatti
-Ripson
-Ringold
-Ringen
-Rinfret
-Rineheart
-Rindal
-Rincan
-Rinauro
-Rinaldis
-Rina
-Rimkus
-Rimi
-Rimel
-Rimbach
-Rily
-Rillie
-Riller
-Rihner
-Riherd
-Rigley
-Rightmyer
-Righthouse
-Riggert
-Riggers
-Rigerman
-Rigas
-Rifai
-Riesner
-Rienzo
-Riemersma
-Riefer
-Ridgebear
-Rides
-Ridell
-Ridall
-Ricucci
-Ricley
-Rickerl
-Richemond
-Richelieu
-Richel
-Richardville
-Riccitelli
-Ricciardelli
-Ricardez
-Riblett
-Ribar
-Riase
-Rian
-Rhym
-Rhule
-Rhude
-Rhondes
-Rhodehamel
-Rhim
-Rheingold
-Rheaves
-Reznick
-Reynero
-Revolorio
-Revette
-Revelo
-Reuven
-Reusswig
-Reusser
-Reuhl
-Reuber
-Rettele
-Retka
-Retersdorf
-Resseguie
-Resper
-Resner
-Resides
-Reshard
-Resek
-Reseigh
-Repaci
-Renzullo
-Renuart
-Rentfrow
-Rennemeyer
-Renneker
-Renkes
-Renier
-Rendle
-Renburg
-Remsburg
-Remos
-Remmie
-Remmick
-Remlin
-Remkus
-Remfert
-Remey
-Remerez
-Remedies
-Remaly
-Relph
-Rellihan
-Relles
-Relaford
-Reksten
-Rekas
-Reitzes
-Reiten
-Reitema
-Reisin
-Reinmann
-Reinicke
-Reinholdt
-Reinheimer
-Reinfeld
-Reineman
-Reineking
-Reinartz
-Reimel
-Reik
-Reihe
-Reidling
-Reidler
-Reichenberg
-Reichenback
-Reho
-Rehnborg
-Rehnberg
-Rehart
-Regusters
-Regulus
-Reglin
-Reginal
-Reges
-Regensburg
-Regen
-Regas
-Reevers
-Reever
-Reeter
-Reedholm
-Redle
-Redic
-Redfear
-Reddekopp
-Rechel
-Rebick
-Rebholz
-Reazer
-Reauish
-Reath
-Reasinger
-Reas
-Reary
-Realmuto
-Reager
-Readenour
-Razze
-Rawicki
-Rawhoof
-Ravi
-Ravetti
-Ravenscraft
-Rava
-Rauf
-Rauelo
-Rattee
-Rattay
-Rattanachane
-Rattana
-Rathmanner
-Rathgeber
-Rathe
-Rathbum
-Rasul
-Rastogi
-Rastelli
-Rassman
-Rasmuson
-Rasely
-Raschko
-Raschilla
-Rasche
-Rasanen
-Rary
-Raring
-Raridon
-Rarey
-Raquel
-Rappenecker
-Rapelyea
-Ransier
-Ransberger
-Rannalli
-Ranjel
-Ranford
-Randoll
-Randklev
-Ramy
-Ramundo
-Ramu
-Ramsuer
-Ramstad
-Ramsbottom
-Ramphal
-Ramnarine
-Rammer
-Ramiscal
-Ramgel
-Ramesar
-Ramento
-Rambeau
-Ramales
-Ralon
-Rallison
-Rakich
-Raith
-Raiola
-Rainwaters
-Rainbott
-Raimundo
-Raimer
-Raimann
-Railing
-Rahl
-Rahama
-Ragusano
-Rafla
-Rafiq
-Rafi
-Raffone
-Raffo
-Rafail
-Raelson
-Raehl
-Raebel
-Radway
-Radue
-Radona
-Radisovich
-Radics
-Rademan
-Radeke
-Radder
-Radden
-Rackow
-Racitano
-Racina
-Rachar
-Racanello
-Rabuck
-Rabkin
-Rabidoux
-Rabello
-Rabel
-Rabara
-Qunnarath
-Quirindongo
-Quintel
-Quintano
-Quinlin
-Quinchia
-Quincel
-Quilling
-Quillian
-Quilliam
-Quillens
-Quihuiz
-Quiett
-Quicksall
-Quest
-Querta
-Querido
-Quent
-Quealy
-Quaye
-Quante
-Quamme
-Qualia
-Quaker
-Quagliano
-Quader
-Pytlewski
-Pyo
-Pylvainen
-Pyland
-Pych
-Py
-Puyear
-Puulei
-Puthiyamadam
-Putalavage
-Purzycki
-Purkerson
-Purcella
-Purce
-Puppe
-Pupa
-Pullon
-Pullie
-Pulgarin
-Pulford
-Pujals
-Puiatti
-Pugeda
-Puffett
-Puffenbarger
-Puertas
-Puddy
-Pucio
-Pucella
-Ptaszynski
-Psomiades
-Psencik
-Przybysz
-Przybycien
-Przedwiecki
-Pryzgoda
-Prvitt
-Pruskowski
-Prugh
-Prudent
-Prudden
-Provazek
-Protasewich
-Protain
-Proo
-Prondzinski
-Prokes
-Prohonic
-Progacz
-Proescher
-Prodan
-Privatsky
-Privateer
-Priore
-Prinzing
-Prinzi
-Printers
-Prigmore
-Priewe
-Prier
-Pribbeno
-Prezzia
-Preyor
-Prewer
-Prevett
-Preuitt
-Prepotente
-Prence
-Prekker
-Preisach
-Precythe
-Prebish
-Preato
-Prchlik
-Prazeres
-Prazak
-Prauner
-Prattella
-Prati
-Prat
-Prasser
-Prasomsack
-Praml
-Prabhakaran
-Prabel
-Poyneer
-Powroznik
-Powal
-Poux
-Poullion
-Pouliotte
-Pottier
-Potthast
-Potocnik
-Poties
-Poths
-Postuci
-Postal
-Posso
-Poser
-Portwine
-Portune
-Portaro
-Porrello
-Porreca
-Porrazzo
-Poremski
-Pore
-Porcello
-Popple
-Poppert
-Popowski
-Popovec
-Popke
-Popik
-Popielarczyk
-Popick
-Popi
-Poper
-Popelka
-Popec
-Poortinga
-Poorte
-Pooni
-Ponyah
-Pontin
-Pomerance
-Pomar
-Polynice
-Polyak
-Polverari
-Poltorak
-Polovoy
-Pollmann
-Pollio
-Pollinger
-Pollacco
-Polivka
-Polian
-Poleyestewa
-Polera
-Poldrack
-Polcovich
-Polakoff
-Polakis
-Poladian
-Pokorski
-Poiter
-Poffenroth
-Poetzsch
-Poeschl
-Poeschel
-Poepplein
-Poepping
-Poeling
-Podvin
-Podsiad
-Podrasky
-Podlas
-Pode
-Podbielski
-Podany
-Pochiba
-Pocchia
-Poalino
-Poaipuni
-Plymire
-Plyer
-Pluvoise
-Plungy
-Pluid
-Ploude
-Plosker
-Plomma
-Plohr
-Plocica
-Pliler
-Plevin
-Plessis
-Plesnarski
-Plesha
-Plenskofski
-Plecker
-Platenburg
-Platas
-Plansinis
-Plana
-Plamer
-Placencio
-Pizzolato
-Pizur
-Pius
-Piurkowski
-Pituch
-Pittillo
-Pitel
-Pitcak
-Piszczatowski
-Pisula
-Pishner
-Pirner
-Pirillo
-Pippert
-Pipe
-Pinyan
-Pinsonnault
-Pinnt
-Pinkelton
-Pinena
-Pinela
-Pineault
-Pinault
-Pilotti
-Pillips
-Pilbin
-Pilati
-Pikey
-Pih
-Piguet
-Pigna
-Pigler
-Pigat
-Pietzsch
-Pietrafesa
-Pieters
-Pierzchala
-Pierrie
-Pierfax
-Piercefield
-Piedmont
-Piedigrossi
-Piede
-Piechoski
-Piearcy
-Pidcock
-Picolet
-Pickren
-Pickings
-Picht
-Picco
-Pi
-Phomphithak
-Phommatheth
-Phlieger
-Phippen
-Philpotts
-Phillipi
-Philippon
-Philipose
-Philben
-Pherson
-Pherguson
-Phatdouang
-Phanthauong
-Phanord
-Pfirsch
-Pfendler
-Pfannenstein
-Pfahlert
-Pfahler
-Pezzuto
-Pezzimenti
-Pexton
-Pexsa
-Pewo
-Pevsner
-Petzel
-Petts
-Pettner
-Pettinella
-Petticrew
-Pettibon
-Pettes
-Petrov
-Petrosyan
-Petron
-Petrocelli
-Petrocco
-Petrizzo
-Petris
-Petrino
-Petricone
-Petralba
-Petrakis
-Petrain
-Petkoff
-Petitjean
-Petges
-Peteuil
-Petet
-Petersdorf
-Petchulis
-Pestronk
-Peskind
-Pesenti
-Pertsovsky
-Personette
-Persia
-Persampieri
-Persall
-Pers
-Perre
-Perper
-Perolta
-Perng
-Perler
-Perkoski
-Perish
-Perilloux
-Perey
-Peressini
-Percontino
-Perciballi
-Peral
-Peppas
-Pepitone
-Penzero
-Pentico
-Pent
-Penski
-Pense
-Penrice
-Penoyer
-Penovich
-Pennimpede
-Pennigton
-Pennig
-Penisson
-Pendl
-Pendill
-Penceal
-Penatac
-Penasa
-Penanegra
-Pelman
-Pelligrini
-Pelliccia
-Pellant
-Pelkowski
-Pelak
-Pein
-Peightell
-Pegler
-Pegelow
-Peffers
-Peetz
-Peelman
-Pee
-Pedrin
-Pedlow
-Pedelty
-Pede
-Peddy
-Peckinpaugh
-Peckens
-Pecht
-Pechin
-Peche
-Peccia
-Peca
-Peaker
-Pazik
-Pazderski
-Pazan
-Payno
-Payenda
-Pawluk
-Pawlosky
-Pawell
-Pavlikowski
-Pavlides
-Pavish
-Paviol
-Paulick
-Paukert
-Pattum
-Patrylak
-Patronella
-Patrich
-Patriarco
-Patraw
-Patierno
-Patient
-Patience
-Paten
-Pastorin
-Pasternack
-Pastano
-Passaro
-Pasqualino
-Paskoff
-Paskin
-Paskiewicz
-Pashel
-Pasey
-Pascher
-Pasaye
-Pasanen
-Parvis
-Partmann
-Parthemore
-Parshotam
-Parsens
-Parraga
-Paronto
-Paroda
-Parobek
-Parmann
-Parmalee
-Parlet
-Parle
-Parkers
-Pariente
-Paree
-Pardey
-Parde
-Pardall
-Parbs
-Parbol
-Paranada
-Parah
-Parado
-Pappy
-Pappenheim
-Paplow
-Papka
-Papich
-Papi
-Papallo
-Paolicelli
-Panzarella
-Panyik
-Pantle
-Pantera
-Pantalone
-Pansullo
-Panone
-Pano
-Panny
-Pannenbacker
-Pankiewicz
-Pankhurst
-Panke
-Pankau
-Pangan
-Panessa
-Pandolfi
-Pandiani
-Panchik
-Panchak
-Panakos
-Panak
-Panagakos
-Palubiak
-Palso
-Palowoda
-Palmucci
-Palmour
-Palmino
-Palmerino
-Palme
-Pallino
-Pallerino
-Palisi
-Palisano
-Palis
-Palazzola
-Palay
-Palaspas
-Palamara
-Paladini
-Paladin
-Paire
-Paillet
-Pailet
-Paider
-Paguin
-Pagoda
-Paglione
-Paglialunga
-Pageau
-Pagdanganan
-Pafundi
-Padiong
-Padberg
-Padarebones
-Padalecki
-Pacol
-Pacilio
-Pachter
-Pachew
-Pabelick
-Paaske
-Ozzella
-Owoc
-Owca
-Ovitz
-Overmann
-Overlee
-Overhulser
-Overholtzer
-Ovens
-Ovall
-Outhier
-Ouren
-Ouinones
-Ottum
-Ottomaniello
-Otteman
-Otsman
-Otinger
-Oszust
-Ostorga
-Ostolaza
-Osterhouse
-Osterberger
-Ostberg
-Ososki
-Osmers
-Osmera
-Oshey
-Osequera
-Osenkowski
-Oschmann
-Osbment
-Osbey
-Osazuwa
-Osayande
-Osako
-Orzell
-Orvin
-Ortwine
-Ortmeyer
-Ortelt
-Ortelli
-Orsten
-Orson
-Orrill
-Orphey
-Orndorf
-Orloski
-Orlich
-Orlander
-Orland
-Ork
-Orji
-Orison
-Orielly
-Orielley
-Ori
-Organek
-Orey
-Orender
-Ordona
-Ordon
-Ordman
-Orazine
-Oravetz
-Orandello
-Orabone
-Ora
-Or
-Oquenda
-Opyd
-Opteyndt
-Opoka
-Opiola
-Opielski
-Opell
-Opeka
-Onyeagu
-Onezne
-Ondeck
-Ona
-Oms
-Ommen
-Ominelli
-Omernik
-Omelia
-Olynger
-Olwin
-Olvey
-Olufson
-Olubunmi
-Olten
-Olshefski
-Olsby
-Olores
-Olma
-Olli
-Ollech
-Ollar
-Oliviera
-Olivarri
-Oligschlaeger
-Olheiser
-Olgin
-Olevera
-Olerud
-Olenski
-Olenius
-Oldow
-Oldershaw
-Oldenburger
-Olausen
-Olaes
-Okutsu
-Okken
-Okitsu
-Okie
-Okeson
-Okelberry
-Okel
-Ojito
-Ojano
-Ohyama
-Ohr
-Ohnstad
-Ohmen
-Ohlhauser
-Ohlensehlen
-Ohle
-Ohashi
-Ohanley
-Ogzewalla
-Ogutu
-Ogston
-Ogrodowicz
-Oginski
-Ogiamien
-Oger
-Ogarro
-Ofsak
-Oflynn
-Off
-Ofer
-Oelze
-Oehm
-Oehlschlager
-Oehl
-Odome
-Odo
-Odmark
-Odil
-Odgen
-Odermott
-Odair
-Oczon
-Ockman
-Ockleberry
-Ocken
-Ochal
-Ochakovsky
-Ocenasek
-Occhuizzo
-Ocanaz
-Obrein
-Obray
-Oborne
-Oblinski
-Obin
-Obierne
-Obholz
-Obhof
-Oberski
-Obermier
-Oberlies
-Obergfell
-Obenauer
-Obeid
-Obbink
-Obaker
-Oatney
-Oatfield
-Nyulassy
-Nwagbara
-Nutley
-Nuth
-Nurthen
-Nuntaray
-Nunno
-Nunlee
-Nuner
-Numkena
-Nuhfer
-Nugal
-Nuessen
-Nuding
-Nuchols
-Noye
-Noya
-Nowosielski
-Novickis
-Novi
-Novencido
-Novel
-Novad
-Noujaim
-Notoma
-Notice
-Noth
-Notch
-Notarnicola
-Nosworthy
-Nosacka
-Norum
-Northouse
-Nortesano
-Norstrand
-Norsingle
-Norrie
-Norr
-Norn
-Normoyle
-Norise
-Nordstrand
-Nordmark
-Nordes
-Norales
-Nopachai
-Noorda
-Nooman
-Nonroe
-Nonemaker
-Nonamaker
-Nommay
-Noman
-Nollet
-Nolle
-Noli
-Noice
-Noerr
-Nodland
-Nocon
-Nocks
-Nockels
-Nocella
-Nocek
-Njie
-Nizo
-Nitchman
-Nistendirk
-Nissan
-Nisly
-Nishitani
-Nishio
-Nishina
-Nirschl
-Niro
-Nirenberg
-Niquette
-Nip
-Nindorf
-Nincehelsor
-Nimz
-Nimura
-Nilmeier
-Nikula
-Nikach
-Nik
-Nightwine
-Night
-Nighman
-Nighbor
-Niffenegger
-Niez
-Niesporek
-Nier
-Nieminen
-Niemie
-Niedermeier
-Niederberger
-Nido
-Nicome
-Nicolozakes
-Nicolia
-Nicoles
-Nicolau
-Nickodem
-Nicklous
-Nickisch
-Nicka
-Nici
-Nibler
-Nibbe
-Nhatsavang
-Ngoun
-Neyer
-Newmyer
-Newitt
-Newgard
-Newenle
-Newbraugh
-Newbound
-Newand
-Nevue
-Nevison
-Nevis
-Nev
-Neujahr
-Neufer
-Nette
-Netkowicz
-Nethkin
-Nesvig
-Nestico
-Nessner
-Nesslein
-Nesset
-Nessel
-Neshem
-Nesbeth
-Neris
-Nerenberg
-Neren
-Nepomuceno
-Nemith
-Nelder
-Neitzke
-Neita
-Neiner
-Neimeyer
-Neigenfind
-Neiford
-Neidenbach
-Nehlsen
-Negreta
-Negrana
-Neenan
-Neddenriep
-Nech
-Neborak
-Nebesny
-Nazar
-Nawfel
-Navo
-Navarete
-Nauss
-Naumes
-Naugler
-Nauer
-Natvig
-Natalizio
-Natalie
-Natalia
-Nastasia
-Nasaire
-Naruaez
-Narrow
-Narkevicius
-Nardozzi
-Nardino
-Narain
-Napue
-Napenas
-Nap
-Naomi
-Nao
-Nanz
-Nantwi
-Nannen
-Nang
-Nanfito
-Nanes
-Nan
-Namsaly
-Namey
-Namer
-Namauu
-Namanworth
-Nalevanko
-Nalder
-Nakaoka
-Nakamatsu
-Nakajima
-Nakada
-Nakaahiki
-Naimoli
-Nahmias
-Nahhas
-Nagtalon
-Nagelkirk
-Nagasawa
-Naftel
-Nadine
-Naderman
-Nachbar
-Nacci
-Nabzdyk
-Nabor
-Nabavian
-Nabarowsky
-Naasz
-Myslim
-Myree
-Mylar
-Myall
-Muzii
-Muyres
-Muwwakkil
-Mutters
-Mutschelknaus
-Musulin
-Mustaro
-Mustache
-Musslewhite
-Mussell
-Mussa
-Musni
-Muslim
-Muskrat
-Muskopf
-Muskett
-Musitano
-Musilli
-Musielak
-Musguire
-Musgraves
-Muscott
-Muschik
-Muschaweck
-Mursch
-Murril
-Murra
-Muros
-Muri
-Murel
-Murcko
-Murak
-Muphy
-Muntean
-Mundz
-Mundinger
-Munder
-Mumaugh
-Mulville
-Mulrenin
-Mulnix
-Mullenaux
-Mullahy
-Mulkern
-Mulkerin
-Mulchrone
-Mulato
-Muinos
-Muhlstein
-Mugnolo
-Muggeo
-Mugge
-Muffett
-Muenzenberger
-Muellerleile
-Mudie
-Muckelroy
-Muccio
-Mrvan
-Mrkvicka
-Mraw
-Mozick
-Mozga
-Mozak
-Moxness
-Moxey
-Mounkes
-Mound
-Motonaga
-Mothershead
-Motayne
-Motayen
-Mosty
-Mostad
-Mossbarger
-Moskwa
-Moskop
-Mosena
-Mosen
-Moscoffian
-Moryl
-Morvillo
-Mortin
-Mortier
-Morsberger
-Morrey
-Morrales
-Morral
-Morphy
-Morock
-Morlino
-Morkert
-Morken
-Morisseau
-Morishito
-Morinville
-Morici
-Morgano
-Morgana
-Moreschi
-Morenco
-Morence
-Morella
-Mordeci
-Moratto
-Morath
-Morario
-Morando
-Moradian
-Morada
-Mootry
-Moomey
-Monville
-Montoto
-Montore
-Montoney
-Montfort
-Montey
-Montesi
-Monterrubio
-Montembeau
-Montayes
-Montalban
-Montaivo
-Monsay
-Monot
-Monopoli
-Monnerjahn
-Monkowski
-Monka
-Monjure
-Monios
-Monington
-Monges
-Monfils
-Moneyhun
-Moneaux
-Mondt
-Mondoza
-Mondloch
-Mondelli
-Mondale
-Monclova
-Moncher
-Monath
-Monagas
-Mominee
-Moma
-Molz
-Molstad
-Molsan
-Molnau
-Mollura
-Molleur
-Molla
-Molands
-Moitoza
-Moisa
-Moine
-Mohrlock
-Mohre
-Mohomed
-Mohmed
-Mohair
-Mogus
-Moeuy
-Moeser
-Moehr
-Moehle
-Modique
-Modgling
-Modglin
-Moderski
-Moczulski
-Moccasin
-Moayyad
-Moatz
-Mlodzianowski
-Mleczynski
-Mizwicki
-Mizutani
-Mizia
-Mizenko
-Miyataki
-Miyanaga
-Miville
-Mitsdarffer
-Mitrani
-Mitman
-Mitkowski
-Misuraca
-Miskinis
-Miskiewicz
-Miska
-Misik
-Mishulovin
-Mishulouin
-Mishkin
-Mishar
-Misenti
-Mischo
-Mischnick
-Mirisola
-Miricle
-Mirick
-Miramontez
-Mirafuentes
-Miraflores
-Miquel
-Mione
-Minzy
-Minzenmayer
-Minzenberger
-Mintken
-Minten
-Minot
-Minors
-Minn
-Minkowitz
-Minkins
-Minister
-Minic
-Minhas
-Mingioni
-Mingee
-Minert
-Minchow
-Mincer
-Minalga
-Mimozo
-Milward
-Milson
-Milosch
-Millings
-Millick
-Millare
-Milke
-Milinazzo
-Milin
-Milich
-Milette
-Mile
-Mildrum
-Mildon
-Milcher
-Milberger
-Mikuszewski
-Miklitz
-Mikko
-Mihalios
-Mihalick
-Mieth
-Mierzwiak
-Mierzwa
-Mierow
-Mierez
-Mierau
-Mielcarek
-Miecznikowski
-Miears
-Middlekauff
-Micucci
-Mickelberry
-Michno
-Michlich
-Michieli
-Michelstein
-Michelini
-Michalicek
-Michal
-Micciche
-Micalizzi
-Mguyen
-Mezzina
-Mezzenga
-Meydid
-Meusel
-Meusa
-Metty
-Mettig
-Mettenburg
-Metier
-Meth
-Metelko
-Mestemacher
-Messamore
-Mesplay
-Mespelt
-Mesiti
-Mesina
-Meshyock
-Mesenbring
-Meschke
-Merzlak
-Merrih
-Merner
-Merkwan
-Merklein
-Merkey
-Meringolo
-Merine
-Mergist
-Merganthaler
-Merckling
-Menzer
-Mensalvas
-Mennecke
-Menne
-Menjiva
-Mengwasser
-Menger
-Menedez
-Meneal
-Menck
-Mencia
-Menchen
-Menchavez
-Melzer
-Melve
-Melso
-Meloan
-Melman
-Mellison
-Mellerson
-Mellendorf
-Mellberg
-Melikian
-Melian
-Melgaard
-Meleo
-Melbye
-Melber
-Meja
-Meixelberger
-Meitz
-Meitner
-Meiss
-Meisch
-Meinen
-Meinberg
-Meigel
-Meierhofer
-Mehringer
-Mehrer
-Mehle
-Mehall
-Megahan
-Mega
-Mefferd
-Meenan
-Meecham
-Medvec
-Medinger
-Meddock
-Medawar
-Medaries
-Mecias
-Mecannic
-Meazell
-Measom
-Meaden
-Meach
-Mcwhinnie
-Mcwhinney
-Mcwells
-Mcvinney
-Mcvenes
-Mcthige
-Mcthay
-Mcshaw
-Mcroyal
-Mcrenolds
-Mcratt
-Mcquilliams
-Mcquesten
-Mcphetridge
-Mconnell
-Mcnolty
-Mcneish
-Mcnany
-Mcnamar
-Mcmullins
-Mcmulen
-Mcmenimen
-Mcmellen
-Mcmanuis
-Mcmanemy
-Mclernon
-Mclauren
-Mclamore
-Mckusick
-Mckosky
-Mckirryher
-Mckindra
-Mckin
-Mckever
-Mckernin
-Mckerlie
-Mckennzie
-Mckelvin
-Mckelphin
-Mckeague
-Mckaughan
-Mciwraith
-Mcilhinney
-Mchardy
-Mcgurie
-Mcgrevey
-Mcgreen
-Mcgohan
-Mcglocklin
-Mcglew
-Mcglaun
-Mcgibney
-Mcghinnis
-Mcgaughan
-Mcgathy
-Mcferran
-Mcfeely
-Mcfatten
-Mcewin
-Mcendarfer
-Mcenany
-Mcelvy
-Mcelmarry
-Mceathron
-Mceaddy
-Mcdugle
-Mcdoulett
-Mcdaneld
-Mcculloh
-Mccullin
-Mccullan
-Mccullagh
-Mccubrey
-Mccrobie
-Mccrain
-Mccraight
-Mccracker
-Mccrabb
-Mccowin
-Mccoubrey
-Mccoon
-Mcconomy
-Mcconnico
-Mcconahay
-Mccomish
-Mccoid
-Mccloude
-Mcclinsey
-Mcclenic
-Mcclee
-Mccier
-Mccathran
-Mccash
-Mccarvy
-Mccarrol
-Mccarraher
-Mccalpane
-Mccalebb
-Mccalanahan
-Mccade
-Mccadams
-Mcbroome
-Mcaskill
-Mcartor
-Mcaree
-Mbonu
-Mazzillo
-Mazzetti
-Mazuera
-Mazowieski
-Mazierski
-Mazella
-Mayze
-Maywalt
-Mayher
-Mawk
-Mavris
-Maushardt
-Mauras
-Mauracher
-Maupins
-Matysiak
-Matye
-Matusz
-Matuska
-Matusiewicz
-Matulewicz
-Mattock
-Mattingley
-Mattina
-Mattick
-Mattan
-Matskin
-Matros
-Matrisciano
-Matone
-Matonak
-Matlow
-Matkovic
-Matison
-Mathelier
-Matelski
-Mateiro
-Masunaga
-Masterton
-Mastalski
-Massini
-Massena
-Massed
-Massarelli
-Massanelli
-Maso
-Maslen
-Maslakowski
-Masincup
-Masilko
-Masher
-Mashall
-Masello
-Masell
-Maschmeyer
-Mascheck
-Maschak
-Mascari
-Masar
-Masak
-Masaitis
-Marxsen
-Maruschak
-Maruscak
-Marus
-Marumoto
-Martyr
-Martsolf
-Martorelli
-Martling
-Martischnig
-Martirano
-Martinsons
-Martinov
-Martinon
-Martinolli
-Martinet
-Martinell
-Martinel
-Martinat
-Martich
-Martey
-Martelles
-Martelle
-Marsolais
-Marsili
-Marshbanks
-Marshak
-Marseilles
-Marsaw
-Marrier
-Marrett
-Marrapodi
-Marrapese
-Marquitz
-Marousek
-Maronge
-Maro
-Marmerchant
-Marlene
-Markworth
-Markwardt
-Markuson
-Markou
-Markakis
-Marjenhoff
-Maritato
-Mariska
-Mariacher
-Margot
-Margis
-Marflak
-Marfil
-Marer
-Mardirossian
-Marcusen
-Marconis
-Marcisak
-Marcille
-Marchionni
-Marchesi
-Marchaland
-Marcet
-Marcelli
-Marca
-Marbley
-Marash
-Marascalco
-Marante
-Marangoni
-Marando
-Mapua
-Mapstone
-Mapa
-Maohu
-Manzur
-Manweiler
-Manuia
-Manto
-Mantifel
-Mantia
-Manteuffel
-Mantella
-Manteca
-Manspeaker
-Mansbach
-Manous
-Manoso
-Manolis
-Manocchia
-Mannheim
-Mannello
-Manlangit
-Manino
-Manieri
-Manicchio
-Maniar
-Maniaci
-Maniace
-Manglona
-Mangis
-Mangiafico
-Manghane
-Manero
-Manely
-Maneafaiga
-Mandril
-Mandolfo
-Mander
-Mandelberg
-Mandala
-Manco
-Mancill
-Mancher
-Manche
-Manaugh
-Manassa
-Manasares
-Manansala
-Manalili
-Mamudoski
-Mammo
-Mammenga
-Mamaril
-Mamaclay
-Malueg
-Malter
-Maltbia
-Maltas
-Malool
-Mallas
-Mallalieu
-Mallacara
-Malkiewicz
-Malinovsky
-Malewski
-Malett
-Maldomado
-Malcomson
-Malcik
-Malavet
-Malaver
-Malasky
-Malas
-Malango
-Malanaphy
-Malach
-Makofsky
-Mako
-Makler
-Maka
-Majuste
-Majied
-Majeske
-Majerowski
-Majera
-Maixner
-Maisto
-Maiocco
-Mailo
-Maile
-Maikoksoong
-Mahunik
-Mahrer
-Mahraun
-Maholmes
-Mahlke
-Mahli
-Mahfouz
-Maheia
-Mahalko
-Magwire
-Magpuri
-Magoun
-Magnone
-Magnetti
-Magliulo
-Magliolo
-Magliocco
-Magitt
-Magginson
-Maggert
-Magera
-Maged
-Mage
-Magbitang
-Magalong
-Magaha
-Maffitt
-Maffey
-Maestri
-Maenpaa
-Maenhout
-Maendel
-Mady
-Maduro
-Madu
-Madray
-Madras
-Madock
-Madlung
-Madler
-Madenford
-Madeau
-Maddaleno
-Macvean
-Macura
-Macrum
-Macrostie
-Macnaught
-Macnamee
-Macmurray
-Macmillen
-Maclay
-Mackle
-Mackimmie
-Mackedanz
-Maciejko
-Maciasz
-Maciak
-Machtley
-Machens
-Macentee
-Maceda
-Macdougald
-Maccauley
-Maccartney
-Macareno
-Macaraig
-Macapagal
-Macahilas
-Macadamia
-Mabone
-Mabary
-Maatta
-Maalouf
-Lysak
-Lynge
-Lynady
-Lykam
-Lyerla
-Lychwala
-Luzuriaga
-Luzinski
-Luxon
-Luvene
-Lutzi
-Luthe
-Luss
-Lushbaugh
-Luscavage
-Lurey
-Luquin
-Lupul
-Lupu
-Lupkin
-Lupfer
-Luoto
-Lundman
-Lundie
-Lundi
-Lundemo
-Luncsford
-Lumukanda
-Lumpp
-Lummis
-Lumantas
-Luloff
-Lukavsky
-Luitjens
-Luhring
-Luga
-Luffy
-Luelf
-Luehring
-Luedi
-Lueckenotte
-Luecht
-Luebano
-Ludvik
-Ludovici
-Ludkowski
-Luderman
-Luddy
-Lucksom
-Luckritz
-Luckadoo
-Lucion
-Luci
-Luchessa
-Luchesi
-Lucear
-Lucario
-Luben
-Luangsingotha
-Lozzi
-Lozo
-Loyst
-Loyed
-Lowin
-Lowber
-Lovich
-Lovenbury
-Loveh
-Lovec
-Louser
-Louris
-Lourence
-Loureiro
-Louras
-Lounds
-Loukidis
-Loukas
-Louissant
-Louer
-Louch
-Lotze
-Lotthammer
-Lotter
-Loterbauer
-Lotempio
-Lostracco
-Loston
-Lossman
-Loson
-Loskill
-Loske
-Loshe
-Lorz
-Lorion
-Lopuzzo
-Lopilato
-Lopera
-Loosey
-Looi
-Loock
-Lonsway
-Lons
-Longueville
-Longton
-Longknife
-Longin
-Longfield
-Longcor
-Londner
-Lompa
-Lommel
-Lomg
-Lolling
-Lolli
-Loli
-Lolar
-Lokuta
-Lokke
-Lokhmator
-Lojek
-Lois
-Loil
-Lohmeier
-Logero
-Loewe
-Loessberg
-Loeschner
-Loesche
-Loehlein
-Loeckle
-Loebs
-Loduca
-Lodense
-Lodeiro
-Locsin
-Locorriere
-Locklier
-Lockette
-Lochotzki
-Loche
-Locantore
-Locante
-Lobosco
-Lobingier
-Loats
-Loarca
-Llyod
-Llopis
-Llarenas
-Ljungquist
-Lizer
-Lizarda
-Livi
-Livezey
-Liverani
-Livas
-Liuzza
-Litzsinger
-Litza
-Littlehale
-Litter
-Litehiser
-Litecky
-Liskovec
-Liskiewicz
-Liskai
-Lisius
-Lisiecki
-Lisherness
-Lisanti
-Lipstone
-Lipsitz
-Lippi
-Lipovsky
-Lipkind
-Lipke
-Lipitz
-Lipa
-Liontos
-Linzie
-Linstrom
-Linssen
-Linsner
-Linsay
-Linnecke
-Linnan
-Linkkila
-Linginfelter
-Lingberg
-Lingardo
-Lingao
-Linea
-Lindwall
-Lindskog
-Lindline
-Lindesmith
-Lincicum
-Linahan
-Limthong
-Limesand
-Limauro
-Limardo
-Lilleberg
-Liljedahl
-Liljeberg
-Lilja
-Likio
-Ligons
-Lifshitz
-Liesch
-Lierle
-Lienke
-Lienemann
-Liekhus
-Liederbach
-Lieder
-Liechti
-Liebskind
-Liebhardt
-Liebelt
-Lie
-Liddie
-Lidbom
-Licor
-Lico
-Lickness
-Lickiss
-Lickey
-Lichtig
-Lichtenwalter
-Lichte
-Lichstein
-Lichorat
-Lichlyter
-Liccione
-Licalzi
-Librizzi
-Libre
-Librandi
-Libke
-Libert
-Liano
-Lianes
-Lezon
-Lezer
-Lezak
-Leynes
-Lewton
-Lewry
-Lewandowsky
-Levo
-Levites
-Levitch
-Levitas
-Levister
-Levinsky
-Leverentz
-Levendosky
-Leuty
-Leuters
-Leusink
-Leupold
-Leuchs
-Letteney
-Letteer
-Letrent
-Letourneaux
-Letofsky
-Letman
-Letko
-Letang
-Letalien
-Lestelle
-Lessin
-Lessenberry
-Lessen
-Lessa
-Lespier
-Lesky
-Leshure
-Leshko
-Lescavage
-Lermond
-Lerew
-Leonti
-Leonaggeo
-Lenza
-Lenters
-Lenord
-Lenny
-Lennert
-Lenix
-Lening
-Lengle
-Lengacher
-Lener
-Leneave
-Lencioni
-Lempe
-Lemone
-Lemin
-Lemich
-Lemert
-Lelis
-Lele
-Lekwa
-Lejune
-Leitze
-Leitem
-Leistner
-Leipheimer
-Leimkuehler
-Leiding
-Leidel
-Leidall
-Leichty
-Leichtman
-Leibenstein
-Leiba
-Lehrian
-Lehrfeld
-Legrow
-Legrant
-Legore
-Leghorn
-Legel
-Legallo
-Lefew
-Leemow
-Leebrick
-Ledy
-Leduke
-Ledon
-Ledley
-Ledec
-Ledebuhr
-Lecoultre
-Leconey
-Leckington
-Lechlak
-Lechel
-Lebovic
-Lebourgeois
-Leberman
-Lebario
-Leavelle
-Leasy
-Leah
-Leagjeld
-Leafe
-Leabow
-Lazzar
-Lazer
-Lazenson
-Lazenberry
-Layher
-Lawe
-Lavon
-Lavina
-Lavette
-Laverne
-Laverette
-Lavee
-Lavear
-Lavatch
-Lauwers
-Lauw
-Lauture
-Lautman
-Lauters
-Laurion
-Laurens
-Laurenceau
-Launt
-Launelez
-Laughbaum
-Lauerman
-Laudat
-Laubacher
-Latzka
-Latzig
-Latortue
-Lathon
-Lathim
-Latessa
-Latella
-Lataille
-Lasyone
-Lastovica
-Lasselle
-Lask
-Lashutva
-Laserna
-Lascody
-Lasaint
-Larve
-Laruffa
-Larsh
-Larreta
-Larko
-Largay
-Larey
-Lardydell
-Larde
-Laravie
-Larate
-Laquay
-Lapuz
-Laprairie
-Lapora
-Lapiana
-Lanzoni
-Lanzillotti
-Lanzillo
-Lanzer
-Lanzalotti
-Lanton
-Lantey
-Lansdowne
-Lansden
-Lansang
-Lanquist
-Lanosga
-Lanosa
-Laninga
-Langsdale
-Langoni
-Langlands
-Langhout
-Langhorst
-Langenheim
-Langehennig
-Laneve
-Landucci
-Landsberry
-Landrey
-Landolfo
-Landkamer
-Landham
-Landgrebe
-Landefeld
-Lampp
-Lamparski
-Lamorgese
-Lamorella
-Lammie
-Lamielle
-Lamela
-Lambourne
-Lambino
-Lamberto
-Lamber
-Lambeck
-Lamascolo
-Lamarsh
-Lamantagne
-Lamaitre
-Lalumiere
-Lallo
-Laliberty
-Lalata
-Lalanne
-Laland
-Lakner
-Laity
-Lahrman
-Lahmann
-Lahip
-Lagroon
-Lagoa
-Laginess
-Lagge
-Lagatella
-Lagassie
-Laganga
-Lafranca
-Lafosse
-Laffredo
-Laferty
-Lafera
-Lafaver
-Lafauci
-Laesser
-Ladyman
-Ladtkow
-Laditka
-Ladeau
-Ladas
-Lacouette
-Lacosta
-Lacock
-Lacks
-Lackman
-Lackie
-Lachley
-Lacassagne
-Labrune
-Labrode
-Labreque
-Labrec
-Labog
-Labkovsky
-Labita
-Labbie
-Lababit
-Laaker
-Kylish
-Kyhn
-Kwiat
-Kwasny
-Kwack
-Kvilhaug
-Kuznicki
-Kuzmish
-Kuzmanic
-Kuzemchak
-Kuttler
-Kutella
-Kutchin
-Kuszlyk
-Kusumoto
-Kusuma
-Kustes
-Kusinski
-Kushlan
-Kushiner
-Kushin
-Kusak
-Kurzyniec
-Kury
-Kurter
-Kurrie
-Kurpiel
-Kurkjian
-Kurk
-Kurisu
-Kupres
-Kuokkanen
-Kunzie
-Kunzel
-Kunis
-Kuning
-Kundrick
-Kundla
-Kundinger
-Kully
-Kullas
-Kulkarni
-Kulcona
-Kulak
-Kulacz
-Kuks
-Kuklis
-Kuka
-Kuja
-Kuizinas
-Kuhtz
-Kuhnle
-Kuhnen
-Kuhnemund
-Kuhnel
-Kuhens
-Kuharik
-Kufner
-Kufeldt
-Kuenstler
-Kuehnert
-Kudzma
-Kudasik
-Kuczkowski
-Kucinskas
-Kuchto
-Kuch
-Kucel
-Kucek
-Kubica
-Kubecka
-Kuban
-Kszaszcz
-Krzywicki
-Krzynowek
-Krzal
-Krystal
-Krysiak
-Krys
-Krutsch
-Kruss
-Krusen
-Krusemark
-Krupiak
-Krumsiek
-Kruml
-Krulish
-Krulik
-Krulicki
-Krueth
-Kruer
-Kruel
-Krows
-Krossen
-Krolikowski
-Krolczyk
-Kroetch
-Kriticos
-Krites
-Krisher
-Krinke
-Krienke
-Kriegh
-Krichbaum
-Kribbs
-Kretchmar
-Kreitzbender
-Kreitler
-Kreinbring
-Kreb
-Kreamalmeyer
-Kreager
-Krawiecz
-Krawetz
-Krasley
-Krapfl
-Kranze
-Kranendonk
-Kramper
-Krampe
-Kramm
-Kralicek
-Krajnovich
-Krajcer
-Krain
-Kracker
-Kozinski
-Kownacki
-Kown
-Kowing
-Kowallis
-Kowall
-Kowalcyk
-Kowalchick
-Kovacic
-Kourt
-Kourkoumellis
-Kounter
-Kounlavong
-Kounce
-Koulabout
-Koualeski
-Kotzur
-Kottsick
-Kottre
-Kotte
-Kotrys
-Kotow
-Kothenbeutel
-Kotara
-Kostyla
-Kostich
-Kostenko
-Kossmann
-Kossin
-Kossakowski
-Kossack
-Kosoff
-Kosmatka
-Koshiol
-Koscielak
-Koscho
-Korzenski
-Kortz
-Kortum
-Korthauer
-Korshak
-Korsen
-Korol
-Korns
-Kornprobst
-Kornman
-Kormann
-Korineck
-Korf
-Koretsky
-Korenic
-Korbal
-Koralewski
-Koppelmann
-Kopis
-Kopiak
-Kopera
-Kopchick
-Kooken
-Kontogianis
-Konon
-Konn
-Konieczko
-Konick
-Konicek
-Koneval
-Kondratowicz
-Koncan
-Konat
-Komsthoeft
-Komosinski
-Kommer
-Kominek
-Koman
-Kolthoff
-Kology
-Kolnik
-Kolmetz
-Kolling
-Kolkowski
-Kolkemeyer
-Kolias
-Kolen
-Kolehmainen
-Kolby
-Kolberg
-Kolat
-Kokoska
-Koistinen
-Kohnert
-Kohlmyer
-Kofutua
-Kofoid
-Kofler
-Kofa
-Koetz
-Koetje
-Koerper
-Koeppl
-Koenning
-Koenigstein
-Koenigsfeld
-Koelle
-Koegel
-Koebley
-Koczera
-Kochmanski
-Kocaj
-Koc
-Koblick
-Kobis
-Kobialka
-Kobernick
-Kobak
-Knost
-Knori
-Knopinski
-Knoepfler
-Knoche
-Knipping
-Knipfel
-Knighter
-Kniefel
-Knie
-Knickman
-Knezevic
-Knewtson
-Knestrick
-Knesel
-Kneifel
-Knavel
-Knappe
-Knackstedt
-Klusmeyer
-Klus
-Klund
-Klun
-Kloos
-Kloock
-Kloiber
-Klohr
-Kloepper
-Klocek
-Klis
-Klingerman
-Klingen
-Klines
-Klimkowicz
-Kliever
-Kliem
-Kleypas
-Klevene
-Kleppinger
-Kleparek
-Klepacz
-Klemenc
-Klemanski
-Kleinwolterin
-Kleinsmith
-Kleinke
-Kleinberger
-Kleidon
-Kleespies
-Kleese
-Kleekamp
-Kleban
-Klayman
-Klay
-Klaver
-Klarman
-Klarberg
-Klapperich
-Kjetland
-Kizewski
-Kiyabu
-Kivioja
-Kittner
-Kittelberger
-Kissik
-Kisser
-Kishaba
-Kisch
-Kirner
-Kirkpatric
-Kirchhofer
-Kirchgessner
-Kirchausen
-Kirbie
-Kiral
-Kippes
-Kipper
-Kippel
-Kintsel
-Kintop
-Kinseth
-Kinroth
-Kinnion
-Kinningham
-Kinnier
-Kinnie
-Kinkin
-Kinkella
-Kingshott
-Kingore
-Kingen
-Kinerson
-Kindermann
-Kinart
-Kinan
-Kinabrew
-Kimbral
-Killean
-Kilcrest
-Kilb
-Kilarjian
-Kiffe
-Kientz
-Kiening
-Kielich
-Kieger
-Kieft
-Kieff
-Kiefel
-Kie
-Khum
-Khu
-Khov
-Khounborine
-Khoun
-Khoo
-Khensovan
-Khela
-Khay
-Khansari
-Khanponaphan
-Khano
-Khammixay
-Khalife
-Khalifah
-Khachatoorian
-Keyna
-Kexel
-Kewish
-Kettmann
-Ketring
-Ketler
-Ketcheside
-Ket
-Kestle
-Kessner
-Kerzer
-Kerss
-Kerska
-Kershbaumer
-Keros
-Kerntke
-Kerkel
-Keri
-Kerger
-Kereluk
-Kerechanko
-Kercado
-Keppers
-Keohane
-Kennet
-Kennealy
-Kenely
-Keneally
-Kendrew
-Kenderdine
-Kenagy
-Kenady
-Kemner
-Kemmler
-Kemme
-Kemerer
-Kelzer
-Kellon
-Kello
-Kellin
-Kellebrew
-Kellaway
-Keliipio
-Kelder
-Kelash
-Keitzer
-Keigley
-Keicher
-Kegerries
-Keens
-Keemer
-Keckler
-Keaveny
-Keath
-Keasley
-Kears
-Keany
-Keanum
-Keamo
-Kealohanui
-Kazmi
-Kazmer
-Kazin
-Kazeck
-Kazakos
-Kayrouz
-Kaylo
-Kawata
-Kaveny
-Kavadias
-Kauphusman
-Kaune
-Kaull
-Kaub
-Katzberg
-Katynski
-Katula
-Katten
-Katsbulas
-Katnik
-Katechis
-Katcsmorak
-Katan
-Kastning
-Kastman
-Kassell
-Kassabaum
-Kasprak
-Kasica
-Kasack
-Karvonen
-Karvis
-Karpowich
-Karpiak
-Karnish
-Karma
-Karell
-Kareem
-Kardashian
-Karczewski
-Karayan
-Karatz
-Karadimas
-Kapusniak
-Kapraun
-Kappe
-Kappa
-Kapitula
-Kapfer
-Kapelke
-Kapa
-Kaopua
-Kantarian
-Kanta
-Kanoza
-Kannard
-Kanish
-Kaniecki
-Kanevsky
-Kaner
-Kandra
-Kanda
-Kanatzar
-Kanable
-Kamph
-Kamnik
-Kammes
-Kammerdiener
-Kamerad
-Kamelamela
-Kamealoha
-Kame
-Kamb
-Kaluzny
-Kalupa
-Kaluna
-Kaltved
-Kalter
-Kalscheuer
-Kalmus
-Kalmer
-Kalland
-Kalima
-Kalichman
-Kalfa
-Kalbaugh
-Kakudji
-Kaitz
-Kainoa
-Kailey
-Kaiama
-Kahrer
-Kahola
-Kahana
-Kagay
-Kafel
-Kaetzel
-Kaesemeyer
-Kaer
-Kaea
-Kaduk
-Kadis
-Kaderlik
-Kade
-Kacik
-Kachikian
-Kacerski
-Kaboos
-Kabba
-Kaaz
-Kaauamo
-Juza
-Justino
-Justason
-Jurs
-Jurisch
-Jurgensmeier
-Jurden
-Jura
-Jungling
-Julye
-Juluke
-Julock
-Julias
-Julen
-Jufer
-Juedes
-Jubic
-Juariqui
-Juaire
-Jozsa
-Joulwan
-Jostes
-Josten
-Josich
-Josias
-Joshlin
-Josefy
-Josef
-Jorski
-Jorn
-Jorinscay
-Jorda
-Jons
-Jongeling
-Jongebloed
-Jondle
-Jolls
-Johnshoy
-Johnico
-Johanek
-Jirjis
-Jiran
-Jimmison
-Jill
-Jewels
-Jevtic
-Jetty
-Jesmer
-Jes
-Jerone
-Jerko
-Jenschke
-Jenquin
-Jennins
-Jennelle
-Jenison
-Jendrick
-Jeminez
-Jellis
-Jekot
-Jekel
-Jehl
-Jebb
-Jeavons
-Jeanneret
-Jeane
-Jeancharles
-Jeanbaptise
-Jaworowicz
-Javellana
-Jaurigui
-Jauch
-Jastrzebski
-Jass
-Jasmine
-Jarzembowski
-Jarver
-Jarosh
-Jaroscak
-Jarnesky
-Jares
-Jarell
-Jaradat
-Jarad
-Jaquins
-Janulewicz
-Jansing
-Janrhett
-Janowicz
-Janosek
-Jannetti
-Jannell
-Janeczko
-Jandron
-Janczunski
-Jancik
-Janacek
-Jamwant
-Jamili
-Jakovac
-Jagoe
-Jaffy
-Jaeschke
-Jaenke
-Jacque
-Jacobos
-Jackovitz
-Jackola
-Jackley
-Jacka
-Jacckson
-Jablonsky
-Jabiro
-Jabaay
-Jaap
-Iyengar
-Iwanowski
-Iwanejko
-Ivon
-Iverslie
-Ivanov
-Ivancich
-Iturralde
-Ittner
-Israelsen
-Israels
-Ismay
-Isleib
-Isita
-Isiordia
-Ising
-Isidore
-Isbill
-Isagawa
-Isacs
-Isaacsen
-Irzyk
-Irizzary
-Irineo
-Irimata
-Ireton
-Irestone
-Iozzo
-Iozzi
-Iopa
-Intrabartolo
-Intihar
-Insko
-Insana
-Inocente
-Ink
-Inhulsen
-Ingole
-Inches
-Inafuku
-Imperatore
-Imgrund
-Imbimbo
-Imbier
-Imaino
-Ilse
-Illuzzi
-Illian
-Ilic
-Ilasin
-Ilagan
-Iker
-Ihnat
-Ihm
-Igwe
-Igtanloc
-Ifversen
-Iese
-Ieng
-Ienco
-Idemoto
-Icard
-Iborra
-Ible
-Iberg
-Ibbetson
-Ibale
-Iavarone
-Iatarola
-Iacovino
-Iacopino
-Iacobellis
-Iachetta
-Hysom
-Hymowitz
-Hymon
-Hymen
-Hylands
-Hych
-Huy
-Huval
-Hutmacher
-Huszar
-Hustace
-Hussien
-Huskinson
-Husfelt
-Husenaj
-Husch
-Hurtig
-Hurtgen
-Huro
-Hurne
-Hurlston
-Hupman
-Huor
-Hunzelman
-Hunsperger
-Hunneyman
-Hunckler
-Humphrys
-Humphers
-Humetewa
-Humeniuk
-Humenik
-Hulstrand
-Hullings
-Hulitt
-Hulick
-Huland
-Huiting
-Hugron
-Hufstedler
-Huffner
-Huezo
-Huettman
-Huereca
-Huenink
-Huelse
-Hueckman
-Hudgeons
-Hudach
-Huckstadt
-Huckle
-Huckabey
-Hubschmitt
-Hubin
-Hubertus
-Hubby
-Hubbel
-Huban
-Huaman
-Hsun
-Hsiang
-Hrapski
-Hoznour
-Hoyman
-Howkins
-Howick
-Howatt
-Hovorka
-Hovick
-Hovanesian
-Hounchell
-Houf
-Hotton
-Hottes
-Hotrum
-Hotelling
-Hotaki
-Hostoffer
-Hosterman
-Hosteller
-Hospkins
-Hospelhorn
-Hoscheit
-Hoschander
-Horstead
-Horris
-Hornoff
-Hornberg
-Hornandez
-Hornack
-Hormell
-Horikoshi
-Horigan
-Horger
-Hoppins
-Hopperstad
-Hopko
-Hootsell
-Hoopingarner
-Hookano
-Hooghkirk
-Hoofard
-Hoock
-Honsinger
-Honour
-Honnette
-Honnerlaw
-Honma
-Honkanen
-Hongach
-Honeycott
-Hondorp
-Honchell
-Honas
-Honanie
-Homsher
-Homestead
-Holze
-Holtorf
-Holthus
-Holster
-Holsonback
-Holom
-Hollinrake
-Hollidge
-Hollerman
-Hollendonner
-Hollberg
-Holk
-Holian
-Holes
-Holecz
-Holec
-Holdvogt
-Hokutan
-Hok
-Hoiness
-Hoilman
-Hohiudden
-Hohensee
-Hohaia
-Hogelin
-Hogatt
-Hogarty
-Hoftiezer
-Hoffstatter
-Hoffnagle
-Hoffeditz
-Hoffart
-Hoerl
-Hoefel
-Hodos
-Hodnefield
-Hockins
-Hockenbrock
-Hocke
-Hochard
-Hocate
-Hobler
-Hober
-Hoben
-Hobell
-Hobden
-Hoagberg
-Hnyda
-Hlavka
-Hladik
-Hladek
-Hitchen
-Hislope
-Hirschberg
-Hirneise
-Hirn
-Hirliman
-Hirleman
-Hirao
-Hippenstiel
-Hintson
-Hint
-Hinley
-Hinh
-Hinebaugh
-Hindson
-Hinderberger
-Himmelmann
-Himanga
-Him
-Hilston
-Hilstad
-Hilser
-Hilsendager
-Hilsenbeck
-Hilscher
-Hilsabeck
-Hilpert
-Hilman
-Hillerud
-Hillebrano
-Hillebrandt
-Hilland
-Hilgers
-Hilgeman
-Hilfiker
-Hildago
-Hilda
-Hilbrand
-Hikel
-Highbaugh
-Higgons
-Higgenbottom
-Hiersche
-Hierholcer
-Hiedeman
-Hiday
-Hickethier
-Hichens
-Hibbitt
-Heyduck
-Hewko
-Hevron
-Heuwinkel
-Heuvelmann
-Heusner
-Heung
-Heuett
-Heuck
-Hettinga
-Hessey
-Hespen
-Hescock
-Heschke
-Hervig
-Hertzel
-Herston
-Herstad
-Hershkop
-Hershelman
-Herschelman
-Herriges
-Herres
-Herrarte
-Herpich
-Hernanez
-Hernanadez
-Hernan
-Hermenau
-Hermanowicz
-Herkstroeter
-Herkenratt
-Herera
-Herendeen
-Herauf
-Henstrom
-Hense
-Henrity
-Hennigh
-Hennies
-Henneberry
-Henkey
-Henjes
-Hengl
-Hengen
-Henfling
-Henerson
-Henein
-Hendrik
-Hendricksen
-Hendeson
-Henderso
-Henderlite
-Hemon
-Hemmann
-Hemker
-Hemesath
-Hemani
-Helweg
-Helverson
-Helseth
-Helquist
-Helom
-Helmstetter
-Helmsing
-Hellweg
-Hellmich
-Helgager
-Helgaas
-Helfenbein
-Helems
-Helem
-Helde
-Heiting
-Heither
-Heisdorffer
-Heiro
-Heirendt
-Heinzig
-Heiniger
-Heingartner
-Heimlicher
-Heimburger
-Heiken
-Heidtman
-Heidrich
-Heidi
-Heidelberger
-Heidebrecht
-Heick
-Heibult
-Heholt
-Heggood
-Heeth
-Heers
-Heern
-Heerkes
-Hedtke
-Hedspeth
-Hedon
-Hedinger
-Hecke
-Hechinger
-Hebeisen
-Heatherton
-Heartsill
-Heagney
-Heafey
-Headly
-Headland
-Headlam
-Headington
-Heade
-Hazy
-Hazim
-Haza
-Haynam
-Hayertz
-Haydt
-Haxby
-Hawse
-Hawkinberry
-Hawe
-Havlin
-Havir
-Havelka
-Hauxwell
-Hautan
-Hausrath
-Hauptmann
-Haughn
-Hauersperger
-Hatzenbihler
-Hattley
-Hatta
-Hatori
-Hathorne
-Hatchitt
-Hatchet
-Hatada
-Hastin
-Hastedt
-Hassing
-Hassenger
-Hassanein
-Hasker
-Haskel
-Hashaway
-Hasenfuss
-Hasenfratz
-Hascup
-Hasas
-Hartwigsen
-Hartrum
-Hartquist
-Hartory
-Hartlen
-Hartleben
-Hartinger
-Harsin
-Harritt
-Harriage
-Harpham
-Harnos
-Harnist
-Harleman
-Harlee
-Harke
-Hargers
-Hardter
-Hardsock
-Hardnette
-Hardine
-Hardi
-Hardges
-Harderman
-Harde
-Hardan
-Harcar
-Harbater
-Harapat
-Harang
-Haq
-Hanzl
-Hansome
-Hansman
-Hansis
-Hansing
-Hanoa
-Hanninen
-Hannaway
-Hannawalt
-Hanmer
-Hankison
-Hanible
-Hanenberger
-Haneke
-Hanebutt
-Handzlik
-Handsom
-Handkins
-Handke
-Handin
-Hanback
-Hanawalt
-Hanavan
-Hamsik
-Hamonds
-Hammette
-Hammerman
-Hammacher
-Hamlette
-Hamiltan
-Hamidi
-Hamff
-Hamett
-Hamersly
-Hamers
-Hamdn
-Hamden
-Hamberry
-Hamara
-Hamacher
-Halyk
-Haltiwanger
-Halstrom
-Halse
-Halpert
-Halnon
-Hallo
-Halliman
-Hallemeyer
-Hallack
-Halima
-Halick
-Haldi
-Halcott
-Halbershtam
-Halajian
-Halaas
-Hakey
-Haitz
-Hairell
-Haims
-Haifa
-Hahnert
-Haggin
-Haggerton
-Haggermaker
-Hagey
-Hafferkamp
-Haferkamp
-Haeuser
-Haessly
-Haese
-Haerter
-Haering
-Haeder
-Hadvab
-Hadsall
-Hadler
-Hadesty
-Haddenham
-Hadaller
-Hacopian
-Hackl
-Hackerott
-Hacken
-Hachting
-Haboush
-Hable
-Habig
-Habibi
-Haberstroh
-Habenicht
-Haaz
-Haakenstad
-Haage
-Gyllensten
-Gwilt
-Gwillim
-Guzon
-Guzewicz
-Guye
-Gutzler
-Guttormson
-Gutsche
-Gutjahr
-Gutgesell
-Gutenberg
-Gustitus
-Gussow
-Gusmar
-Gushi
-Gushard
-Gurwell
-Gurske
-Gurrero
-Gurin
-Gurecki
-Guoan
-Gunzelman
-Gunyon
-Guntharp
-Gunstream
-Gungor
-Gundelach
-Gunawan
-Gumprecht
-Gumaer
-Gulston
-Gulnac
-Gulizio
-Gulbrandsen
-Guitano
-Guimares
-Guillebeau
-Guillary
-Guillama
-Guilfoos
-Guiggey
-Guiga
-Guieb
-Guidrey
-Guiab
-Guffanti
-Guerrini
-Guerrazzi
-Guerera
-Guenthur
-Guell
-Guedjian
-Gudmundsson
-Gucker
-Gubin
-Gubala
-Guba
-Guasp
-Guarriello
-Guarno
-Guarini
-Guanche
-Guagenti
-Gstohl
-Grzesik
-Grzebien
-Gryszowka
-Grymes
-Gruz
-Grustas
-Gruse
-Gruntz
-Grunert
-Grune
-Grunberg
-Grumney
-Grumbling
-Gruman
-Grulkey
-Gruiger
-Gruening
-Gruenewald
-Gruby
-Gruben
-Grubel
-Grubba
-Grriffin
-Groys
-Growell
-Grothaus
-Grosskreutz
-Groskreutz
-Grosclaude
-Groot
-Gronstal
-Gronquist
-Gronlund
-Gronitz
-Gronberg
-Grona
-Gromoll
-Grohowski
-Grohman
-Groetsch
-Groder
-Grobmyer
-Groberg
-Grivno
-Grivetti
-Grippen
-Grine
-Grimme
-Grills
-Grigoreas
-Griglen
-Griffitt
-Griffan
-Grieshop
-Grieshaber
-Griep
-Grieff
-Griebling
-Griblin
-Grev
-Greubel
-Gressmire
-Gresco
-Grenway
-Grensky
-Grennay
-Grenko
-Grenet
-Gremo
-Gremmels
-Gregware
-Gregus
-Greggory
-Gregan
-Greep
-Greenweig
-Greensfelder
-Greenhalge
-Greengo
-Greenbacker
-Greem
-Greder
-Greczkowski
-Grebner
-Greber
-Greason
-Gream
-Gravat
-Grauman
-Grauel
-Grassle
-Grasmick
-Grapp
-Granzella
-Granto
-Gransberry
-Granquist
-Granneman
-Granieri
-Granes
-Grandon
-Grandner
-Granai
-Grammont
-Gramble
-Graleski
-Grainey
-Grain
-Graichen
-Grahovac
-Grageda
-Gragas
-Graffney
-Graffagnino
-Grafals
-Gradley
-Gradias
-Gradford
-Grabowsky
-Grabonski
-Grabler
-Grabhorn
-Graap
-Gozman
-Goyen
-Goyda
-Gowey
-Gowda
-Govostes
-Govia
-Gour
-Gouldman
-Gouldie
-Gougis
-Gotts
-Gottemoeller
-Gottdenger
-Gotta
-Gotshall
-Gosvener
-Gostlin
-Gossow
-Gosson
-Gossling
-Gosset
-Gosey
-Gorrindo
-Gormanous
-Gormally
-Gorius
-Gorena
-Gorell
-Gordley
-Gordey
-Gorbea
-Goonen
-Goodmon
-Gonzelas
-Gonzalis
-Gonyou
-Gonsiewski
-Gonsar
-Goney
-Gomoran
-Gomoll
-Gollop
-Gollob
-Gollier
-Golik
-Golida
-Golias
-Golian
-Golia
-Golec
-Goldthorpe
-Goldhorn
-Goldhirsh
-Goldfuss
-Goldfeld
-Golderer
-Goldenstein
-Goldenman
-Golde
-Golbin
-Golackson
-Goicoechea
-Goffigan
-Goerlich
-Goepfarth
-Goepel
-Goeing
-Goehringer
-Godboldt
-Gochett
-Gochal
-Gocek
-Goblirsch
-Gnoza
-Gnegy
-Gnabah
-Gmernicki
-Glyn
-Glueckert
-Glowacky
-Glovinsky
-Gloston
-Gloshen
-Glos
-Glogowski
-Gloeckler
-Glimpse
-Glidwell
-Glesener
-Gleitz
-Gleckler
-Glebocki
-Gleber
-Glazner
-Glazebrook
-Glaves
-Glavan
-Glasby
-Gladysiewski
-Gladle
-Gladhart
-Gjeltema
-Givant
-Gius
-Giulioli
-Gitt
-Girres
-Girbach
-Girand
-Gip
-Giottonini
-Giorno
-Gionta
-Giombetti
-Gioffre
-Gioe
-Ginzel
-Ginsel
-Ginocchio
-Ginnis
-Ginard
-Gimse
-Gilzow
-Gilton
-Gilstad
-Gilomen
-Gilner
-Gilly
-Gillming
-Gillion
-Gillich
-Gillice
-Gille
-Giliberto
-Gilhuly
-Gilgan
-Gildemeister
-Gilcris
-Gigger
-Giffith
-Giffee
-Giff
-Gietz
-Giesel
-Giera
-Gibeaut
-Gibala
-Giasson
-Giarusso
-Giarrano
-Giaquinta
-Giannavola
-Giandomenico
-Gianandrea
-Giallorenzo
-Giacherio
-Giachelli
-Giacchi
-Ghebremicael
-Gezalyan
-Getzschman
-Getzlaff
-Gettens
-Gettelman
-Gestether
-Gesing
-Gesamondo
-Gerz
-Gerwin
-Gerveler
-Gertsema
-Gerthung
-Gerten
-Gertel
-Gerteisen
-Gerstenberger
-Gershkovich
-Gerney
-Germy
-Germana
-Gerich
-Gerdiman
-Gerckens
-Gerbig
-Georghiou
-Geoly
-Gentleman
-Gentges
-Gentelia
-Gensel
-Geniesse
-Genia
-Generalao
-Gemmiti
-Geml
-Gelner
-Gellings
-Gellinger
-Gelino
-Gelhar
-Gelfond
-Gelerter
-Gelder
-Gelbart
-Geisinsky
-Gehrki
-Gehm
-Geen
-Gederman
-Gede
-Gearn
-Geant
-Gazzara
-Gazitano
-Gazdik
-Gayanilo
-Gawthorp
-Gavit
-Gaviglia
-Gavett
-Gavan
-Gavagan
-Gausman
-Gaukroger
-Gaufusi
-Gaudier
-Gaudett
-Gauci
-Gatzow
-Gatta
-Gatheright
-Gatesy
-Gatesman
-Gastelo
-Gaschke
-Garwin
-Garter
-Gartenmayer
-Gartenhaus
-Garsjo
-Garroutte
-Garrettson
-Garrean
-Garre
-Garnham
-Garnache
-Garmire
-Garmen
-Garlett
-Garkow
-Garito
-Garinger
-Gargan
-Garcon
-Gapp
-Gantzler
-Gantvoort
-Gansert
-Gansen
-Ganns
-Gannetti
-Ganin
-Ganigan
-Gamotan
-Gammond
-Gamer
-Gamello
-Gambrill
-Gambold
-Gambee
-Gambardella
-Galven
-Galvani
-Galuszka
-Galuppo
-Galmore
-Gallusser
-Gallodoro
-Gallington
-Galleta
-Gallegoz
-Gallaugher
-Gallargo
-Galkin
-Galipo
-Galinis
-Galimberti
-Galic
-Galbiso
-Galathe
-Galassini
-Galanti
-Galano
-Galagher
-Gajeski
-Gajardo
-Gaiters
-Gails
-Gailliard
-Gaffer
-Gafanha
-Gaer
-Gadewoltz
-Gaden
-Gackle
-Gabrial
-Gabrenas
-Gabossi
-Gables
-Gabl
-Gabhart
-Gabeline
-Gabbamonte
-Fyler
-Fykes
-Fusner
-Fusillo
-Fushimi
-Fus
-Furtak
-Furblur
-Fundora
-Funderberg
-Fumero
-Fuls
-Fulham
-Fulco
-Fujimura
-Fujikake
-Fugueroa
-Fuger
-Fugatt
-Fuerstenau
-Fuerbringer
-Frymoyer
-Frymier
-Frymark
-Frutiger
-Frushour
-Fruman
-Fruin
-Frugoli
-Fruehauf
-Froyd
-Frosto
-Frontis
-Frontiero
-Fronick
-Froneberger
-Frohberg
-Froebe
-Frobish
-Frittz
-Fritchley
-Fritchey
-Frisinger
-Frisell
-Frija
-Friehauf
-Friedenthal
-Friebel
-Freundlich
-Fret
-Frerich
-Frens
-Freker
-Freiseis
-Freimark
-Freilino
-Freiheit
-Freiermuth
-Freidin
-Freemantle
-Freeh
-Freedlander
-Freeders
-Freeburger
-Fredregill
-Frederique
-Freckleton
-Frecker
-Frazzano
-Frauenfelder
-Frattali
-Fratta
-Fratrick
-Fratercangelo
-Frasso
-Frashure
-Fraschilla
-Franzman
-Franzini
-Franza
-Franty
-Fransisco
-Franpton
-Frankson
-Frankland
-Frankiewicz
-Frankart
-Frangione
-Franchini
-Francescone
-Fralic
-Fraklin
-Frair
-Fragosa
-Fradkin
-Fracasso
-Foyer
-Foxhoven
-Fowlie
-Fowley
-Fowlar
-Fower
-Foute
-Foussell
-Fouquette
-Founds
-Fougner
-Fosmire
-Fosher
-Fosbrook
-Fortun
-Forss
-Forsmann
-Forslin
-Forsee
-Forpahl
-Fornili
-Fornier
-Fornaro
-Formichelli
-Formaggioni
-Forkum
-Forkell
-Foriest
-Forgrave
-Foresta
-Forejt
-Foreback
-Forcum
-Forcht
-Forchione
-Forch
-Forberg
-Forbach
-Fonua
-Fonteno
-Fonteneau
-Fongvongsa
-Fondriest
-Fondaw
-Fonck
-Fohl
-Foglio
-Foersterling
-Foddrell
-Focke
-Flugum
-Flucas
-Fluaitt
-Floss
-Florendo
-Floras
-Floer
-Flockhart
-Flockerzi
-Floan
-Flin
-Fliger
-Flieller
-Fleurilus
-Flenord
-Fleniken
-Flenaugh
-Flemmon
-Flemm
-Fleites
-Fleischner
-Fleckles
-Flechas
-Flauding
-Flatter
-Flato
-Flanner
-Flanegan
-Flammang
-Flakne
-Flaker
-Flagiello
-Fladung
-Flachs
-Flaa
-Fiwck
-Fitzrandolph
-Fitzherbert
-Fitzgerrel
-Fitsgerald
-Fisser
-Fishell
-Fischl
-Fischhaber
-Fischel
-Fiscella
-Fiscel
-Firpi
-Firenze
-Fiorilli
-Fiorica
-Finwall
-Finklestein
-Fingerson
-Fingerman
-Fineout
-Finello
-Finell
-Findlen
-Finco
-Filthaut
-Filpus
-Filo
-Filla
-Fili
-Fil
-Figiel
-Figgeurs
-Figert
-Fietek
-Fiest
-Fieser
-Fiesel
-Fickbohm
-Ficht
-Ficchi
-Fialho
-Fial
-Feyh
-Feyereisen
-Feuss
-Feusier
-Fette
-Festini
-Fest
-Fesko
-Fertik
-Ferrusi
-Ferrone
-Ferrio
-Ferringo
-Ferries
-Ferrie
-Ferrett
-Ferrato
-Ferrario
-Ferraraccio
-Ferranto
-Ferr
-Ferouz
-Fernette
-Fernanders
-Ferkel
-Feret
-Ferer
-Ferenz
-Fenrich
-Fenniman
-Fennig
-Fenison
-Fendrick
-Fendlason
-Fend
-Fenbert
-Felver
-Feltham
-Felonia
-Felling
-Fellezs
-Felizardo
-Felio
-Felicien
-Felicia
-Felicano
-Feliberty
-Feistner
-Feister
-Feintuch
-Feilds
-Feighner
-Feierman
-Fehrs
-Fegueroa
-Fegles
-Fegette
-Feerick
-Feela
-Feehly
-Feehery
-Fedorko
-Fedie
-Fedezko
-Fedewa
-Federkeil
-Fecto
-Fechtig
-Fecher
-Featheroff
-Feagans
-Fazzari
-Faycurry
-Fawson
-Fawler
-Favuzzi
-Favro
-Favian
-Favazza
-Fausey
-Faus
-Faupel
-Fattore
-Fatora
-Fathy
-Fathree
-Fatheree
-Fassinger
-Faske
-Farug
-Fars
-Farnese
-Farkus
-Farinha
-Faren
-Faraimo
-Farahkhan
-Faragher
-Fanti
-Fanter
-Fantazia
-Fantauzzo
-Fansher
-Fandino
-Fanatia
-Famageltto
-Falzon
-Fallow
-Fallenstein
-Falencki
-Falcioni
-Falci
-Failey
-Failde
-Faigley
-Faidley
-Fahrni
-Fahrlander
-Fahrenthold
-Fahning
-Fago
-Fagle
-Fagerquist
-Fagerlund
-Fageraes
-Facello
-Ezzelle
-Eyton
-Eyestone
-Exton
-Exantus
-Evjen
-Evilsizor
-Evertt
-Evertsen
-Eversmeyer
-Everroad
-Everline
-Everet
-Evartt
-Evansky
-Evancho
-Eull
-Ettman
-Ettienne
-Ettel
-Etringer
-Eth
-Estronza
-Estrem
-Estrade
-Estok
-Estle
-Estimable
-Estess
-Estella
-Estanislau
-Essix
-Essency
-Esquinaldo
-Espiridion
-Espinel
-Esperon
-Espenlaub
-Espejel
-Esparsen
-Esmont
-Esmon
-Esmay
-Esmaili
-Eskins
-Eskind
-Eshmon
-Esfahani
-Escober
-Escanlar
-Erz
-Ersery
-Eros
-Ernster
-Erlebach
-Eriks
-Erichson
-Erger
-Eredia
-Erdos
-Ercole
-Ercolano
-Erazmus
-Eraso
-Epel
-Eovaldi
-Ensz
-Ensel
-Enock
-Ennes
-Enis
-Engnath
-Engfer
-Engelmeyer
-Engelberg
-Engard
-Endris
-Endreson
-Endorf
-Endersbe
-Ende
-Encino
-Emshwiller
-Empasis
-Emore
-Emmond
-Emiliano
-Emerling
-Emenaha
-Emde
-Emberling
-Emano
-Elway
-Elvey
-Eltringham
-Elter
-Elsken
-Elsheimer
-Elsaesser
-Elrick
-Elreda
-Elpert
-Elnicki
-Elmes
-Ellsmore
-Ellrod
-Ello
-Ellinghuysen
-Ellingham
-Ellingburg
-Elles
-Ellenbogen
-Elleby
-Ellcessor
-Ellamar
-Elke
-Elijah
-Eligio
-Elieff
-Elicker
-Elian
-Eliades
-Elhadi
-Elfenbein
-Elenbaas
-Eldringhoff
-Eld
-Elbie
-Eke
-Ekas
-Eisnaugle
-Eisiminger
-Eisenhaver
-Eisenhardt
-Eisenberger
-Eiselein
-Einwalter
-Eighmey
-Eidemiller
-Eickmeyer
-Eichstedt
-Eichenberg
-Eichberg
-Eibel
-Ehrisman
-Ehrenzeller
-Ehman
-Ehli
-Ehl
-Eheler
-Egwuohua
-Eglin
-Egler
-Egersdorf
-Egelston
-Efthimiou
-Eelkema
-Edu
-Edridge
-Edland
-Edenholm
-Edem
-Economou
-Eckmann
-Eckblad
-Eckardt
-Echternach
-Echter
-Ebrahimi
-Eberst
-Ebershoff
-Eberheart
-Ebbett
-Eayrs
-Eavey
-Eatough
-Eastling
-Eastern
-Easterlin
-Earthly
-Earing
-Eakles
-Eagleman
-Eacho
-Eaby
-Dzwonkowski
-Dzurnak
-Dzurilla
-Dziuba
-Dzinski
-Dziewanowski
-Dziekan
-Dyrstad
-Dydo
-Dvorsky
-Duyer
-Duttinger
-Dutchess
-Duston
-Dush
-Durward
-Dursteler
-Durpee
-Durough
-Durniok
-Durnan
-Durisseau
-Duris
-Duriga
-Durda
-Durboraw
-Dura
-Duquaine
-Duplessy
-Duplanti
-Dupes
-Duperre
-Dupaski
-Duos
-Dunshie
-Dunphe
-Dunnell
-Dunkinson
-Dunkerley
-Dunkan
-Dunemann
-Dunderman
-Duncans
-Dunahoe
-Dumouchel
-Dummett
-Dumeny
-Dumbar
-Dumar
-Dulan
-Dukett
-Duk
-Duis
-Duguette
-Dugre
-Dufrain
-Dufauchard
-Duesterhaus
-Duesterback
-Duerst
-Duenwald
-Dudzik
-Dudycha
-Dudenbostel
-Dudden
-Ducklow
-Duckey
-Duchnowski
-Duchane
-Duceman
-Dubovsky
-Dubler
-Duber
-Dubel
-Dubbert
-Drutman
-Drummey
-Drumbore
-Droy
-Drow
-Droubay
-Drorbaugh
-Dropinski
-Dronko
-Dronick
-Droggitis
-Drissel
-Driscol
-Drinen
-Driessen
-Driedric
-Dreuitt
-Drenning
-Drelick
-Drejka
-Dreiss
-Drebes
-Dratch
-Drakulic
-Drakos
-Draime
-Dragovich
-Dragich
-Draggett
-Dragg
-Drabicki
-Doyscher
-Doxbeck
-Downy
-Downhour
-Dowland
-Dowker
-Dowds
-Dowda
-Douyette
-Douthett
-Doughman
-Dougharty
-Douga
-Doudna
-Dotolo
-Dossman
-Dosh
-Dorsinville
-Dorsay
-Dorrill
-Dorosh
-Dornbrook
-Dorlando
-Dorio
-Dorie
-Dorcas
-Doporto
-Dopita
-Doorley
-Dooner
-Donton
-Dono
-Donnerberg
-Donnalley
-Donlyuk
-Donkle
-Donilon
-Doniger
-Donigan
-Doniel
-Doncaster
-Donatich
-Donaher
-Donah
-Donaghue
-Donaby
-Domowicz
-Domitrovich
-Dominowski
-Dominiak
-Domenice
-Dombek
-Domagalski
-Domagall
-Dolsen
-Dolmajian
-Dolley
-Dolinski
-Dolhun
-Dolfi
-Dolecek
-Dokovic
-Dok
-Dohrn
-Doerksen
-Doelger
-Doeberling
-Dody
-Dodimead
-Dodgion
-Dockum
-Dockerty
-Dochterman
-Dobrzykowski
-Dobrynski
-Dobrushin
-Dobrosky
-Dobrinin
-Dobison
-Dobbyn
-Dobbe
-Dlugos
-Ditucci
-Dittus
-Dittmann
-Dito
-Ditmars
-Disotell
-Disorda
-Disharoon
-Dischner
-Discala
-Disalvi
-Dirth
-Dirr
-Dirienzo
-Dipolito
-Dipilato
-Dipietrantoni
-Dipanfilo
-Dioneff
-Diomede
-Dinuzzo
-Dintino
-Dinsmoor
-Dinsdale
-Dinos
-Dinora
-Dinnendahl
-Dinkle
-Dininger
-Dingillo
-Dingie
-Dingell
-Dimitry
-Dimicco
-Dimezza
-Dimarzio
-Dimario
-Dimariano
-Dimanche
-Dilucca
-Dillis
-Dilliner
-Dillin
-Dillashaw
-Dilillo
-Dilg
-Dilella
-Diker
-Digiouanni
-Digeorgio
-Difronzo
-Difrancisco
-Dietterick
-Diestler
-Dies
-Dierkes
-Diekema
-Diederichs
-Dieball
-Didway
-Didonatis
-Didomizio
-Didio
-Didato
-Dicosmo
-Dicorpo
-Dicocco
-Diclaudio
-Dichiaro
-Dible
-Diblase
-Dibiasi
-Dibbern
-Diano
-Diani
-Diangelis
-Diamantopoulo
-Diaco
-Dhruva
-Dheel
-Dharas
-Dezalia
-Deyak
-Deya
-Dewolff
-Dewick
-Dewese
-Dewater
-Devot
-Devost
-Devis
-Devilliers
-Devery
-Deveny
-Devenny
-Develice
-Devasier
-Devarona
-Devanski
-Devai
-Deus
-Dettorre
-Dettor
-Detrolio
-Detrich
-Detillion
-Deteso
-Determann
-Deterline
-Deterding
-Detchon
-Detaeye
-Destina
-Destefani
-Desruisseaux
-Desormeau
-Desonia
-Desmore
-Desko
-Desimas
-Desher
-Deshayes
-Deschene
-Desantos
-Desando
-Desamparo
-Desalvatore
-Derx
-Deruiter
-Derosie
-Derogatis
-Derman
-Derkas
-Derivan
-Derington
-Derienzo
-Derian
-Dereus
-Derenzi
-Derentis
-Derderian
-Derastel
-Deraps
-Dequinzio
-Deprato
-Depont
-Depiro
-Depierro
-Depeyster
-Deonarine
-Deocampo
-Denzine
-Denwood
-Denos
-Denooyer
-Denomme
-Denoia
-Dennig
-Denjen
-Denisco
-Denick
-Denholm
-Denfip
-Deneui
-Denetclaw
-Denet
-Denery
-Demuzio
-Demske
-Dempewolf
-Demorrett
-Demorizi
-Demny
-Demiter
-Demilt
-Demik
-Demien
-Demianczyk
-Demetrakos
-Demer
-Dembek
-Demauro
-Demase
-Demart
-Demarino
-Deluzio
-Delullo
-Delucian
-Deltufo
-Deltora
-Delsoin
-Delsavio
-Delross
-Delperdang
-Delpaggio
-Delosier
-Delonge
-Delonais
-Deloge
-Delmendo
-Dellwo
-Dellum
-Dellosso
-Delliveneri
-Dellefave
-Dellarose
-Dellapenta
-Dellamonica
-Delgoda
-Delekta
-Delegado
-Deldonno
-Delco
-Delce
-Delbene
-Delavergne
-Delashmutt
-Delapuente
-Delaporte
-Delana
-Delallo
-Delahay
-Delagol
-Delagado
-Delabarre
-Dekruif
-Dekoning
-Dekeyzer
-Dejoseph
-Dejardin
-Dejarden
-Deister
-Deigado
-Deichmann
-Deichman
-Dehm
-Dehlinger
-Dehl
-Dehetre
-Dehaney
-Dehaas
-Degrood
-Degrass
-Degrande
-Degooyer
-Degnim
-Deglandon
-Degenfelder
-Degenaro
-Degear
-Degagne
-Defrang
-Defrain
-Defosset
-Defosse
-Defont
-Defir
-Defayette
-Deerdoff
-Deely
-Dedrickson
-Dednam
-Dederich
-Decurtis
-Decourt
-Decourcey
-Decock
-Declerk
-Decius
-Dechavez
-Dech
-December
-Decarvalho
-Decarmine
-Decaire
-Decaen
-Debrosse
-Debreto
-Debrecht
-Debrae
-Debore
-Debien
-Debenedictis
-Debarge
-Debardelaben
-Debaets
-Deasis
-Dears
-Dearruda
-Dearring
-Dearinger
-Dearin
-Dearcos
-Deanes
-Deakyne
-Dazzi
-Dazi
-Dayao
-Dawkin
-Davolt
-Davise
-Davine
-Davidsmeyer
-Davidowicz
-Davaz
-Davari
-Davance
-Dauster
-Dause
-Daulerio
-Daughters
-Daugereau
-Daubney
-Datamphay
-Dasouza
-Daskal
-Dashno
-Dashne
-Dasen
-Daschofsky
-Dasch
-Darwich
-Darvish
-Darveau
-Darting
-Darthard
-Darron
-Daron
-Darnstaedt
-Darmody
-Darmiento
-Darington
-Dariano
-Daria
-Dardenne
-Darakjian
-Danyow
-Dannis
-Danniels
-Danni
-Dannelly
-Dannelley
-Dannatt
-Daniely
-Dangelis
-Danese
-Daner
-Dandoy
-Danco
-Danca
-Danas
-Damrell
-Damone
-Damms
-Damme
-Dalporto
-Daloisio
-Dalmata
-Dallison
-Dallam
-Dallago
-Dalegowski
-Dalecki
-Daku
-Daking
-Daken
-Dajer
-Dajani
-Daidone
-Dahlka
-Dagres
-Dago
-Dager
-Dafonte
-Dada
-Daczewitz
-Dach
-Czysz
-Czubakowski
-Czartoryski
-Czapiewski
-Cyrnek
-Cyree
-Cygrymus
-Cwikla
-Cwalinski
-Cutrera
-Cuther
-Cutchember
-Cushner
-Cusenza
-Curreri
-Curlis
-Curio
-Curimao
-Curia
-Curey
-Cunio
-Cumoletti
-Cumberlander
-Culpit
-Culloton
-Cuffy
-Cuffman
-Cuddington
-Cucuta
-Cucufate
-Cubine
-Cubano
-Cuadras
-Csuhta
-Crutison
-Cruther
-Crusinberry
-Crummell
-Crumly
-Cruff
-Crozat
-Crossmon
-Crosiar
-Crookshank
-Crookes
-Cronoble
-Croner
-Cromeans
-Crolley
-Crofutt
-Crockette
-Crivelli
-Crivaro
-Cristino
-Criste
-Crissey
-Crisalli
-Criley
-Cribari
-Crewe
-Creselious
-Crescenti
-Crepps
-Crenwelge
-Creitz
-Cregin
-Cregger
-Creekbaum
-Credi
-Crebs
-Crayford
-Cravy
-Cravalho
-Crauswell
-Crathers
-Crask
-Crapp
-Crape
-Crapanzano
-Cranson
-Crans
-Crannell
-Crandal
-Craigwell
-Craigmyle
-Crafter
-Cradler
-Coxwell
-Coxen
-Cowlin
-Covitz
-Coventon
-Coutre
-Coutinho
-Coutermarsh
-Courton
-Courseault
-Courrege
-Courey
-Coulon
-Coulibaly
-Couden
-Coton
-Coste
-Cossett
-Cosman
-Cosma
-Coslow
-Cosico
-Coshow
-Corwell
-Corvo
-Corujo
-Cortopassi
-Cortinez
-Cortijo
-Corrio
-Corrington
-Corriher
-Corridan
-Corrga
-Correla
-Corping
-Corpe
-Coroniti
-Cornn
-Cornmesser
-Cornella
-Corneille
-Corkron
-Corf
-Coreen
-Cordiero
-Cordew
-Cordenas
-Corcuera
-Corbley
-Coray
-Coraham
-Copstead
-Copsey
-Copping
-Coppes
-Copney
-Coopper
-Cooperider
-Coopage
-Coonse
-Cookerly
-Conwright
-Contreraz
-Continenza
-Contes
-Consuelo
-Constine
-Constanzo
-Constantin
-Constancio
-Consentino
-Conradt
-Conour
-Conoley
-Conney
-Connerat
-Conlogue
-Conforme
-Confalone
-Coneway
-Condroski
-Condina
-Condiff
-Condi
-Conchado
-Conch
-Concatelli
-Conaughty
-Commerford
-Comissiong
-Cominski
-Cominotti
-Comar
-Colschen
-Colpi
-Colpa
-Colony
-Collons
-Collon
-Collicott
-Collea
-Collari
-Colker
-Colier
-Colesar
-Colemen
-Colecchi
-Colcher
-Colchado
-Coklow
-Cokel
-Cohick
-Cofone
-Coffinberger
-Coffell
-Coffel
-Codispot
-Codilla
-Cocroft
-Cockerhan
-Cochren
-Cochenour
-Cobetto
-Cobar
-Coalter
-Clyman
-Cluver
-Clusky
-Clunes
-Clukies
-Clowerd
-Clouatre
-Clossin
-Cloos
-Clokey
-Clinkinbeard
-Cliffton
-Clibon
-Clevland
-Cleverley
-Clesca
-Clerc
-Clemenza
-Cleath
-Cleasby
-Cleal
-Clavijo
-Clater
-Claros
-Claghorn
-Clacher
-Clabo
-Civil
-Cittadini
-Citroni
-Cissel
-Cisar
-Cirella
-Circelli
-Ciprian
-Cipcic
-Ciotta
-Cinnamond
-Cinkan
-Cinco
-Cinar
-Cimorelli
-Ciminera
-Cilenti
-Cihak
-Cieloszyk
-Cidre
-Cicen
-Cicali
-Cibik
-Ciavardini
-Cianfrani
-Cianciola
-Ciallella
-Ciaffone
-Chyle
-Chy
-Churchfield
-Churape
-Chuma
-Chulla
-Chueng
-Chubicks
-Chrystal
-Chrosniak
-Chriswell
-Christopoulos
-Christi
-Christerson
-Christenbury
-Chowenhill
-Chowansky
-Choudhary
-Chor
-Chopton
-Cholula
-Chollett
-Choinski
-Chocron
-Chockley
-Chochrek
-Choates
-Chlebus
-Chiz
-Chitrik
-Chisman
-Chiphe
-Chiola
-Chiodi
-Chinault
-Chime
-Chimal
-Chilsom
-Chillo
-Chicles
-Chicharello
-Chicalace
-Chiariello
-Chiappari
-Chhan
-Chham
-Chez
-Chevis
-Cheverton
-Cheverez
-Cheu
-Chessman
-Cherubini
-Cherrin
-Cheroki
-Cherny
-Chernich
-Chernesky
-Cheranichit
-Cheeseboro
-Chech
-Cheam
-Chavoustie
-Chavies
-Chaumont
-Chaulklin
-Chatampaya
-Chasson
-Chassaniol
-Chary
-Charvet
-Charry
-Chari
-Chararria
-Chappo
-Chappa
-Chapmond
-Chaplik
-Chapen
-Chanthasene
-Chanler
-Chanco
-Chamul
-Champaco
-Chalupa
-Challinor
-Challa
-Chalender
-Chaknis
-Chakkalakal
-Chaisty
-Chaddick
-Chaboya
-Chaberek
-Chabbez
-Cevera
-Cerverizzo
-Cerventez
-Cervantsz
-Cerva
-Cerroni
-Cerri
-Cerrello
-Cerone
-Cernuto
-Cernota
-Cerminaro
-Cerf
-Ceretti
-Cerceo
-Cerasuolo
-Ceraso
-Cerasi
-Cerar
-Ceraos
-Cepin
-Cepas
-Centi
-Cendana
-Cendan
-Cellar
-Celeya
-Ceder
-Cecot
-Cazel
-Cazaree
-Cawon
-Cawein
-Cavrak
-Caveness
-Cavalaris
-Cavaiani
-Cauterucci
-Caughorn
-Caughell
-Cauazos
-Catts
-Cattanach
-Catrini
-Catozzi
-Catignani
-Catholic
-Catherson
-Catherine
-Cathell
-Catello
-Catchpole
-Catanzano
-Casuscelli
-Castros
-Castrey
-Castongvay
-Castillion
-Castelum
-Castells
-Castellion
-Cassler
-Cassino
-Cassilano
-Cassiano
-Cassetty
-Cassens
-Cassells
-Cassavaugh
-Cassagne
-Cassa
-Casolary
-Casmore
-Casley
-Caska
-Casis
-Casini
-Cashour
-Cashmer
-Cashett
-Casement
-Casciato
-Casavez
-Casasola
-Casarz
-Casar
-Casana
-Casales
-Carvill
-Carvallo
-Cartner
-Carrousal
-Carrizo
-Carretta
-Carrethers
-Carrao
-Carran
-Carpen
-Caroselli
-Carolla
-Carnillo
-Carnegia
-Carmin
-Carmickel
-Carlini
-Carland
-Carknard
-Carioscia
-Carina
-Carideo
-Carfrey
-Cardinalli
-Cardiff
-Cardazone
-Carbonella
-Carbery
-Carbee
-Caravetta
-Caravati
-Caramelo
-Caramella
-Caraig
-Carabine
-Cara
-Capristo
-Capri
-Cappellini
-Caporiccio
-Capicotto
-Capestro
-Capener
-Capek
-Capas
-Capaccino
-Caoagdan
-Canwell
-Cantella
-Cantakis
-Canson
-Cansino
-Cansibog
-Cannistraro
-Canner
-Caneza
-Caney
-Caneva
-Canetta
-Canestraro
-Candozo
-Candlish
-Candell
-Canant
-Canalez
-Can
-Camus
-Campora
-Campobasso
-Campble
-Campau
-Campain
-Camlin
-Camisa
-Camerino
-Camerano
-Camenisch
-Camelin
-Cameli
-Cambia
-Camareno
-Camancho
-Camack
-Calvan
-Calumag
-Caltagirone
-Calowell
-Callnan
-Callington
-Calliham
-Calligaro
-Caller
-Callar
-Callam
-Callagy
-Callagher
-Callado
-Caliman
-Caldron
-Caldoron
-Caldarera
-Calcao
-Calaf
-Cakmak
-Cajulus
-Cajka
-Caivano
-Caires
-Caire
-Caiozzo
-Cains
-Cainne
-Caimi
-Cagnon
-Cagno
-Cagan
-Caffentzis
-Cafasso
-Caez
-Caddigan
-Caddel
-Cacatian
-Cabugos
-Cabon
-Cabarcas
-Cabanillas
-Cabanela
-Cabam
-Bywaters
-Bystron
-Byse
-Byous
-Bynun
-Byczek
-Bybel
-Byal
-Buzza
-Buzo
-Buzis
-Buvinghausen
-Butzke
-Buttross
-Buttray
-Buttke
-Buttitta
-Butenhoff
-Busscher
-Busk
-Busitzky
-Bushweller
-Bushrod
-Bushfield
-Buschur
-Busacca
-Burzlaff
-Burvine
-Burtts
-Burtschi
-Burtell
-Bursik
-Burrs
-Burras
-Burows
-Burnie
-Burnash
-Burmside
-Burm
-Burly
-Burlson
-Burlile
-Burlaza
-Burlage
-Burkstrand
-Burkly
-Burklow
-Burkin
-Burian
-Burgs
-Burgoa
-Burgey
-Burgees
-Burfeind
-Burdzel
-Burchinal
-Burbine
-Buratti
-Buonassisi
-Buonaiuto
-Buntz
-Bunts
-Buntenbach
-Bunson
-Bunda
-Bumpaus
-Bumbalo
-Bumbaca
-Bullivant
-Bullin
-Bulisco
-Bulik
-Buley
-Bulat
-Bukowiecki
-Builes
-Buhrke
-Buhlig
-Bugh
-Buffone
-Buenviaje
-Bueler
-Buehlman
-Budzik
-Budy
-Budrovich
-Budish
-Budiao
-Budhu
-Buden
-Buddy
-Bud
-Buczko
-Bucknor
-Buckmeon
-Buckless
-Buckett
-Buckaloo
-Buchwalter
-Buchmiller
-Buchmeier
-Buchite
-Buchinsky
-Bucheli
-Buchann
-Buchal
-Bucaro
-Bubolz
-Buboltz
-Bubert
-Brzezicki
-Brzenk
-Brys
-Bryngelson
-Bryla
-Bryington
-Bruzewski
-Bruzek
-Brustmann
-Brusser
-Bruscato
-Brunzel
-Brunkhardt
-Brunick
-Brunetta
-Brunecz
-Bruna
-Brumaghim
-Bruker
-Bruin
-Brugliera
-Bruffee
-Brueske
-Bruegger
-Bruechert
-Bruckmeier
-Brroks
-Brozeski
-Broyle
-Brownlie
-Browman
-Broudy
-Brothen
-Broski
-Brosi
-Brookskennedy
-Brookie
-Bronston
-Broncheau
-Brommer
-Brola
-Broitzman
-Brohn
-Broglio
-Brogley
-Broers
-Broering
-Brodtmann
-Brodis
-Brodine
-Brodfuehrer
-Brodess
-Brodes
-Brockus
-Brockenberry
-Brociner
-Brochet
-Broadnay
-Brizeno
-Britts
-Brinley
-Brinkhaus
-Brinius
-Brininger
-Bringer
-Brindza
-Brindger
-Brinar
-Brilowski
-Brigner
-Brightharp
-Brighter
-Brienza
-Brienen
-Bridenbecker
-Brickson
-Breznay
-Brezinka
-Breyers
-Brevell
-Brettmann
-Bretos
-Bresser
-Brentz
-Brennick
-Brening
-Brendeland
-Brem
-Breiter
-Breihan
-Breidigan
-Bredlow
-Bredin
-Breckley
-Breckenstein
-Brebes
-Breaz
-Breaud
-Breath
-Bready
-Brazie
-Braunwarth
-Braunberger
-Brauman
-Braucks
-Brath
-Brasure
-Brasswell
-Brasseux
-Braskett
-Brasby
-Brantingham
-Bransfield
-Branseum
-Brano
-Brangers
-Brang
-Branes
-Brandstrom
-Brandorff
-Brandom
-Brandenburger
-Branck
-Brancaccio
-Bramuchi
-Bramlitt
-Bramel
-Bramasco
-Bram
-Brakke
-Brak
-Braget
-Bragado
-Brafman
-Bradmon
-Bradick
-Bradey
-Bradd
-Bracklin
-Brackbill
-Brabazon
-Braband
-Bozych
-Bozic
-Boyl
-Boyens
-Boyde
-Boyas
-Bowlick
-Bowle
-Bowcock
-Bouy
-Bouvia
-Bousum
-Bourraine
-Bourgon
-Bourbois
-Bouquin
-Boumthavee
-Boulger
-Boulch
-Boulais
-Boughn
-Bouges
-Boudle
-Boudjouk
-Boucouvalas
-Boucaud
-Bottrell
-Bottoni
-Bottella
-Bothner
-Botellio
-Boswink
-Bostow
-Bostain
-Bosson
-Bossier
-Bossey
-Bosold
-Boslet
-Boshnack
-Boshell
-Bosheers
-Bosefski
-Borza
-Boryszewski
-Borysewicz
-Borson
-Borseth
-Borroto
-Borrigo
-Borriello
-Borrello
-Borowicz
-Borovetz
-Borovec
-Borgelt
-Bordinger
-Bordas
-Bord
-Borcuk
-Borcher
-Borbridge
-Boothman
-Bookhardt
-Boocock
-Bonwell
-Bonsal
-Bonnoitt
-Bonnifield
-Bonnick
-Bonnel
-Bonker
-Bonita
-Boning
-Bonifield
-Boniface
-Bongle
-Bongivengo
-Bongio
-Bonge
-Bonett
-Bonebright
-Bondroff
-Bondoc
-Bonda
-Boncella
-Bonaventure
-Bonalumi
-Bonadona
-Bonaccorso
-Bonaccorsi
-Bompiani
-Bommer
-Bolvin
-Boluda
-Bolorin
-Bolon
-Bollom
-Bollettino
-Bolk
-Boliver
-Boline
-Bolieu
-Boliek
-Boleyn
-Boldul
-Boldery
-Bolante
-Bokor
-Boklund
-Bojanowski
-Boisuert
-Boislard
-Bohren
-Bohmann
-Bohlinger
-Bohart
-Boham
-Bogust
-Bogh
-Bogatay
-Bogany
-Boeving
-Boeshore
-Boesenberg
-Boerstler
-Boers
-Boenig
-Boelsche
-Boelke
-Boekhout
-Boekelman
-Boehner
-Boeckmann
-Bodwin
-Bodrey
-Bodman
-Bodiroga
-Bodford
-Bodensteiner
-Bodenheimer
-Boddorf
-Boddeker
-Bockskopf
-Bocchi
-Bocage
-Bobola
-Bobko
-Boben
-Boardway
-Boards
-Blyzes
-Blumenkranz
-Bloomgren
-Blong
-Blondeau
-Blommel
-Blois
-Bloem
-Blocklinger
-Blisset
-Blimka
-Bliler
-Bliese
-Blice
-Bleyer
-Blette
-Blesh
-Blender
-Blemel
-Bleifus
-Blechinger
-Bleattler
-Blazosky
-Blatti
-Blatteau
-Blatnik
-Blatchford
-Blankship
-Blankschan
-Blandy
-Blandino
-Blakeway
-Blakeborough
-Blaho
-Blackstar
-Blackgoat
-Blachly
-Blacher
-Blach
-Bizcassa
-Bizarro
-Bivings
-Bitsuie
-Bitsui
-Bitsko
-Bistodeau
-Bister
-Bisonette
-Bishel
-Bisconer
-Biscocho
-Biscahall
-Bisby
-Bisagna
-Birts
-Birnell
-Birkline
-Birkenhead
-Birenbaum
-Birckett
-Birckbichler
-Birchwood
-Biorkman
-Bimler
-Bilous
-Billinghurst
-Billey
-Billeter
-Billegas
-Billard
-Bilkiss
-Bile
-Bilcik
-Bigos
-Bignall
-Bigio
-Biggio
-Bigas
-Biffer
-Biffar
-Biesinger
-Bieschke
-Bierbrauer
-Bienfang
-Biehn
-Biederwolf
-Bieberle
-Biebel
-Bidon
-Bidner
-Bidgood
-Bidez
-Biderman
-Bickleman
-Bicklein
-Bicket
-Bicker
-Bickart
-Bichel
-Biard
-Bialik
-Bialczyk
-Bezner
-Beyrer
-Beylotte
-Beyerl
-Bevly
-Beulah
-Beul
-Betzel
-Betterman
-Betsinger
-Betschman
-Betita
-Bethurum
-Bethoney
-Beth
-Beston
-Besso
-Bessick
-Besio
-Beshear
-Besarra
-Bervig
-Bertus
-Bertrano
-Bertovich
-Bertolasio
-Bertog
-Bertinetti
-Bertelle
-Bertel
-Bertch
-Bertagnoli
-Berschauer
-Bersamin
-Bers
-Berri
-Berretti
-Berretta
-Berret
-Bernucho
-Bernt
-Bernstrom
-Berno
-Bernick
-Bernice
-Bernhagen
-Bernardoni
-Bernabo
-Bermers
-Berlove
-Berlinghof
-Berkhalter
-Berisha
-Bergseng
-Bergreen
-Bergholz
-Bergert
-Berez
-Beresnyak
-Berdes
-Beras
-Benzschawel
-Benzi
-Benya
-Benwell
-Benty
-Bentrup
-Bentele
-Benser
-Bennison
-Bennink
-Bennerson
-Bennerman
-Benitone
-Beniquez
-Benik
-Bengelsdorf
-Benell
-Beneduce
-Benecke
-Benear
-Bendzans
-Bendy
-Bendt
-Bendorf
-Bendolph
-Bendlage
-Benders
-Bendavid
-Benck
-Benassi
-Benari
-Benage
-Benadom
-Benabides
-Bembury
-Bemboom
-Bemberry
-Belyoussian
-Belveal
-Belsey
-Belongie
-Belone
-Belon
-Beloff
-Belluomini
-Belloma
-Bellmay
-Bellish
-Bellisario
-Bellingham
-Bellflower
-Bellfleur
-Bellerdine
-Bellemy
-Bellazer
-Belkowski
-Belich
-Belfiglio
-Beley
-Beldin
-Belback
-Belarde
-Belangia
-Bel
-Bekerman
-Beker
-Bek
-Beiswanger
-Beirise
-Behun
-Behning
-Behmer
-Behlen
-Begor
-Begg
-Beetley
-Bees
-Beermudez
-Beerling
-Beeck
-Bedsaul
-Bedoka
-Bednorz
-Becklund
-Beckerdite
-Beckendorf
-Beckenbach
-Bechthold
-Bechman
-Becherer
-Beavin
-Beauprez
-Beaumier
-Beauliev
-Beaugard
-Beaufait
-Beaudrie
-Beathe
-Beasmore
-Bearup
-Bearfield
-Beahn
-Beadnell
-Beadell
-Bazzel
-Bazzanella
-Bazelais
-Bazata
-Bazarte
-Baza
-Bayle
-Bayete
-Bawa
-Bavzee
-Bavard
-Bausley
-Baunleuang
-Baumgard
-Baumbusch
-Bauknight
-Baugham
-Bauers
-Bauermeister
-Baublitz
-Battistini
-Battiato
-Battiata
-Batters
-Battaglini
-Bathurst
-Bathrick
-Batel
-Batalona
-Basua
-Bastura
-Bastress
-Bastilla
-Bastidos
-Bastic
-Basten
-Bastedo
-Bastain
-Bassil
-Basset
-Bashinelli
-Basbas
-Baruth
-Barufaldi
-Bartylla
-Barts
-Bartrop
-Bartosz
-Bartosiak
-Bartolotto
-Bartolet
-Bartoldus
-Bartnett
-Bartlone
-Barthen
-Barthelman
-Bartenfield
-Bartczak
-Barsotti
-Barrocas
-Barrile
-Barrieau
-Barrer
-Barreira
-Barranger
-Barranca
-Barquera
-Barnscater
-Barnfield
-Barncastle
-Barnathan
-Barnar
-Barlip
-Barkins
-Barkenhagen
-Barkalow
-Barimah
-Baridon
-Barhydt
-Bargar
-Barff
-Bardeen
-Barcelona
-Barby
-Barbini
-Barbiere
-Barbetta
-Barberis
-Barberian
-Barban
-Barasch
-Baranow
-Baranovic
-Barajos
-Baraby
-Bapties
-Banyas
-Bantug
-Bantin
-Bantillan
-Bantay
-Bansbach
-Bankemper
-Banis
-Banick
-Banecker
-Bandin
-Bandemer
-Bandanza
-Bance
-Banales
-Bammon
-Bamfield
-Bambacigno
-Bambaci
-Balyeat
-Balvanz
-Balsano
-Balmores
-Ballreich
-Balloon
-Ballmer
-Ballintyn
-Balley
-Balletta
-Balhorn
-Balford
-Balezentis
-Baldrey
-Baldiviez
-Balder
-Baldassarre
-Baldacchino
-Balchunas
-Balceiro
-Balbin
-Balaz
-Balaski
-Balancia
-Balagtas
-Bakst
-Bakkum
-Bakios
-Bakeley
-Bajorek
-Bajdas
-Baizer
-Baitg
-Baise
-Bailony
-Baillio
-Baille
-Baiera
-Bahun
-Bah
-Bagne
-Bagi
-Baghdasarian
-Bageant
-Bagdonas
-Baetz
-Baeringer
-Badget
-Badeau
-Baddeley
-Bacy
-Backey
-Backenstose
-Backen
-Backe
-Backbone
-Baccouche
-Bacco
-Bacarella
-Babitsch
-Babena
-Babbin
-Babbel
-Babat
-Bab
-Azzaro
-Azoulay
-Azimi
-Azer
-Aylsworth
-Ayarza
-Axline
-Axelsen
-Awtrey
-Avola
-Avie
-Avetisyan
-Averyt
-Aveado
-Avanzato
-Avala
-Auyer
-Auxilien
-Auwarter
-Aurges
-Aures
-Auprey
-Aupperle
-Aunkst
-Aumich
-Aument
-Aumavae
-Aulbach
-Aukes
-Augspurger
-Auffrey
-Attridge
-Attkisson
-Attinger
-Atta
-Aton
-Atoe
-Atiyeh
-Athmann
-Athay
-Atchity
-Atallah
-Atala
-Astwood
-Astolfi
-Astol
-Asters
-Aspegren
-Asma
-Ashpole
-Ashfield
-Ashely
-Asevedo
-Aschmann
-Asar
-Asaeli
-Arzilli
-Arundel
-Arujo
-Aruiso
-Arturo
-Artry
-Artison
-Artinian
-Arrizaga
-Arriazola
-Arpino
-Arons
-Aronhalt
-Arntt
-Arniotes
-Arnholtz
-Arneberg
-Armillei
-Armijos
-Arm
-Arleth
-Arlen
-Arlan
-Arkins
-Arjes
-Arizzi
-Arizola
-Ariyoshi
-Aring
-Arimoto
-Arigo
-Arietta
-Arie
-Aridas
-Aricas
-Arhelger
-Arhart
-Arguillo
-Arguellez
-Argote
-Argenal
-Arenos
-Arenivas
-Arenivar
-Arendz
-Arendsee
-Arebela
-Ardizzone
-Ardion
-Ardery
-Ardd
-Ardan
-Arcino
-Arcilla
-Arcea
-Arcaute
-Arcangel
-Arcadipane
-Arbry
-Araque
-Aramini
-Arambuia
-Aragus
-Aragundi
-Aragoni
-Aragaki
-Aradanas
-Arabie
-Arabia
-Ar
-Apyuan
-Apuzzi
-Apruzzese
-Applewhaite
-Applebury
-Appeling
-Appelgate
-Apling
-Apking
-Apela
-Aparo
-Apa
-Aoay
-Anyan
-Antrican
-Antonopoulos
-Antonis
-Antonich
-Antonaccio
-Antona
-Antolik
-Antinore
-Anteby
-Anslinger
-Ansbacher
-Ansara
-Annette
-Ankersen
-Anis
-Aniol
-Aningalan
-Aniello
-Anichini
-Anibal
-Angviano
-Anglum
-Angley
-Angerer
-Angeloro
-Angeloff
-Angelocci
-Anestos
-Anerton
-Anelli
-Andzulis
-Andruss
-Andrian
-Andreatta
-Andonian
-Andon
-Anderon
-Andebe
-Andary
-Ancy
-Ancell
-Anasagasti
-Anakalea
-Anagnostou
-Amyotte
-Amtower
-Amstein
-Amsinger
-Amsili
-Amphy
-Amonette
-Amolsch
-Amistoso
-Amisano
-Amidei
-Amesquieto
-Amert
-Amento
-Ameling
-Amelang
-Ambroz
-Ambrosone
-Ambres
-Amble
-Amberson
-Ambeau
-Amati
-Amargo
-Amancio
-Amailla
-Amadi
-Alzugaray
-Alvorez
-Alverest
-Alven
-Alvarengo
-Alvalle
-Alvacado
-Alummoottil
-Alukonis
-Alu
-Altwies
-Altum
-Altringer
-Altop
-Altheimer
-Altew
-Alterio
-Alsman
-Alsdon
-Alsbrooks
-Alsandor
-Alrich
-Alrais
-Almario
-Allor
-Allocca
-Allnutt
-Allmand
-Allhands
-Allgaeuer
-Allessi
-Allenbrand
-Allemond
-Allegre
-Allcorn
-Allbones
-Allamong
-Allaband
-Algeo
-Alge
-Alfreds
-Alfera
-Alexzander
-Alexiou
-Alexaki
-Alexader
-Alevedo
-Alerte
-Alekna
-Aleizar
-Alegi
-Alegar
-Aleff
-Alecca
-Aldrege
-Aldi
-Aldarondo
-Alcosiba
-Alcombright
-Alce
-Alcaoa
-Alcaide
-Albriton
-Albrekht
-Albracht
-Alberthal
-Alberro
-Alberda
-Alattar
-Alar
-Alampi
-Alamos
-Alaibilla
-Alacano
-Akuchie
-Akram
-Akinyooye
-Akiereisen
-Aimbez
-Ailstock
-Ahyou
-Ahrenholtz
-Ahonen
-Ahmau
-Ahlstedt
-Ahle
-Ahlborn
-Aharonof
-Aharon
-Ahal
-Aguino
-Aguillera
-Aguiler
-Agueda
-Aguallo
-Agrios
-Agriesti
-Agricola
-Agreste
-Agrela
-Agre
-Agney
-Agne
-Agliam
-Agerton
-Afoa
-Aflalo
-Affelt
-Affagato
-Afan
-Aemmer
-Adzhabakyan
-Ady
-Adside
-Adrovel
-Adrid
-Adonis
-Adleman
-Adle
-Adjutant
-Adesso
-Adels
-Addo
-Adamiak
-Acron
-Ackins
-Ackies
-Achziger
-Achzet
-Achekian
-Ache
-Acfalle
-Accetturo
-Abubakr
-Abson
-Abramowski
-Aboytes
-Aboulissan
-Abling
-Ablin
-Ablang
-Abke
-Abetrani
-Abernatha
-Abela
-Abeb
-Abdin
-Abdelwahed
-Abdella
-Abdeldayen
-Abdel
-Abbinanti
-Abbay
-Abbadessa
-Abaya
-Abaunza
-Abatti
-Aasby
-Aaland
-Aaby
-Zysett
-Zwinger
-Zweier
-Zuziak
-Zusman
-Zuro
-Zurkus
-Zurheide
-Zurawik
-Zuniega
-Zumot
-Zullig
-Zukowsky
-Zukof
-Zukerman
-Zuclich
-Zuchara
-Zubrzycki
-Zuberbuhler
-Zuazo
-Zsohar
-Zschoche
-Zrimsek
-Zoutte
-Zotos
-Zorzi
-Zoroiwchak
-Zorens
-Zoquier
-Zonia
-Zone
-Zondlo
-Zomora
-Zombro
-Zombory
-Zombo
-Zomberg
-Zolman
-Zollar
-Zolinski
-Zolinas
-Zoellick
-Zoelle
-Zoebisch
-Zodrow
-Zoda
-Zobell
-Zmiejko
-Zlotnick
-Zlatkin
-Ziyad
-Ziter
-Zita
-Zissler
-Zisser
-Zirin
-Zircher
-Zipse
-Zipkin
-Zipay
-Zinni
-Zinkl
-Zimit
-Zimba
-Ziman
-Ziler
-Zilahi
-Ziko
-Zihal
-Zieske
-Zieser
-Zientara
-Ziencina
-Zielonko
-Ziek
-Ziehm
-Ziego
-Ziegenhagen
-Ziedan
-Ziebold
-Zidzik
-Zickuhr
-Zicari
-Zibert
-Zibelli
-Ziak
-Ziadie
-Zezima
-Zeyadeh
-Zeto
-Zetes
-Zerzan
-Zerring
-Zerom
-Zerck
-Zerbel
-Zentgraf
-Zenker
-Zener
-Zenbaver
-Zena
-Zemon
-Zemjanis
-Zeminski
-Zelmar
-Zellous
-Zellefrow
-Zelkind
-Zeleny
-Zelenko
-Zeis
-Zeimetz
-Zeimantz
-Zeilman
-Zehnpfennig
-Zehe
-Zeegers
-Zeckzer
-Zebell
-Zebel
-Zeals
-Zdrojkowski
-Zazozdor
-Zaxas
-Zawadzki
-Zavatson
-Zavadoski
-Zatko
-Zastawny
-Zaspel
-Zarzuela
-Zarycki
-Zarucki
-Zart
-Zarriello
-Zarozinski
-Zarnick
-Zarkin
-Zaritsky
-Zarella
-Zappolo
-Zappile
-Zappavigna
-Zapoticky
-Zapico
-Zapato
-Zapatas
-Zanueta
-Zanter
-Zanola
-Zanis
-Zaneski
-Zanco
-Zamzam
-Zamperini
-Zamparini
-Zampaglione
-Zamostny
-Zammiello
-Zammetti
-Zambotti
-Zamborsky
-Zam
-Zalwsky
-Zakarian
-Zaituna
-Zaitlin
-Zaidel
-Zaic
-Zaibel
-Zahri
-Zahradka
-Zahra
-Zahorchak
-Zaharchuk
-Zagorac
-Zagen
-Zaffina
-Zaffalon
-Zadra
-Zadow
-Zador
-Zadd
-Zacharia
-Zacharewicz
-Zablonski
-Zabka
-Zabik
-Zabielski
-Zabek
-Yuzn
-Yuste
-Yusi
-Yurkanin
-Yurich
-Yurchiak
-Yungclas
-Yungbluth
-Yunan
-Yuki
-Yueh
-Yucha
-Yslava
-Yrigollen
-Yragui
-Ypina
-Yozamp
-Yovino
-Yovanovich
-Yournet
-Younkins
-Younglove
-Younglas
-Youket
-Yosko
-Yoshimori
-Yorton
-Yorn
-Yorkman
-Yorio
-Yorgey
-Yoquelet
-Yonkoske
-Yongue
-Yonge
-Yoney
-Yonemori
-Yonek
-Yokiel
-Yokely
-Yoders
-Yo
-Yngsdal
-Ylonen
-Yilma
-Yidiaris
-Yezek
-Yestramski
-Yessios
-Yeskey
-Yerry
-Yerly
-Yerbich
-Yenz
-Yenney
-Yenner
-Yenglin
-Yengich
-Yendell
-Yeldon
-Yekel
-Yeisley
-Yeilding
-Yegge
-Yeend
-Yeeloy
-Yearicks
-Yeamans
-Yeakle
-Ydara
-Ybos
-Yballe
-Yavorsky
-Yater
-Yasutomi
-Yasinski
-Yarzabal
-Yarrell
-Yarish
-Yanoff
-Yannotti
-Yankovitz
-Yanity
-Yanetta
-Yandura
-Yancik
-Yanan
-Yanai
-Yamnitz
-Yammine
-Yamkosumpa
-Yakulis
-Yaklich
-Yakel
-Yahraus
-Yahna
-Yahl
-Yagoudaef
-Yagin
-Yagecic
-Yaftali
-Yafei
-Yafai
-Yablonsky
-Xander
-Wzorek
-Wykes
-Wydryck
-Wydo
-Wydler
-Wycuff
-Wyborny
-Wurts
-Wurgler
-Wuolle
-Wunderly
-Wun
-Wulkan
-Wuitschick
-Wuestenberg
-Wuerz
-Wuellenweber
-Wucherer
-Wublin
-Wubbel
-Wrotten
-Wrinkles
-Wriedt
-Wrenne
-Wreede
-Wraggs
-Woyahn
-Woulard
-Woudenberg
-Woskobojnik
-Wosher
-Wortinger
-Worstell
-Worst
-Worner
-Worn
-Wormely
-Worlow
-Workings
-Workinger
-Wootan
-Woolhouse
-Wooleyhan
-Woolcott
-Woodliff
-Woodert
-Woodend
-Woodburg
-Woodand
-Women
-Wombolt
-Wolzen
-Wolthuis
-Wolsted
-Wolsky
-Woloszczak
-Woller
-Wolkowski
-Wolkowiecki
-Woliver
-Wolhok
-Wolfsberger
-Wolfred
-Wolffe
-Wolfertz
-Wolbeck
-Wokwicz
-Wojtowich
-Wojtecki
-Wojnaroski
-Wojeik
-Woiwode
-Wohlwendi
-Wohlschlegel
-Wohlrab
-Wohld
-Woester
-Woernle
-Woelzlein
-Woelfle
-Wodskow
-Wlosinski
-Wlodyka
-Wlazlowski
-Wlach
-Wizar
-Wiuff
-Witvoet
-Wittstruck
-Wittry
-Wittliff
-Witterstauter
-Witsell
-Witosky
-Withy
-Witherbee
-Withenshaw
-Witczak
-Wisterman
-Wisnosky
-Wisniowski
-Wiskowski
-Wisk
-Wisinger
-Wisenor
-Wischner
-Wisbey
-Wirtjes
-Wirght
-Wirf
-Wipprecht
-Winzler
-Winzenried
-Wintringham
-Winterton
-Winterfeldt
-Winterbottom
-Winsted
-Wins
-Winninger
-Winning
-Winney
-Winnewisser
-Winners
-Winnegan
-Winklepleck
-Winkleblack
-Winkelpleck
-Winkeljohn
-Winkelbauer
-Winingear
-Winikoff
-Wingstrom
-Winett
-Winesickle
-Winesberry
-Winek
-Windmeyer
-Windhurst
-Windam
-Wimpey
-Wiman
-Wilts
-Wiltjer
-Wilterdink
-Willrett
-Willour
-Willmes
-Willmann
-Willinsky
-Willington
-Willigar
-Williama
-Willegal
-Willcoxon
-Willand
-Willame
-Willaby
-Wilkowitz
-Wilkers
-Wilison
-Wilis
-Wilgocki
-Wilging
-Wilfinger
-Wilebski
-Wildin
-Wildfong
-Wilderson
-Wildenthaler
-Wildeisen
-Wildauer
-Wilcinski
-Wilansky
-Wilabay
-Wikins
-Wikert
-Wik
-Wiinikainen
-Wiggains
-Wigen
-Wieto
-Wiess
-Wiesman
-Wierzba
-Wierschen
-Wierschem
-Wiehe
-Wieger
-Wiederwax
-Wiederin
-Wiede
-Wieciech
-Wiechert
-Wiechec
-Widrig
-Widowski
-Widmaier
-Widlak
-Widdoes
-Wickus
-Wicketts
-Wickemeyer
-Wicka
-Wicinsky
-Wibeto
-Wibberley
-Wibbenmeyer
-Wiatrak
-Wiatr
-Wiand
-Whyman
-Wholly
-Whittley
-Whittiker
-Whitteker
-Whitset
-Whitmyre
-Whitmeyer
-Whitheld
-Whitesinger
-Whitemore
-Whitacker
-Whistle
-Whisker
-Whisenton
-Whippie
-Whipp
-Whildin
-Whigum
-Whiby
-Whelton
-Wheeington
-Whan
-Whaler
-Whal
-Weyhrauch
-Wewerka
-Wetterauer
-Wetselline
-Wetklow
-Westwater
-Westrom
-Westre
-Westhouse
-Westervoorde
-Westergaard
-Westerbeck
-Westcote
-Westaway
-Wesselink
-Wesselhoft
-Weslowski
-Weslow
-Wescovich
-Werthman
-Wershey
-Werries
-Wernli
-Werning
-Werma
-Werking
-Wenzell
-Wentzloff
-Wentcell
-Wenstrand
-Wensky
-Wennersten
-Wenman
-Wengren
-Wener
-Weneck
-Wendy
-Wendte
-Wenderoth
-Wend
-Wenclawiak
-Wence
-Wemark
-Weltmer
-Welms
-Welman
-Wellendorf
-Welfel
-Weitkamp
-Weith
-Weiszbrod
-Weissmann
-Weissert
-Weisse
-Weissbrodt
-Weismiller
-Weisiger
-Weisenhorn
-Weisenfluh
-Weisend
-Weisenberg
-Weisdorfer
-Weisberger
-Weirather
-Weinzinger
-Weinzimer
-Weinzetl
-Weintz
-Weinand
-Weiker
-Weikal
-Weik
-Weigman
-Weigleb
-Weigart
-Weidenheimer
-Weiden
-Weickum
-Wehring
-Wehausen
-Weglin
-Weghorst
-Weeth
-Weeter
-Weenum
-Weelborg
-Weegar
-Weeber
-Wedwick
-Wedner
-Wedlow
-Wedlock
-Wedi
-Wedgworth
-Weckenborg
-Wechselblatt
-Webbs
-Webbink
-Weavil
-Weatherley
-Weatherill
-Wearrien
-Wearly
-Weagel
-Weadon
-Waymer
-Wayde
-Waybill
-Wavra
-Waughtel
-Waughtal
-Wauch
-Watzke
-Wattson
-Watrs
-Watral
-Watne
-Waterston
-Waszmer
-Wasylow
-Wasyliszyn
-Wassermann
-Wassenberg
-Wassenaar
-Waskow
-Waskey
-Waska
-Washurn
-Washup
-Washuk
-Washnock
-Washman
-Washinski
-Wasem
-Wartman
-Warsme
-Warsing
-Warschaw
-Warsager
-Warpool
-Warneka
-Warnasch
-Warmbier
-Warley
-Warick
-Warholic
-Warhola
-Warhol
-Warens
-Wareheim
-Wardrop
-Wardon
-Wardman
-Wardinsky
-Wardian
-Wappel
-Wanvig
-Wanser
-Wanschek
-Wanland
-Waninger
-Wanders
-Wampol
-Walzier
-Walvoord
-Walto
-Waltenbaugh
-Waltemath
-Waloven
-Walman
-Wally
-Wallravin
-Wallor
-Wallinga
-Walles
-Wallentine
-Wallenda
-Walleck
-Wallbrown
-Wallberg
-Wallbank
-Walland
-Wallaker
-Wallaert
-Wallack
-Walkinshaw
-Walking
-Walicki
-Waldrope
-Waldmann
-Waldenberg
-Walczynski
-Walchli
-Walbrecht
-Wakula
-Wakham
-Wakenight
-Wakeling
-Waitkus
-Waisman
-Waisath
-Wainman
-Wahoske
-Wahner
-Wahlenmaier
-Wahid
-Wagon
-Waggaman
-Wagenheim
-Waganer
-Wafula
-Waeyaert
-Waetzig
-Waelti
-Waeckerlin
-Waddouds
-Wackman
-Wackerbarth
-Wachsmuth
-Wabasha
-Vyhnal
-Vuturo
-Vulgamott
-Vukich
-Vrias
-Vranich
-Vrablic
-Votraw
-Voter
-Votaua
-Voskowsky
-Vorwaller
-Vorholt
-Voracek
-Voong
-Vonwagoner
-Vonstaden
-Vonsoosten
-Vonkrosigk
-Vongxay
-Vongvivath
-Vongunten
-Vongsakda
-Vongal
-Vonfeldt
-Vondohlen
-Vonderkell
-Vonbraunsberg
-Vonarx
-Volpert
-Volper
-Volpa
-Volmink
-Vollmering
-Volking
-Volkers
-Volkens
-Volin
-Volesky
-Volckmann
-Vojta
-Voita
-Voights
-Vogtman
-Vogtlin
-Voglund
-Vogland
-Vogenthaler
-Vogelpohl
-Vogds
-Voetmann
-Voedisch
-Vodder
-Voce
-Vlk
-Vlasaty
-Vlasak
-Vlahovich
-Vizza
-Vizuete
-Vivolo
-Vittum
-Vittek
-Vitorino
-Vitkus
-Vititow
-Vitera
-Vitantonio
-Vitaniemi
-Visvardis
-Vissman
-Visovsky
-Visosky
-Visocsky
-Visnosky
-Visnocky
-Viscarro
-Visaya
-Virts
-Virkler
-Virgili
-Virgie
-Virgel
-Virelli
-Viramontas
-Viorel
-Vintinner
-Vintimilla
-Vinsel
-Viniegra
-Vinck
-Villot
-Villenas
-Villemarette
-Villecus
-Villaquiran
-Villane
-Villalouos
-Villaescusa
-Vilkoski
-Vilkama
-Vilca
-Vilaro
-Vilardo
-Vilandre
-Viken
-Vigus
-Viguerie
-Vigorito
-Vigario
-Viessman
-Viesselman
-Viesca
-Vierthaler
-Vierps
-Vientos
-Vienneau
-Vidler
-Victorica
-Vickey
-Vicioso
-Vichidvongsa
-Viccica
-Veysey
-Vespia
-Veselic
-Verzi
-Versele
-Veroba
-Vernet
-Verlotte
-Verigan
-Verhaag
-Vergamini
-Verga
-Verfaille
-Verela
-Vere
-Verdine
-Verdiguel
-Verd
-Verbridge
-Verble
-Verbit
-Verbilla
-Verbasco
-Ventur
-Ventrice
-Ventre
-Ventors
-Venth
-Venosh
-Vennari
-Venkus
-Veninga
-Venible
-Venghaus
-Venetos
-Venere
-Veneable
-Vendelin
-Vemura
-Velzeboer
-Veltre
-Veltin
-Veloso
-Veles
-Vele
-Veld
-Veitz
-Veitenheimer
-Vein
-Veillette
-Vegher
-Vegetabile
-Vegar
-Veerkamp
-Veen
-Vecino
-Vebel
-Veater
-Veader
-Ve
-Vayon
-Vayner
-Vavricek
-Vauter
-Vaulx
-Vaughner
-Vaudreuil
-Vaubel
-Vattikuti
-Vathroder
-Vatch
-Vastola
-Vastardis
-Vassure
-Vassil
-Vassie
-Vasseur
-Vassen
-Vasquiz
-Vasaure
-Varvil
-Vartanyan
-Varron
-Varro
-Vargis
-Varesko
-Varda
-Varanese
-Varakuta
-Varagona
-Vanzante
-Vanyo
-Vanwyngaarden
-Vanwassenhove
-Vanvolkenburg
-Vanvalen
-Vantuyl
-Vantil
-Vanta
-Vanstrom
-Vanslooten
-Vansicklin
-Vanscoik
-Vanschaick
-Vanruiten
-Vanostberg
-Vanorsdol
-Vanolinda
-Vanoflen
-Vannuland
-Vannover
-Vannorsdell
-Vanniello
-Vanni
-Vanner
-Vanmarter
-Vanleuvan
-Vanlaar
-Vankilsdonk
-Vankammen
-Vanhevel
-Vanheukelem
-Vanhee
-Vanhauen
-Vanhamlin
-Vanhamersveld
-Vangyi
-Vangompel
-Vangoff
-Vangerbig
-Vangelos
-Vanfossan
-Vanez
-Vaneffen
-Vandygriff
-Vandy
-Vanduynhoven
-Vandunk
-Vandorien
-Vandon
-Vandiest
-Vandeweert
-Vandevort
-Vandevere
-Vandeveble
-Vandestreek
-Vandesteeg
-Vanderwyk
-Vanderwood
-Vanderwilt
-Vanderwege
-Vanderweerd
-Vanderweel
-Vandertuig
-Vanderstappen
-Vanderschoot
-Vandermoon
-Vanderkaaden
-Vanderhoot
-Vanderboom
-Vanderau
-Vandenacre
-Vandemortel
-Vandeman
-Vandelaare
-Vandebrake
-Vanconant
-Vancleaf
-Vanbogelen
-Vanbenthuyse
-Vanbeck
-Vanasselt
-Vanaprasert
-Vanandel
-Vampa
-Valseca
-Valree
-Valot
-Valorie
-Vallimont
-Vallie
-Vallentine
-Vallelonga
-Vallario
-Vall
-Valgren
-Valer
-Valenzvela
-Valentyn
-Valenstein
-Valenciana
-Valderamo
-Valcin
-Valcho
-Valakas
-Vaksman
-Vakil
-Vaka
-Vajgrt
-Vaissiere
-Vainio
-Vaiko
-Vaghy
-Vaghn
-Vafiadis
-Vafiades
-Vaeza
-Vaeth
-Vadasy
-Vaclavik
-Vacio
-Vaci
-Vache
-Vaccarino
-Vacante
-Uzun
-Uxa
-Uvalles
-Utvik
-Uttley
-Ustico
-Usman
-Usina
-Ushioda
-Ushijima
-Uscio
-Usack
-Urse
-Urrey
-Urreta
-Urraca
-Urness
-Urlanza
-Uriostejue
-Urik
-Urenio
-Urdiano
-Urbieta
-Uptegraft
-Uppencamp
-Unterkofler
-Unnold
-Unnewehr
-Unkn
-Uniacke
-Unglaub
-Unck
-Umnus
-Umezawa
-Umbel
-Ultseh
-Ultreras
-Ulses
-Ullum
-Ulisch
-Ulicnik
-Ulich
-Uleman
-Ukich
-Uken
-Uhrin
-Uhrhammer
-Uhles
-Uhlenhopp
-Ugaz
-Ugaitafa
-Ueki
-Uebersax
-Udinsky
-Udicious
-Ucha
-Uccio
-Uc
-Ubry
-Ubiles
-Ubertini
-Ubence
-Tyssens
-Tysseling
-Tyrance
-Tynio
-Tylman
-Tydings
-Tydeman
-Twohatchet
-Twito
-Twillie
-Twiet
-Twiest
-Tweet
-Tweddell
-Twait
-Tvedt
-Tuxbury
-Tuukanen
-Tutuska
-Tutoni
-Tutela
-Tushoski
-Turvaville
-Turturo
-Turrill
-Turrie
-Turpiano
-Turomsha
-Turocy
-Turnpaugh
-Turnow
-Turnmyre
-Turnier
-Turkmay
-Turkasz
-Turinetti
-Tureson
-Turdo
-Turcio
-Turbiner
-Turbide
-Turber
-Turbe
-Turansky
-Tupy
-Tuppen
-Tuplano
-Tuorto
-Tunon
-Tunget
-Tunby
-Tun
-Tumolillo
-Tumminia
-Tumbleston
-Tullison
-Tulis
-Tuliau
-Tukuafa
-Tukis
-Tujague
-Tuia
-Tugade
-Tuffin
-Tuesburg
-Tuerk
-Tuer
-Tuenge
-Tudruj
-Tudman
-Tudisco
-Tuccio
-Tucay
-Tuberman
-Tsuruda
-Tsuchiura
-Tsuchida
-Tsistinas
-Tshudy
-Tschirhart
-Tschache
-Tsantakis
-Trzaska
-Trythall
-Tryninewski
-Truont
-Trumpp
-Truka
-Truiolo
-Truglio
-Trueluck
-Trudo
-Truchon
-Trucchio
-Trube
-Truan
-Troxil
-Trowel
-Trovinger
-Trotz
-Trotto
-Trosen
-Troost
-Tronzo
-Tront
-Trometter
-Trombino
-Tromba
-Trollope
-Troke
-Trojanovich
-Trojak
-Trohanov
-Trogstad
-Troe
-Trocchio
-Trobridge
-Trobough
-Trnong
-Trivane
-Trippel
-Trimnal
-Trimis
-Trimino
-Trilt
-Trillas
-Trillana
-Triglia
-Trigillo
-Trifone
-Triffo
-Trifero
-Tridenti
-Tricoli
-Tricamo
-Tribue
-Triblett
-Trevithick
-Trevisone
-Trevis
-Trevillian
-Trevethan
-Treves
-Treusdell
-Tretola
-Tretina
-Tretera
-Tressel
-Treola
-Trentz
-Trento
-Trentman
-Trenor
-Trennell
-Trend
-Trenchard
-Tremore
-Tremillo
-Trembinski
-Trelles
-Treister
-Treine
-Treible
-Treff
-Tredinnick
-Treder
-Trebon
-Trebesch
-Trear
-Traviss
-Traux
-Trautner
-Trausch
-Traum
-Trattner
-Trass
-Traphagen
-Trapeni
-Trapalis
-Traner
-Tramonti
-Trainham
-Traicoff
-Trahern
-Traffanstedt
-Trachsel
-Tracewell
-Trabold
-Trabazo
-Tozloski
-Toyota
-Toyn
-Towse
-Townsand
-Towels
-Touton
-Toussand
-Toupe
-Touney
-Toudle
-Touchard
-Touby
-Touart
-Totzke
-Tototzintle
-Totino
-Toting
-Tossie
-Tosco
-Tosch
-Tortu
-Tortolano
-Tortelli
-Torruellas
-Torros
-Torrion
-Torrillo
-Torrico
-Torreblanca
-Torrano
-Torongeau
-Toromanides
-Tornincasa
-Torey
-Toren
-Torbus
-Toquinto
-Topolewski
-Topoian
-Topness
-Toplistky
-Topliffe
-Topal
-Topacio
-Toothacre
-Tooms
-Toolsiram
-Toolan
-Tookmanian
-Tonzi
-Tonti
-Tonschock
-Tonsall
-Tonrey
-Tonnesen
-Tonnar
-Tongate
-Tonetti
-Tonelson
-Tonder
-Tonai
-Tomspon
-Tomski
-Tomshack
-Tomkus
-Tomka
-Tomidy
-Tomichek
-Tomeldan
-Tomehak
-Tombleson
-Tomasson
-Tomasic
-Tomash
-Tomanek
-Tolontino
-Tollin
-Tollerud
-Tollefsen
-Toline
-Tokley
-Tokkesdal
-Tohen
-Togashi
-Tofolla
-Toepperwein
-Toeller
-Toelke
-Toedebusch
-Todt
-Todoroff
-Todor
-Todesco
-Toboz
-Tobolski
-Toaston
-Toa
-Tlumacki
-Tlatenchi
-Tlatelpa
-Tlamka
-Tjandra
-Tix
-Tivis
-Tivar
-Titterness
-Titone
-Titler
-Tith
-Tisi
-Tish
-Tisdel
-Tisdal
-Tischner
-Tipre
-Tippey
-Tipold
-Tinucci
-Tintinger
-Tinnerello
-Tinn
-Tinlin
-Tinger
-Timus
-Timothe
-Timons
-Timonere
-Timon
-Timenez
-Timchula
-Timbrell
-Timas
-Timar
-Tilzer
-Tilus
-Tilt
-Tilow
-Tillou
-Tietge
-Tieng
-Tichnell
-Tichi
-Tibor
-Thy
-Thury
-Thurness
-Thurlby
-Thurby
-Thuney
-Thuma
-Thull
-Thruthley
-Throssell
-Thress
-Threlfall
-Thrapp
-Thrams
-Thraen
-Thouvenel
-Thorstenson
-Thorsness
-Thoroughgood
-Thornborough
-Thormaehlen
-Thorade
-Thonney
-Thompon
-Thometz
-Thomeczek
-Thomases
-Thomae
-Thoburn
-Thobbs
-Thivener
-Thim
-Thilmony
-Thiengtham
-Thielges
-Thieklin
-Thidphy
-Thibaut
-Thibadeau
-Thew
-Theule
-Theuenin
-Thepbanthao
-Theos
-Thell
-Thelin
-Thelemaque
-Theinert
-Theeman
-Theden
-Thebo
-Thansamai
-Thanos
-Thangavelu
-Thanem
-Thanasouk
-Thanas
-Thamann
-Thaman
-Thalls
-Thaller
-Thall
-Thadison
-Tewolde
-Tewa
-Teuteberg
-Teteak
-Testolin
-Tessendorf
-Tess
-Tesmar
-Teschler
-Terwey
-Tertinek
-Terstage
-Terrone
-Terrible
-Terrian
-Terrezza
-Terracciano
-Terp
-Teroganesyan
-Termilus
-Terinoni
-Teri
-Terhorst
-Terherst
-Terazes
-Teravainen
-Teque
-Teoh
-Teodoro
-Tention
-Tenore
-Tenofsky
-Tenn
-Tenhoff
-Tenhaeff
-Tengben
-Tenerovich
-Tener
-Tenda
-Tenario
-Tempelton
-Temoney
-Teman
-Tellefsen
-Telkamp
-Telgen
-Teles
-Telch
-Telander
-Teklu
-Teixeria
-Teissedre
-Teisberg
-Tehney
-Tegner
-Tegan
-Teehee
-Teder
-Teddy
-Tecuanhuey
-Techau
-Tecchio
-Teakell
-Teager
-Taylar
-Tayan
-Tawwab
-Tavolieri
-Taverab
-Tavaris
-Tavana
-Tauzin
-Tautolo
-Tausch
-Taula
-Taualii
-Tattrie
-Tatsuhara
-Taton
-Tatge
-Tatel
-Tastet
-Tassa
-Tasma
-Taskey
-Tashiro
-Taruer
-Taruc
-Tartsah
-Tarski
-Tarrenis
-Tarnoff
-Tarmey
-Tarman
-Tarling
-Tarella
-Tarduno
-Tarboro
-Tarbert
-Taray
-Taras
-Taque
-Tapian
-Taphous
-Tapaoan
-Tanzi
-Tantum
-Tannous
-Tankxley
-Tankesly
-Tanh
-Tangney
-Tangerman
-Tangaro
-Tangari
-Tangabekyan
-Tandus
-Tande
-Tamkin
-Tami
-Tamburrelli
-Tamburino
-Tamborlane
-Tamai
-Talvy
-Talsky
-Talleut
-Tallacksen
-Taliferro
-Talicska
-Talentino
-Talaro
-Talamentez
-Talaga
-Tako
-Taker
-Takara
-Takai
-Tajudeen
-Tajima
-Taitague
-Taillefer
-Tail
-Tahon
-Tagupa
-Taglauer
-Tagalog
-Tagaloe
-Tagala
-Tagaca
-Tag
-Tafiti
-Tafelski
-Taetzsch
-Taegel
-Tadt
-Tadgerson
-Taddio
-Tadd
-Tacopino
-Tacneau
-Tackette
-Tackes
-Tacke
-Tachauer
-Tacason
-Tabuena
-Tabion
-Tabatt
-Szysh
-Szymonik
-Szwede
-Szulimowski
-Szpak
-Szoka
-Szocki
-Szklarski
-Szitar
-Szewc
-Szesterniak
-Szermer
-Szerbin
-Szczepkowski
-Szczeblewski
-Szachewicz
-Szabat
-Syzdek
-Syrrakos
-Syria
-Sypult
-Sypolt
-Synovic
-Syner
-Symkowick
-Symeon
-Sylney
-Sylla
-Syktich
-Syer
-Swopshire
-Swolley
-Swithenbank
-Swiss
-Swirczek
-Swingler
-Swingen
-Swinerton
-Swinea
-Swille
-Swierenga
-Swierczynski
-Swieca
-Swicord
-Swerdloff
-Swenceski
-Swelt
-Swelgart
-Swehla
-Sweets
-Sweem
-Swed
-Sweatmon
-Sweatfield
-Swatman
-Swartzman
-Swartzell
-Swantak
-Swanston
-Swancutt
-Swanay
-Swamm
-Swam
-Swait
-Swainey
-Swaggart
-Swabe
-Swabb
-Svobodny
-Svetlak
-Svennungsen
-Svedine
-Svatos
-Svare
-Svancara
-Suydan
-Suwannakintho
-Suvada
-Suttin
-Suttee
-Sutkus
-Sutic
-Suthers
-Sutcliff
-Suszynski
-Sustar
-Sustaire
-Suskay
-Susany
-Susanin
-Suryanarayana
-Survis
-Surpris
-Suro
-Surminec
-Surguy
-Surgoine
-Sures
-Suren
-Surbella
-Suomela
-Sunyich
-Sunniga
-Sunier
-Sumrow
-Sumption
-Summerlot
-Sumerix
-Sumeriski
-Sultani
-Sulley
-Sullenberger
-Sulipizio
-Sulin
-Sulima
-Sulikowski
-Sulentic
-Sulejmanovski
-Sugabo
-Suffield
-Suentenfuss
-Suehs
-Sudekum
-Sudbrock
-Sucre
-Suchocki
-Suchla
-Sucgang
-Succar
-Subijano
-Subich
-Subert
-Subera
-Suaava
-Stuttgen
-Sturner
-Sturk
-Sturgul
-Sturghill
-Stukowski
-Stuesse
-Stuermer
-Stuer
-Stuebe
-Studyvance
-Studnicki
-Studniarz
-Studmire
-Studdiford
-Stucke
-Stublaski
-Stubby
-Stubbendeck
-Strzalkowski
-Struzzi
-Struzik
-Strubel
-Strozewski
-Strowe
-Strous
-Strotz
-Strombeck
-Stroker
-Strohmayer
-Strogen
-Strizich
-Strini
-Stringari
-Strimling
-Strimback
-Strife
-Strid
-Stricklind
-Stribley
-Strevels
-Strevell
-Streva
-Stretz
-Strenge
-Stremi
-Strelecki
-Strejan
-Streitnatter
-Streff
-Strefeler
-Streeton
-Stred
-Strazisar
-Strayhand
-Strayham
-Stravinski
-Strausz
-Strausner
-Strauhal
-Straugh
-Strasters
-Stranford
-Strandburg
-Stranahan
-Strahin
-Stradtner
-Stracquatanio
-Strachman
-Straathof
-Stpierrie
-Stoviak
-Stovell
-Stoutenger
-Stoudymire
-Stoud
-Stouch
-Stouall
-Stottlar
-Stotko
-Stothard
-Stotesbury
-Stotesberry
-Storto
-Stores
-Storage
-Stoos
-Stonich
-Stolzenburg
-Stolly
-Stolebarger
-Stolcals
-Stolar
-Stoklasa
-Stogden
-Stoffey
-Stofferan
-Stoey
-Stoett
-Stoeltzing
-Stoel
-Stoeke
-Stoeffler
-Stoeckert
-Stoebner
-Stoeberl
-Stodomingo
-Stodder
-Stockwin
-Stockon
-Stocki
-Stockebrand
-Stocco
-Stobie
-Stlouise
-Stives
-Stirn
-Stire
-Stipanuk
-Stingle
-Stinespring
-Stinehour
-Stinebuck
-Stindt
-Stimple
-Stimler
-Stilwagen
-Stiltz
-Stilner
-Stillie
-Stigsell
-Stiern
-Stiens
-Stiehm
-Stiegman
-Stiegemeier
-Stieb
-Stidstone
-Sticklin
-Sticklen
-Stickford
-Sthole
-Stford
-Stflorant
-Steury
-Stetzenbach
-Stetke
-Sterpka
-Sterker
-Sterkenburg
-Sterkel
-Stephensen
-Stepan
-Step
-Stenz
-Stenn
-Stendeback
-Stenbeck
-Stenback
-Sten
-Stemmler
-Stelzl
-Steltzer
-Stellpflug
-Stellfox
-Stelk
-Stele
-Steinruck
-Steinmeiz
-Steinkuehler
-Steinkirchner
-Steinkellner
-Steinerkert
-Steine
-Steinbrink
-Steinbauer
-Steik
-Steighner
-Steiert
-Steich
-Steibel
-Stehno
-Steggeman
-Stefl
-Stefford
-Steffa
-Stefanatos
-Steep
-Steenwyk
-Steenhoven
-Steelmon
-Steeg
-Steeb
-Stedronsky
-Steczo
-Stecklair
-Stechuchak
-Stechlinski
-Steber
-Stebe
-Stearnes
-Stearne
-Stea
-Stdenny
-Stchur
-Stayter
-Stawicki
-Stavrositu
-Staudenmeier
-Stattelman
-Statires
-Station
-Stathos
-Stathas
-Stasulis
-Stassen
-Stasny
-Staser
-Staschke
-Starweather
-Stars
-Starnaud
-Starley
-Starkman
-Starken
-Starich
-Starghill
-Starcevic
-Staplins
-Stapelman
-Stanzak
-Stanway
-Stanowski
-Stankowitz
-Stankaitis
-Staniec
-Stania
-Stangroom
-Stanesic
-Stanert
-Staneart
-Stands
-Standors
-Standifur
-Standeven
-Standaert
-Stancoven
-Stanclift
-Stancey
-Stanbaugh
-Stana
-Stammler
-Stamenov
-Stambach
-Stamatopoulos
-Stamas
-Stalberger
-Stakoe
-Stakley
-Stakkeland
-Stakemann
-Stainbach
-Stagowski
-Stagno
-Stagman
-Stagles
-Stagers
-Staffeld
-Staenglen
-Staehler
-Stadther
-Stadt
-Stadnik
-Stadick
-Stachurski
-Stace
-Stabs
-Stabley
-Stable
-Srygley
-Srinvasan
-Squarciafico
-Squair
-Spyrakos
-Spyies
-Spycher
-Spurger
-Spulick
-Spudis
-Spuck
-Sprygada
-Spruiell
-Spruance
-Sprowls
-Sprouls
-Sprong
-Sprole
-Springe
-Sprewell
-Sprengelmeyer
-Sprawls
-Sprauve
-Spragley
-Spotorno
-Sporysz
-Sporman
-Sporich
-Spoonemore
-Spoleti
-Spohnholz
-Splitt
-Splett
-Splatt
-Spiter
-Spirounias
-Spirk
-Spire
-Spinoza
-Spinn
-Spinetti
-Spinello
-Spinar
-Spilis
-Spiliakos
-Spigutz
-Spielvogel
-Spicknall
-Spicker
-Sperier
-Speraw
-Spennicchia
-Spene
-Spellane
-Spegal
-Spee
-Specken
-Spearow
-Spearmon
-Spayd
-Spartin
-Spartichino
-Spart
-Sparacina
-Spannuth
-Spanner
-Spanicek
-Spanger
-Spane
-Spakes
-Spadard
-Spacht
-Spacagna
-Sozio
-Soyke
-Sowl
-Sowden
-Sowada
-Sovel
-Souvannakhily
-Souto
-Southand
-Sourlis
-Soulliere
-Souhrada
-Sou
-Sotos
-Sothen
-Sosbe
-Sorzano
-Sorvig
-Sortland
-Sorokata
-Soro
-Sorlie
-Sorhaindo
-Sorell
-Sordia
-Sorace
-Soptick
-Soppeland
-Sophy
-Sopczak
-Sooy
-Soop
-Soomaroo
-Soolua
-Sonterre
-Sonsteng
-Sonnefeld
-Sonnee
-Sonka
-Songy
-Sondrup
-Sondles
-Sondheimer
-Sonderman
-Sonderegger
-Somvang
-Somsy
-Somrak
-Somoza
-Somogye
-Somo
-Sommons
-Sommar
-Somji
-Somilleda
-Somerfield
-Somdah
-Somayor
-Solwold
-Solverud
-Soltow
-Soltmann
-Solow
-Solorsano
-Solonar
-Solomen
-Sollors
-Sollitto
-Solliday
-Solito
-Solinas
-Solima
-Solies
-Solien
-Solich
-Solian
-Solhjem
-Solera
-Soldeo
-Solazar
-Solarski
-Solaita
-Soladine
-Sokul
-Sokotowski
-Sokolski
-Sokolowich
-Sojo
-Soito
-Soiro
-Soifer
-Softich
-Sofer
-Soechting
-Sodini
-Sodervick
-Soders
-Sodawasser
-Sockey
-Sobrio
-Sobieraj
-Sobeski
-Sobery
-Soberanes
-Sobenes
-Sobe
-Sobanski
-Soape
-Snowder
-Snorden
-Snode
-Snetsinger
-Snaples
-Snaer
-Snaders
-Smyrski
-Smyntek
-Smykowski
-Smutzler
-Smutny
-Smulik
-Smugala
-Smuck
-Smolnicky
-Smolinsky
-Smitty
-Smithe
-Smiling
-Smiler
-Smigiel
-Smerdon
-Smeja
-Smedes
-Smeathers
-Smarra
-Smar
-Smallmon
-Smallin
-Smallidge
-Slyton
-Slutsky
-Sluski
-Slovinski
-Sloter
-Slonecker
-Slomer
-Slogeris
-Slobodnik
-Sloanes
-Slipper
-Slingluff
-Slingland
-Sliney
-Slimko
-Sliman
-Slimak
-Slessman
-Slepski
-Sleppy
-Sleiman
-Sleaford
-Slaugenhaupt
-Slark
-Slackman
-Slaboda
-Skyes
-Skweres
-Skwarek
-Skubik
-Skrzypinski
-Skrebes
-Skrabanek
-Skovlund
-Skotnicki
-Skone
-Skonczewski
-Skold
-Skoien
-Skoczen
-Skobiak
-Skimehorn
-Skillpa
-Skillett
-Skillan
-Skildum
-Skibski
-Skibo
-Skevofilakas
-Skepple
-Skarzynski
-Skartvedt
-Skar
-Skapura
-Skaflen
-Skaer
-Skabo
-Sjulstad
-Sjerven
-Sizar
-Sixt
-Sixsmith
-Siwicki
-Sivills
-Sivilay
-Sivie
-Sivick
-Sivay
-Sivalia
-Sival
-Siurek
-Siuda
-Sittre
-Sittner
-Sittman
-Sitterding
-Sitosky
-Sitkiewicz
-Sistek
-Sista
-Sisomphou
-Sisofo
-Sisley
-Siskin
-Sisavath
-Sirpilla
-Sirosky
-Sirolli
-Siroka
-Sirna
-Sirico
-Sirhan
-Siravo
-Sipriano
-Sippy
-Siphan
-Siona
-Siok
-Sinrich
-Sington
-Singharath
-Singewald
-Singerman
-Sinarath
-Simple
-Simper
-Simor
-Simoniello
-Simonetty
-Simonet
-Simokat
-Simoens
-Simmond
-Simmes
-Simitian
-Simich
-Simerson
-Simensky
-Simcock
-Silvestrini
-Silvaggio
-Siluis
-Siltman
-Silovich
-Sillitoe
-Silkenson
-Siliezar
-Silevinac
-Silence
-Silbiger
-Silao
-Sil
-Sikarskie
-Siglow
-Siglar
-Sifre
-Sifontes
-Sifers
-Sievertsen
-Sieverson
-Sieve
-Sietz
-Siert
-Sieradski
-Sier
-Sielaff
-Sieja
-Siedner
-Siedel
-Siebenthal
-Sidorowicz
-Sidley
-Sidi
-Sideman
-Sicks
-Sickel
-Sickafoose
-Sicinski
-Sibounma
-Sibgert
-Sibeto
-Sibel
-Sibal
-Siar
-Siaperas
-Siami
-Sialana
-Shyne
-Shybut
-Shwab
-Shutty
-Shutters
-Shusterman
-Shurr
-Shurak
-Shuptrine
-Shupert
-Shummon
-Shulthess
-Shult
-Shulse
-Shullick
-Shulick
-Shulenberger
-Shuffleburg
-Shubov
-Shry
-Shrigley
-Shren
-Shrawder
-Showen
-Shoulder
-Shorthair
-Shopbell
-Shoobridge
-Shongo
-Shoman
-Shollenbarger
-Shoji
-Shofestall
-Shodunke
-Shober
-Shivy
-Shisila
-Shirvanian
-Shirakawa
-Shippen
-Ship
-Shinsky
-Shinnick
-Shinkel
-Shingleur
-Shingledecker
-Shindel
-Shimon
-Shimaoka
-Shilo
-Shillito
-Shillingsford
-Shilkuski
-Shiliata
-Shildneck
-Shikuma
-Shike
-Shigeta
-Shigemi
-Shifferd
-Shider
-Shibi
-Shettleroe
-Shetterly
-Sherville
-Sherrock
-Sherrange
-Sherraden
-Sherles
-Sherief
-Sherbon
-Shepperdson
-Shenker
-Sheneman
-Shene
-Shempert
-Sheman
-Shelvy
-Shelsy
-Shelkoff
-Shekels
-Sheirich
-Sheingold
-Sheidler
-Shehee
-Shefte
-Sheftall
-Sheerer
-Sheer
-Sheakley
-Shbi
-Shawber
-Shatek
-Shasky
-Shary
-Sharplin
-Sharperson
-Sharabi
-Shappen
-Shapouri
-Shapleigh
-Shapino
-Shaper
-Shanno
-Shandro
-Shanberg
-Shamsi
-Shammah
-Shamir
-Shamily
-Shalwani
-Shalla
-Shaline
-Shalhoub
-Shakoor
-Shakin
-Shahinfar
-Shahin
-Shahim
-Shahbaz
-Shaffren
-Shaffen
-Shadfar
-Shadding
-Shadazz
-Shaben
-Shabel
-Sgueglia
-Sgrignoli
-Sgammato
-Seykoski
-Seyb
-Sewyerd
-Seweall
-Sewade
-Severi
-Seveney
-Sevadjian
-Settlemyre
-Settlemires
-Settino
-Settimo
-Setterland
-Seton
-Setler
-Setias
-Seti
-Setchell
-Setaro
-Sestoso
-Sessin
-Sesser
-Serville
-Servi
-Servedio
-Serve
-Serravalli
-Sermersheim
-Serfoss
-Serfling
-Serey
-Seres
-Serens
-Serene
-Sercovich
-Serban
-Seratti
-Seratt
-Serasio
-Serandos
-Seraiva
-Seraille
-Sepvlieda
-Sepulbeda
-Septelka
-Seppelt
-Seppanen
-Seppa
-Senz
-Senst
-Sensor
-Sensmeier
-Sensing
-Senseney
-Sensenbrenner
-Senseman
-Seniff
-Sengvilay
-Sengun
-Senethavilouk
-Senesenes
-Senderling
-Sender
-Senavanh
-Semsem
-Semonis
-Seminario
-Sember
-Selzler
-Selvester
-Selusi
-Selnes
-Sellin
-Sellards
-Selkey
-Selic
-Selgrade
-Selesnick
-Selakovic
-Seiters
-Seit
-Seisler
-Seil
-Seikaly
-Seidenbecker
-Seibt
-Seibers
-Seiavitch
-Segreto
-Segonia
-Seggerman
-Segerman
-Segelhorst
-Seferovic
-Sefcheck
-Seering
-Seemer
-Seekford
-Seekamp
-Seegar
-Seedorff
-Seedborg
-Seebaum
-Sedanos
-Secundo
-Second
-Seckletstewa
-Sechang
-Sebranek
-Sebion
-Sebero
-Sebeniecher
-Sebasovich
-Searer
-Seara
-Seanger
-Seajack
-Seaholtz
-Seagers
-Seaforth
-Seacrest
-Seacat
-Seaburn
-Sdoia
-Sczbecki
-Scurci
-Scullin
-Scuito
-Scudero
-Scucchi
-Scsarpisnato
-Scro
-Scrivener
-Scriuner
-Scripps
-Scrimsher
-Scrichfield
-Screnci
-Scrape
-Scouller
-Scotts
-Scotting
-Scorgie
-Scollan
-Sciullo
-Scites
-Scicutella
-Scialpi
-Sciacchitano
-Schy
-Schworm
-Schwizer
-Schwister
-Schwipps
-Schwertfeger
-Schwerdt
-Schwerd
-Schwenzer
-Schwenneker
-Schwendeman
-Schwemmer
-Schweitz
-Schwarzlose
-Schwart
-Schwantd
-Schwadron
-Schutze
-Schute
-Schusted
-Schurk
-Schumachor
-Schulter
-Schultens
-Schulkin
-Schulist
-Schuit
-Schuering
-Schueren
-Schueneman
-Schuemann
-Schuchat
-Schuber
-Schubach
-Schrumpf
-Schroot
-Schroen
-Schroedter
-Schreuder
-Schreacke
-Schrayter
-Schrawder
-Schrauger
-Schraub
-Schrameck
-Schraff
-Schradle
-Schrab
-Schowengerdt
-Schossow
-Schopmeyer
-Schopflin
-Schop
-Schomin
-Schomas
-Schomacker
-Scholtens
-Scholin
-Schoggen
-Schoessow
-Schoepfer
-Schoenmaker
-Schoenig
-Schoelman
-Schoellkopf
-Schoell
-Schoeben
-Schoderbek
-Schockley
-Schnure
-Schnorbus
-Schnopp
-Schnobrich
-Schnitz
-Schnickel
-Schnibbe
-Schnepf
-Schnelder
-Schneidman
-Schneeberger
-Schnackel
-Schmollinger
-Schmoak
-Schmittou
-Schmiot
-Schmille
-Schmier
-Schmiel
-Schmiedeskamp
-Schmidtka
-Schmidlin
-Schmertz
-Schmerge
-Schmerer
-Schmelmer
-Schmeidler
-Schmautz
-Schmauder
-Schmatz
-Schmand
-Schmaling
-Schlund
-Schlumaker
-Schlotthauer
-Schlotte
-Schlotfeldt
-Schlote
-Schlossman
-Schloemann
-Schlindwein
-Schlimmer
-Schlieter
-Schlichenmaye
-Schleppy
-Schlenger
-Schleker
-Schleibaum
-Schleh
-Schlecter
-Schlaefli
-Schladweiler
-Schlabs
-Schirrmacher
-Schiralli
-Schinnell
-Schinker
-Schingeck
-Schindewolf
-Schimel
-Schilsky
-Schilk
-Schilder
-Schifko
-Schiffmann
-Schierenbeck
-Schierbrock
-Schielke
-Schieferstein
-Schiefen
-Schickedanz
-Schey
-Scheuren
-Scheuers
-Scherschligt
-Scherma
-Scherbring
-Scherbel
-Scheno
-Schenfeld
-Schells
-Schellin
-Schellermann
-Scheiern
-Scheiderer
-Schegetz
-Scheffrahn
-Scheffert
-Schechinger
-Schavone
-Schaunt
-Schaumann
-Schauble
-Schaubhut
-Schatzle
-Scharmann
-Scharler
-Scharbrough
-Schap
-Schanzenbach
-Schantini
-Schange
-Schandel
-Schammel
-Schallig
-Schaffter
-Schaffeld
-Schaffel
-Schafersman
-Schaen
-Schachterle
-Schachsieck
-Schabbing
-Scelzo
-Scelsi
-Scavo
-Scavetta
-Scaturro
-Scatenato
-Scarpitto
-Scarpitta
-Scarpato
-Scarpati
-Scarp
-Scarlato
-Scargall
-Scarfi
-Scantlen
-Scanneu
-Scannapieco
-Scanio
-Scandrett
-Scandalios
-Scancarello
-Scamehorn
-Scalzi
-Scallorn
-Scallion
-Scalet
-Scaiano
-Scaia
-Scagliotti
-Scace
-Sboro
-Sbarra
-Saysongkham
-Saysana
-Sayloe
-Saxinger
-Saxfield
-Sawtell
-Sawransky
-Sawhill
-Sawatzki
-Sawaia
-Savitch
-Savinar
-Savi
-Saven
-Savas
-Savaria
-Savakis
-Sava
-Sauveur
-Sausser
-Saurey
-Sauredo
-Saunas
-Saulsbery
-Sauger
-Sauerhage
-Sauerbry
-Sauce
-Sauby
-Satz
-Sattlefield
-Satmary
-Sathiraboot
-Satchwell
-Sat
-Sasuille
-Sashington
-Sasengbong
-Sasao
-Sarwar
-Sarrell
-Sarraga
-Saroop
-Sarnes
-Sarnacki
-Sarlo
-Sarks
-Sarkodie
-Sark
-Sargis
-Sargetakis
-Saretto
-Sarette
-Sarensen
-Sarcinelli
-Sarcinella
-Sarcia
-Saras
-Saranzak
-Saraniti
-Sarani
-Sarafian
-Saraf
-Sarac
-Sarabando
-Saporita
-Sapnu
-Sapko
-Saous
-Sanzenbacher
-Santti
-Santrizos
-Santoscoy
-Santomauro
-Santolucito
-Santis
-Santio
-Santilukka
-Santaloci
-Santagata
-Santaella
-Sanseda
-Sanquenetti
-Sanots
-Sanosyan
-Sann
-Sanmarco
-Sanlatte
-Sankovich
-Sanke
-Sankary
-Sankaran
-Sanislo
-Sanipasi
-Saniger
-Sangren
-Sanghez
-Saneaux
-Sandstedt
-Sandry
-Sandovar
-Sandos
-Sandone
-Sandness
-Sandlan
-Sandison
-Sandersen
-Sandborg
-Sanchz
-Sanchec
-Sancen
-Sanasith
-Samway
-Samuell
-Sampselle
-Sampieri
-Sampair
-Samoyoa
-Samowitz
-Sammut
-Samiec
-Samick
-Samele
-Sambucetti
-Samara
-Samantha
-Samanlego
-Salverson
-Salvature
-Saluto
-Saluja
-Saltourides
-Saltmarsh
-Salta
-Salsberg
-Saloum
-Salos
-Saloom
-Sallings
-Sallies
-Sallah
-Salisberry
-Salimas
-Salfelder
-Salesses
-Salen
-Saleado
-Saldvir
-Saldi
-Saldeen
-Salceda
-Salazan
-Salaza
-Salay
-Salandy
-Sakshaug
-Sakovitch
-Sakkinen
-Sakkas
-Sakiestewa
-Sakic
-Sakakeeny
-Saison
-Saisa
-Saintfleur
-Saide
-Saicedo
-Sahsman
-Sahli
-Sahler
-Sahlberg
-Sahagian
-Saggione
-Sages
-Sagendorf
-Safron
-Safar
-Saetteurn
-Saenphimmacha
-Sadhu
-Sadhra
-Saden
-Sadee
-Saddat
-Sackos
-Sachleben
-Saches
-Sachar
-Saccucci
-Sacane
-Sablone
-Sablock
-Sablea
-Sabiston
-Sabini
-Sabi
-Sabha
-Sabellico
-Sabaj
-Saadd
-Ryun
-Rysavy
-Rysanek
-Rylowicz
-Ryll
-Ryken
-Rygiewicz
-Rydalch
-Rychlicki
-Rybowiak
-Ryal
-Ruzycki
-Ruyz
-Ruwet
-Rutley
-Ruthenberg
-Ruszala
-Rusteika
-Rusteberg
-Russotto
-Russotti
-Russman
-Russek
-Russe
-Rusley
-Rusich
-Rushworth
-Rushman
-Rushforth
-Ruscitti
-Ruscio
-Ruschmann
-Ruschel
-Rusak
-Rupertus
-Ruoho
-Runzler
-Runyons
-Runswick
-Runfola
-Rumney
-Rummler
-Rumford
-Rumburd
-Rumbold
-Ruman
-Rulnick
-Rujawitz
-Ruhstorfer
-Ruhmann
-Ruhling
-Ruhlin
-Ruggiere
-Ruggero
-Rugga
-Rugama
-Ruffolo
-Ruether
-Ruesswick
-Ruell
-Rudnitski
-Rudnicky
-Rudish
-Rudicil
-Rudes
-Rudeen
-Rubow
-Rubloff
-Rubison
-Rubinow
-Ruberte
-Rubenacker
-Rubarts
-Ruballos
-Rubal
-Rozgonyi
-Rozga
-Rozenberg
-Rozas
-Rozance
-Roytek
-Rowsell
-Rowray
-Rowold
-Rowntree
-Rowlins
-Rowling
-Rowback
-Rovelto
-Rovella
-Rovack
-Rouzzo
-Rout
-Roussos
-Rounkles
-Roundabush
-Rouisse
-Rougier
-Rouff
-Roudybush
-Roucoulet
-Roubekas
-Rotstein
-Rothmann
-Rothhaupt
-Rothfus
-Rothenburger
-Rothbauer
-Rothacher
-Rotering
-Roszales
-Rossnagel
-Rossingnol
-Rossing
-Rosselle
-Roskovensky
-Roskop
-Rositano
-Rosine
-Rosich
-Rosettie
-Rosentrance
-Rosenthall
-Rosenkoetter
-Rosenheim
-Rosenbarger
-Rosekrans
-Rosebure
-Roseboom
-Roscow
-Roscorla
-Rosbozom
-Rosavio
-Rosacker
-Ropiski
-Ronzoni
-Rons
-Rondell
-Ronde
-Roncskevitz
-Romulus
-Rompf
-Romjue
-Romenesko
-Rombult
-Rombardo
-Romaniak
-Romandia
-Romanchuk
-Romag
-Rolseth
-Rollind
-Rollend
-Rolfsen
-Rolff
-Rolek
-Rokusek
-Rohs
-Rohowetz
-Rohlack
-Rohla
-Rogugbakaa
-Roguemore
-Rogosky
-Roginson
-Roggero
-Roggensack
-Roggenbaum
-Roggeman
-Roever
-Roetzler
-Roettgen
-Roessing
-Roerish
-Roemhild
-Roehling
-Roede
-Roeber
-Rodriuez
-Rodrigeuz
-Rodnguez
-Rodis
-Rodinson
-Rodine
-Rodemoyer
-Rodeigues
-Rodea
-Roddick
-Rodar
-Rodamis
-Rodal
-Rockymore
-Rockelman
-Rockafellow
-Rocho
-Rochlin
-Rochenstire
-Rocasah
-Roblow
-Roblodowski
-Robinzine
-Robinsons
-Robinso
-Robinault
-Robilotto
-Robichard
-Robeza
-Robertos
-Roberrtson
-Robblee
-Robante
-Roats
-Roatch
-Roaoo
-Roanhorse
-Roal
-Roacho
-Rizas
-Rivord
-Riveroll
-Riverman
-Rivel
-Ritzke
-Ritzie
-Ritums
-Ritson
-Ritchlin
-Ritari
-Ristaino
-Rissell
-Rissanen
-Risler
-Riskalla
-Risius
-Rishell
-Risha
-Risewick
-Risden
-Rische
-Riscen
-Risbeck
-Riquelme
-Ripoll
-Rioz
-Riofrio
-Riobe
-Rinnert
-Rinkus
-Rininger
-Ringland
-Ringhouse
-Ringelspaugh
-Rinebold
-Rindler
-Rinderle
-Rimm
-Rillera
-Riise
-Riippi
-Rightnour
-Rightley
-Riggings
-Rigger
-Riffee
-Rifenbery
-Riexinger
-Riesland
-Rieske
-Riesinger
-Rieley
-Riekert
-Rief
-Riedlinger
-Ridgnal
-Ridgle
-Ridgill
-Ridep
-Ridel
-Riddleberger
-Ridders
-Riculfy
-Rickford
-Richters
-Richmann
-Richlin
-Richiusa
-Richerds
-Richan
-Ricenberg
-Ricaud
-Ricardi
-Ribsamen
-Ribron
-Ribiero
-Ribero
-Ribbink
-Rhump
-Rhum
-Rhorer
-Rhoe
-Rhoan
-Rhoad
-Rhinerson
-Rhen
-Reznicek
-Reyner
-Reyne
-Reynaldo
-Reyelts
-Rewerts
-Rewakowski
-Revira
-Revils
-Revering
-Revera
-Revelli
-Revay
-Reuteler
-Reust
-Reuschel
-Reudink
-Retzloff
-Rethmeier
-Retek
-Retchless
-Retamar
-Ressel
-Respicio
-Respes
-Respers
-Resos
-Resetar
-Resenz
-Resecker
-Res
-Rerucha
-Requarth
-Reprogle
-Repoff
-Replin
-Repetowski
-Repasky
-Reola
-Renzoni
-Renzo
-Renyer
-Rentoulis
-Rentie
-Renouf
-Renosky
-Renigar
-Renert
-Rendler
-Rend
-Remondet
-Remis
-Remian
-Remele
-Remeder
-Rellama
-Rekus
-Rekemeyer
-Reives
-Reitter
-Reistetter
-Reinsvold
-Reinsfelder
-Reinowski
-Reinier
-Reing
-Reinen
-Reineccius
-Reindeau
-Reinbolt
-Reimnitz
-Reimmer
-Reihl
-Reihing
-Reigleman
-Reighley
-Reidherd
-Reidhaar
-Reichow
-Reibman
-Reial
-Rehse
-Rehmert
-Rehlander
-Reher
-Rehbock
-Regulski
-Regueira
-Regn
-Reginaldo
-Regelman
-Regar
-Refsal
-Refazo
-Reemer
-Reefer
-Redlon
-Redkey
-Redinbo
-Rediker
-Redig
-Redemer
-Redcross
-Redal
-Recuparo
-Recksiek
-Reckers
-Recidivi
-Rechichi
-Reburn
-Rebold
-Rebik
-Rebar
-Reavish
-Reaver
-Reavely
-Reash
-Reaollano
-Reagey
-Readinger
-Readdy
-Razon
-Rayyan
-Rayshell
-Rayow
-Rayome
-Rayhel
-Raychard
-Rayam
-Rawi
-Rawhouser
-Rawat
-Ravizee
-Raviele
-Ravago
-Rautenstrauch
-Raulino
-Raul
-Rauhecker
-Rauhe
-Raught
-Rauco
-Raucci
-Ratzloff
-Rattu
-Rattell
-Rattanasinh
-Ratsep
-Ratkovich
-Rathrock
-Rathel
-Rathai
-Ratana
-Rasual
-Rastetter
-Rastegar
-Rasset
-Raspotnik
-Raspa
-Rasool
-Rasole
-Rasley
-Raskey
-Rasico
-Rasavong
-Ras
-Rarogal
-Rarden
-Raptis
-Rappl
-Rapkowicz
-Rapisura
-Rapanot
-Rapalo
-Rapacki
-Ranweiler
-Ransonet
-Ransler
-Ranni
-Ranmar
-Ranks
-Ranildi
-Randgaard
-Randahl
-Ranch
-Ranaudo
-Ranah
-Ramsy
-Ramsour
-Ramshur
-Ramsby
-Ramrirez
-Rampy
-Rampulla
-Rampadarat
-Rampa
-Ramonez
-Ramler
-Ramlall
-Ramjhon
-Ramjan
-Ramirel
-Rametta
-Ramelli
-Ramelize
-Ramelb
-Ramdeo
-Ramcharran
-Ramaudar
-Ramal
-Ramagano
-Ramach
-Rakyta
-Rakus
-Rakestrow
-Rakers
-Rajk
-Rajas
-Rajaphoumy
-Raisley
-Raisler
-Raisin
-Rais
-Railes
-Raike
-Raigosa
-Rahoche
-Rahmes
-Rahib
-Rahaman
-Ragus
-Ragula
-Raguay
-Raglow
-Rafus
-Rafey
-Rafel
-Rafala
-Raethke
-Raemer
-Raef
-Raeder
-Radziwon
-Radwick
-Radwanski
-Radoslovich
-Radon
-Radmall
-Radlinski
-Radie
-Raderstorf
-Radej
-Raddle
-Raczak
-Racko
-Raciti
-Racioppo
-Racer
-Rabuse
-Rabsatt
-Rabjohn
-Rabito
-Rabey
-Rabeneck
-Rabehl
-Rabeck
-Rabbe
-Rabal
-Quivoz
-Quiver
-Quituqua
-Quitugua
-Quittner
-Quitter
-Quitero
-Quitedo
-Quirke
-Quiram
-Quiralte
-Quintard
-Quintania
-Quinnan
-Quinlivan
-Quilter
-Quillman
-Quillan
-Quilindrino
-Quiel
-Quidas
-Quicho
-Quibodeaux
-Quezergue
-Quezad
-Quettant
-Queros
-Querio
-Quercioli
-Quenzel
-Quencer
-Queller
-Quebral
-Quatrevingt
-Quashnock
-Quasdorf
-Quartuccio
-Quartiero
-Quartieri
-Quartaro
-Quarrell
-Quanstrum
-Quammen
-Qualheim
-Quagliato
-Quadnau
-Qua
-Qasba
-Qare
-Qadeer
-Pywell
-Pysher
-Pyros
-Pyfrom
-Pyfer
-Pyette
-Pychardo
-Puzon
-Putzer
-Putton
-Putcha
-Puskarich
-Push
-Purkhiser
-Purfeerst
-Puraty
-Puotinen
-Puntillo
-Punihaole
-Pundsack
-Puna
-Pulwer
-Pullus
-Pullara
-Puita
-Puhrman
-Puhr
-Puhl
-Puffenberger
-Puerto
-Puent
-Pudenz
-Pucket
-Pucker
-Public
-Ptaschinski
-Psuty
-Psuik
-Psilovikos
-Przybyl
-Przeniczny
-Prye
-Prybylski
-Prukop
-Pruessner
-Provosty
-Provorse
-Provins
-Provino
-Provenzo
-Provent
-Protich
-Protas
-Pross
-Prosienski
-Prosenick
-Proscia
-Prosak
-Propheter
-Promisco
-Promer
-Prokup
-Prokos
-Progl
-Profeta
-Profera
-Profancik
-Procsal
-Prociuk
-Prochak
-Proch
-Procaccino
-Prizio
-Privado
-Pritzker
-Pritzel
-Pritcher
-Pritchell
-Prisoc
-Priolean
-Prinn
-Prindiville
-Princevalle
-Primos
-Prima
-Prigg
-Priego
-Priegnitz
-Prible
-Pribish
-Pribbenow
-Prevot
-Prevet
-Pretzer
-Pretzel
-Prety
-Presume
-Prestley
-Prestipino
-Presnal
-Preslipsky
-Presiado
-Prendes
-Prejsnar
-Preist
-Preissner
-Preisner
-Preheim
-Prefontaine
-Predom
-Precissi
-Prechtel
-Precht
-Prause
-Pratten
-Prately
-Prante
-Prang
-Pramuk
-Praley
-Prakoth
-Prach
-Pozar
-Poynton
-Powskey
-Powsey
-Powlen
-Powells
-Pourvase
-Pourner
-Pourier
-Pourchot
-Pouncil
-Poulisse
-Poulet
-Pouk
-Pouche
-Potulski
-Pottkotter
-Pottichen
-Potteiger
-Potsander
-Pothoven
-Potanovic
-Potaczala
-Posusta
-Posto
-Postles
-Postiglione
-Postemski
-Possinger
-Possick
-Possehl
-Pospicil
-Poskitt
-Poska
-Posis
-Portnoff
-Portello
-Porris
-Porres
-Porep
-Porell
-Porat
-Popularis
-Poppo
-Popadiuk
-Pooyouma
-Pooschke
-Poort
-Poolheco
-Ponsler
-Poniatowski
-Pomykala
-Pompi
-Pomilla
-Pomiecko
-Pomfret
-Polzer
-Polvino
-Poltrock
-Polton
-Polter
-Polski
-Poloskey
-Pollot
-Pollnow
-Polivick
-Polisoto
-Polintan
-Poliks
-Polikoff
-Policicchio
-Policastri
-Policare
-Poletski
-Polee
-Poledore
-Polacco
-Pokrzywa
-Pokallas
-Pointe
-Poinelli
-Pohorilla
-Pohlson
-Pogozelski
-Pogorelc
-Poellinetz
-Podwoski
-Podeszwa
-Pod
-Pocklington
-Pociengel
-Pochatko
-Pocekay
-Pocai
-Poague
-Pniewski
-Plutt
-Plumbar
-Pluma
-Plotzker
-Plotrowski
-Ploskunak
-Ploennigs
-Plimpton
-Plienis
-Plewinski
-Plett
-Pleskac
-Pleshe
-Plesant
-Pleppo
-Plegge
-Playl
-Plavnik
-Plateroti
-Plateros
-Plastow
-Plassmeyer
-Plassman
-Planer
-Plance
-Planagan
-Plan
-Plamondin
-Plainy
-Plackett
-Placino
-Plachecki
-Placeres
-Plaas
-Pjetrovic
-Pizzulo
-Pizzini
-Pizzico
-Pivec
-Pitpitan
-Pitorak
-Pitocco
-Pitka
-Pitch
-Pitcairn
-Pitarresi
-Piszczek
-Pistelli
-Piskel
-Pisicchio
-Piserchio
-Piscitello
-Pirrotta
-Pirrello
-Pirre
-Pirozhkov
-Pirollo
-Pirieda
-Pipper
-Pipia
-Pioske
-Piombino
-Pinzino
-Pintello
-Pinsonneault
-Pinsoneault
-Pinn
-Pinkenburg
-Pinke
-Pindell
-Pinchock
-Pince
-Pimple
-Pim
-Piluso
-Pillon
-Pillarella
-Pillado
-Pilkey
-Pilette
-Pilchowski
-Piirto
-Pihlaja
-Piggie
-Piganelli
-Piety
-Pietrowicz
-Pietrok
-Pietrini
-Piesco
-Piertraccini
-Piersiak
-Pierrot
-Pierdon
-Pierannunzio
-Pientka
-Pielow
-Piela
-Piek
-Piegaro
-Piefer
-Piecuch
-Pidro
-Picotte
-Pickman
-Picketts
-Picketpin
-Pickerell
-Pickenpaugh
-Pichoff
-Picher
-Piccuillo
-Piccirilli
-Piccinone
-Piccinich
-Piccillo
-Picchetti
-Piatz
-Piao
-Piacitelli
-Piacenza
-Phyfe
-Phurrough
-Phuong
-Phuma
-Phuaphes
-Phramany
-Phoubandith
-Phommajack
-Phom
-Pho
-Phimsoutham
-Phimpradapsy
-Philmore
-Phillies
-Philliber
-Philio
-Phildor
-Philabaum
-Phi
-Phetsanghane
-Phetphongsy
-Phelp
-Phaymany
-Pharmer
-Pharao
-Phanthavongsa
-Pfrommer
-Pfoutz
-Pforr
-Pfnister
-Pflugradt
-Pflugrad
-Pfleuger
-Pfingsten
-Pfifer
-Pfeiffenberge
-Pfefferkorn
-Pfanstiel
-Pfander
-Pfalmer
-Pfaffinger
-Pezley
-Pezina
-Pezez
-Peyser
-Pevahouse
-Petula
-Petton
-Pettipas
-Pettijohn
-Pettigrove
-Pettay
-Petrouits
-Petropulos
-Petronzio
-Petronella
-Petrilli
-Petriccione
-Petric
-Petrecca
-Petralia
-Petr
-Petka
-Petigny
-Petesic
-Petersik
-Petek
-Petanick
-Petalcu
-Peszynski
-Pessolano
-Pesses
-Pesicka
-Peschong
-Pesarchick
-Pesantes
-Perza
-Pertea
-Persyn
-Persten
-Persch
-Perrota
-Perrot
-Perriott
-Perring
-Perrilloux
-Perrette
-Perrelli
-Perrell
-Pernod
-Pernin
-Perniciaro
-Pernesky
-Permann
-Perlson
-Perkiss
-Perina
-Perie
-Perencevich
-Peredz
-Percey
-Peraha
-Peplau
-Pepka
-Pepion
-Penzien
-Penzel
-Penya
-Penwarden
-Penticoff
-Pensky
-Pensick
-Pensa
-Pennelle
-Penird
-Penhallurick
-Penha
-Pengra
-Penderel
-Pendegraft
-Pencak
-Pemelton
-Peluse
-Pelnar
-Pellom
-Pellitteri
-Pelligrino
-Pellietier
-Pellicone
-Pelletiu
-Pellet
-Pellam
-Peleg
-Pekas
-Pekara
-Pehowich
-Peha
-Pegeron
-Peffly
-Pefferkorn
-Peetoom
-Peerzada
-Peecha
-Peduzzi
-Pedralba
-Pedez
-Pedeare
-Pecinousky
-Pechaira
-Pecatoste
-Pecarina
-Pecararo
-Pearyer
-Peacy
-Peachay
-Payseur
-Payor
-Payna
-Payant
-Payamps
-Pax
-Pawluch
-Pavliska
-Pavis
-Pavelski
-Pavella
-Pav
-Pauza
-Pausch
-Paulshock
-Paulseth
-Paulmino
-Paulic
-Paulauskis
-Paulauskas
-Paulas
-Pauker
-Paugsch
-Patzner
-Patzke
-Patwell
-Patuel
-Pattyre
-Pattinson
-Pattengale
-Patriquin
-Patrin
-Patrias
-Patria
-Patolot
-Patik
-Paterniti
-Patellis
-Patches
-Patcher
-Patanella
-Pataki
-Patajo
-Pasvizaca
-Pastures
-Pasto
-Pastian
-Passerino
-Passer
-Paskow
-Pasket
-Pasinski
-Pasho
-Pashea
-Pashal
-Pascorell
-Pascoal
-Pascanik
-Pascall
-Pasaya
-Pasana
-Paruta
-Party
-Partman
-Partipilo
-Partenope
-Partelow
-Part
-Parsygnat
-Parsh
-Parsells
-Parrotta
-Parron
-Parrington
-Parrin
-Parriera
-Parreno
-Parquette
-Parpan
-Parone
-Parnin
-Parms
-Parmantier
-Parkos
-Parkhouse
-Parizek
-Paripovich
-Parinas
-Parihar
-Parhan
-Pargman
-Pardoe
-Parayuelos
-Paravano
-Paratore
-Parara
-Papranec
-Pappajohn
-Paponetti
-Papitto
-Papike
-Papiernik
-Papciak
-Papantonio
-Papanikolas
-Papania
-Papan
-Papale
-Pap
-Paongo
-Paola
-Panzica
-Panzella
-Panyko
-Panuccio
-Pantosa
-Pantoliano
-Pantelakis
-Panrell
-Panowicz
-Panora
-Pankiw
-Pankake
-Panitz
-Panila
-Panias
-Paneque
-Panela
-Paneczko
-Pandola
-Panahon
-Panah
-Panagoulias
-Panagis
-Paluszynski
-Paluk
-Paluck
-Palu
-Paloukos
-Palombit
-Palmios
-Palley
-Pallant
-Pallansch
-Pallafor
-Palisbo
-Palchetti
-Palazola
-Palas
-Palacois
-Pakonen
-Pajerski
-Paillant
-Pahk
-Pagni
-Pagnello
-Paglio
-Paga
-Pafel
-Padol
-Padgette
-Padeken
-Paddio
-Paddilla
-Paddack
-Padavich
-Pacquin
-Packineau
-Pacior
-Pacholec
-Pachlin
-Pachla
-Pach
-Pacenta
-Pacek
-Pacapac
-Pacana
-Paben
-Paarmann
-Paalan
-Ozer
-Ozane
-Ozaine
-Ozaeta
-Oz
-Oyston
-Oyellette
-Oxton
-Oxnam
-Oxenrider
-Oxborough
-Owers
-Ow
-Ovit
-Ovesen
-Overstrom
-Overshiner
-Overmire
-Overley
-Overkamp
-Overdick
-Overbough
-Ovdenk
-Ovadilla
-Ouye
-Outzen
-Ousdahl
-Oury
-Ourth
-Ounsy
-Ouellete
-Oudker
-Otutaha
-Otuafi
-Ottrix
-Ottogary
-Ottino
-Ottilige
-Ottenwess
-Otiz
-Othoudt
-Otex
-Otega
-Osvaldo
-Ostwald
-Ostrzyeki
-Ostrum
-Ostroot
-Osterhaut
-Ostendorff
-Ostenberg
-Ostasiewicz
-Osswald
-Ossola
-Osowicz
-Osorno
-Osollo
-Osol
-Osnoe
-Osmus
-Osmanski
-Osias
-Oshman
-Osentowski
-Osden
-Osche
-Osbeck
-Orttenburger
-Ortolf
-Orto
-Ortga
-Orrego
-Orpin
-Orozeo
-Orochena
-Orobona
-Oroark
-Ornelos
-Ornedo
-Orne
-Orm
-Orlove
-Orlosky
-Orlof
-Orlinsky
-Orlinski
-Orlin
-Orizabal
-Oriti
-Orion
-Origer
-Orie
-Orhenkowski
-Orford
-Orff
-Oreskovich
-Orellama
-Oreily
-Orehek
-Oreb
-Ordazzo
-Ordahl
-Orcholski
-Orce
-Oras
-Opula
-Opstein
-Oppliger
-Oppegard
-Opichka
-Opher
-Opet
-Opalicki
-Opaka
-Ooton
-Onyeanus
-Onwunli
-Onukogu
-Onisick
-Onifade
-Oneale
-Ondik
-Ondic
-Ondersma
-Omullan
-Omoto
-Omo
-Omlin
-Omli
-Omersa
-Olverson
-Olveira
-Olvedo
-Olowe
-Olona
-Olnes
-Olloqui
-Olliver
-Ollhoff
-Ollendick
-Olkowski
-Olivid
-Olivers
-Oliveres
-Olivarra
-Olinghouse
-Oligee
-Olgvin
-Olfers
-Olewinski
-Olewine
-Oleveda
-Oleskiewicz
-Olejarski
-Olecki
-Olde
-Olckhart
-Olbrish
-Olay
-Olarte
-Okwuona
-Okuley
-Okula
-Okorududu
-Okoren
-Okoli
-Okihara
-Okerson
-Oken
-Ojard
-Ojanen
-Oines
-Oilvares
-Oieda
-Ohrnstein
-Ohren
-Ohmit
-Ohmie
-Ohlmacher
-Ohlenbusch
-Ohlen
-Ohaver
-Oharroll
-Ogwynn
-Ogunyemi
-Ogram
-Ogilive
-Ogen
-Ogbonnaya
-Ogasawara
-Ogans
-Ogami
-Oflahrity
-Offret
-Oen
-Oeler
-Oehrlein
-Oehrle
-Oehmke
-Oehmig
-Oeftger
-Oeder
-Odougherty
-Odorizzi
-Odomes
-Odin
-Odien
-Odhner
-Odess
-Odenheimer
-Ocus
-Ochsenbein
-Ochinang
-Ochiai
-Ochalek
-Occhino
-Ocacio
-Obnegon
-Oblow
-Oblinger
-Obiano
-Obery
-Oberson
-Oberpriller
-Obermuller
-Obermoeller
-Oberholzer
-Oberhaus
-Oberdier
-Oberdick
-Oaxaca
-Oar
-Nysether
-Nykiel
-Nygaro
-Nycum
-Nyahay
-Nwankwo
-Nwakanma
-Nwadiora
-Nwabeke
-Nuzenski
-Nusz
-Nunnelee
-Nunmaker
-Nuniz
-Nunery
-Nulisch
-Nuetzman
-Nuessle
-Nuesca
-Nuckoles
-Nuccitelli
-Nucci
-Nozum
-Nozick
-Nowzari
-Nowosadko
-Nowley
-Nowitzke
-Novitsky
-Novitski
-Novitske
-Novikoff
-Novida
-Novetsky
-Novelly
-Novellino
-Novara
-Nouth
-Noullet
-Noud
-Notwick
-Notowitz
-Notley
-Notis
-Nothem
-Nothacker
-Nostro
-Noseff
-Norwell
-Northwood
-Northcut
-Norstrud
-Norseth
-Norse
-Norsaganay
-Norko
-Norkaitis
-Noriego
-Norg
-Noreiga
-Nordwall
-Nordsiek
-Nordlinger
-Nordick
-Nordenstrom
-Norbo
-Noorigian
-Noordam
-Nonu
-Nones
-Noneman
-Nondorf
-Noltensmeier
-Nollette
-Nolfe
-Nolazco
-Nokken
-Noke
-Noiseux
-Noia
-Nohe
-Nogueda
-Noguchi
-Nogoda
-Noggles
-Noggler
-Noftsier
-Noey
-Noerenberg
-Noegel
-Nodurft
-Nodarse
-Nockai
-Nobregas
-Nobis
-Nkuku
-Nkomo
-Njango
-Niziol
-Nixion
-Nixa
-Nivar
-Nivala
-Nitzschke
-Nitzsche
-Nitzkowski
-Nitcher
-Niswender
-Nisley
-Nishimori
-Nirmaier
-Nipps
-Nipple
-Ninke
-Nini
-Ninh
-Nimrod
-Nimox
-Nimick
-Nila
-Niksich
-Nikodem
-Nikocevic
-Nikaido
-Nightlinger
-Niggemann
-Nietfeldt
-Niess
-Niesent
-Niesborella
-Nierer
-Niemitzio
-Niemiel
-Niemants
-Niedzwiedzki
-Niedzwiedz
-Niedens
-Niedbalec
-Niebaum
-Nicoson
-Nicoli
-Nicolaus
-Nickoley
-Nicklos
-Nicklien
-Nickenberry
-Nickas
-Nicholason
-Nichell
-Nichalson
-Nicewonger
-Niau
-Nian
-Nham
-Nguyan
-Ngin
-Nezich
-Nezat
-Neyaci
-Newstead
-Newness
-Newhook
-Newes
-Newens
-Newbell
-Newball
-Nevinger
-Nevilles
-Nevil
-Never
-Nevarrez
-Neuse
-Neundorfer
-Neuenswander
-Neudeck
-Neubig
-Neubaum
-Neubacher
-Nettleingham
-Netrosio
-Netolicky
-Netley
-Nesti
-Nessmith
-Neslusan
-Nesline
-Nesland
-Nesin
-Nerlich
-Nepa
-Neonakis
-Nenni
-Nemzin
-Nemunaitis
-Nemets
-Nemard
-Nemani
-Nelmes
-Nellums
-Nellenback
-Nelisse
-Nejaime
-Neja
-Neither
-Neiswoger
-Neiper
-Neild
-Neidiger
-Nehrt
-Nehme
-Neglio
-Negbenebor
-Needy
-Nedman
-Nedina
-Nederostek
-Nedelman
-Neddo
-Nedbalek
-Nebred
-Neblock
-Nebesnik
-Nebarez
-Neall
-Nealious
-Nealer
-Neahr
-Ncneal
-Nazzise
-Nazzal
-Nazir
-Nazelrod
-Naz
-Naysmith
-Nayman
-Nawwar
-Nawda
-Naveed
-Navarrate
-Navaretta
-Navappo
-Navanjo
-Natwick
-Nattiah
-Natsis
-Nati
-Nathans
-Natewa
-Natani
-Natalello
-Nasti
-Nassie
-Nasr
-Nasers
-Nasalroad
-Narr
-Nargi
-Nardy
-Napieralski
-Nanthanong
-Nantanapibul
-Nanna
-Nanik
-Nanasy
-Nanas
-Namur
-Namihira
-Namaka
-Nalty
-Nalbach
-Naki
-Nakatsu
-Nakamori
-Najarian
-Nailer
-Naifeh
-Naidu
-Nahrwold
-Nahl
-Nahari
-Nagode
-Nagindas
-Nagengast
-Nagelhout
-Nagase
-Naftzinger
-Naftali
-Naeher
-Nadoff
-Naderi
-Nadelbach
-Naddeo
-Nacy
-Nacisse
-Nacion
-Nachtrieb
-Nachmias
-Nachazel
-Nacar
-Naborg
-Nabity
-Nabhan
-Mytych
-Myslinski
-Myslin
-Mysak
-Myrtle
-Myrman
-Myrck
-Myntti
-Mynnerlyn
-Mylott
-Myking
-Myes
-Mycroft
-Mway
-Muzyka
-Muzacz
-Muyskens
-Muysenberg
-Mutone
-Mutner
-Mutherspaw
-Muthart
-Muthana
-Mutart
-Musty
-Muston
-Mussmann
-Musshorn
-Musse
-Muss
-Musquiz
-Musolf
-Muskthel
-Muska
-Musinski
-Musigdilok
-Muschick
-Muschett
-Musch
-Murwin
-Murty
-Mursko
-Murnock
-Mure
-Murasso
-Muraro
-Muran
-Murallies
-Muraco
-Munyer
-Munshi
-Munning
-Munl
-Munir
-Muninger
-Munhall
-Muney
-Munet
-Mundziak
-Mundschau
-Mundhenk
-Munderville
-Muncil
-Munchmeyer
-Munaz
-Muna
-Mulzer
-Mulvahill
-Mulryan
-Mulroney
-Mulready
-Mulneix
-Mullowney
-Mullner
-Mullison
-Mullany
-Mulich
-Mula
-Muhtaseb
-Muhlenkamp
-Muhlbach
-Muggley
-Mueske
-Muenkel
-Muell
-Muehleisen
-Mudrick
-Muddaththir
-Muczynski
-Mucklow
-Muckley
-Muckelvaney
-Muchortow
-Mthimunye
-Mrazik
-Mozzone
-Mozo
-Mozley
-Mozie
-Mozgala
-Mozelak
-Moyerman
-Mowder
-Mowan
-Movlin
-Mouzas
-Mourino
-Moulhem
-Mottillo
-Motteshard
-Mottershead
-Motamed
-Mosz
-Mostoller
-Mostiller
-Mostero
-Mostella
-Mosson
-Mossing
-Mossien
-Mossel
-Mosmeyer
-Moskau
-Moshos
-Mosho
-Moscovic
-Moscaritolo
-Moscariello
-Moscardelli
-Morosow
-Morono
-Morneault
-Morna
-Morn
-Morkve
-Moriwaki
-Morise
-Moriera
-Moricle
-Moribayed
-Morgret
-Morgner
-Morgas
-Morgans
-Morgandi
-Morfee
-Morelen
-Moreida
-Moreci
-Moreb
-Mordino
-Mordini
-Mordehay
-Morda
-Mootz
-Mootispaw
-Moosbrugger
-Moosa
-Moonsommy
-Moonshower
-Moodispaugh
-Mooberry
-Monz
-Montuoro
-Montrella
-Montijano
-Montgonery
-Montelle
-Montell
-Montcalm
-Montalgo
-Monske
-Monrroy
-Monrow
-Monnot
-Moniak
-Mongue
-Mongolo
-Mongiovi
-Monfore
-Mondoux
-Mondone
-Mondell
-Mondaine
-Moncrieffe
-Moncrieff
-Moncier
-Monasterio
-Monarque
-Monaham
-Monagle
-Momper
-Momeni
-Moltrie
-Molone
-Molly
-Mollohan
-Molliere
-Mollere
-Molleker
-Mollberg
-Molinini
-Moling
-Molineaux
-Molett
-Moldan
-Molavi
-Molaison
-Mokriski
-Mokiao
-Mojzisik
-Mojardin
-Moisey
-Mohorovich
-Mohinani
-Mohaupt
-Mohabeer
-Mogollon
-Moghadam
-Mofle
-Mofford
-Moevao
-Moelter
-Moede
-Modrak
-Moddejonge
-Mockler
-Mocha
-Mobilio
-Mlenar
-Mizzi
-Mizner
-Mizee
-Miyasaka
-Miyao
-Mixdorf
-Mitter
-Mittchell
-Mittag
-Mithani
-Mitchler
-Misove
-Mismit
-Misluk
-Miskovich
-Mishou
-Miserendino
-Misek
-Miscoe
-Mirmow
-Mirman
-Mirkovich
-Mirao
-Miran
-Miquelon
-Minucci
-Mintreas
-Mintos
-Mintor
-Minotti
-Minock
-Minnatee
-Miniuk
-Minissale
-Minihan
-Minicozzi
-Mini
-Minford
-Minette
-Minery
-Minehan
-Mineconzo
-Mindingall
-Minchella
-Minarcik
-Minacci
-Mimaki
-Milz
-Milwee
-Miltz
-Milsaps
-Milosevich
-Millstead
-Millott
-Millora
-Millian
-Millhiser
-Millerr
-Millbrand
-Millbern
-Millberg
-Milkent
-Milius
-Milite
-Milelr
-Mildred
-Milderberger
-Mildenstein
-Milbrodt
-Milare
-Mikulec
-Mikovec
-Mikota
-Mikolon
-Mikhaiel
-Mikez
-Miker
-Mikasa
-Mihovk
-Mihor
-Mihaliak
-Mihalco
-Mihalak
-Miggo
-Miessler
-Miernik
-Miernicki
-Miene
-Mieloszyk
-Mielkie
-Mielczarek
-Mielcarz
-Miehe
-Midget
-Middough
-Middents
-Microni
-Mickulskis
-Micks
-Mickonis
-Mickenheim
-Michello
-Michealson
-Michavd
-Michalczik
-Mezzinni
-Mezzanotte
-Meysembourg
-Meyerowitz
-Meyerott
-Meyerman
-Meyerhoefer
-Mevis
-Mevers
-Meuler
-Meulemans
-Meua
-Metzga
-Metzel
-Mettlen
-Mettille
-Metott
-Metos
-Metil
-Metia
-Metherell
-Metevelis
-Metenosky
-Meteer
-Metchikoff
-Mestler
-Mestanza
-Messman
-Messey
-Messervy
-Messel
-Messan
-Mesoloras
-Mesmer
-Mesiona
-Mesias
-Meshew
-Meshanko
-Meservy
-Mesecar
-Mesdaq
-Merzig
-Mervine
-Mertine
-Merrills
-Merren
-Merlette
-Merles
-Merlain
-Merl
-Merksamer
-Merithew
-Merisier
-Mering
-Merilos
-Merical
-Merhar
-Merette
-Mereno
-Merdian
-Merceir
-Mercando
-Merante
-Merana
-Merales
-Menucci
-Mentkowski
-Mentgen
-Menso
-Mensen
-Menkin
-Menjes
-Menjares
-Menitz
-Menietto
-Menier
-Meneus
-Menefield
-Menees
-Mendrin
-Mendrala
-Mendler
-Mendiaz
-Mendesa
-Mencke
-Menchu
-Menches
-Menas
-Mems
-Memo
-Memmo
-Meltzner
-Melter
-Melstrom
-Melsheimer
-Melser
-Melodia
-Mellos
-Mellis
-Melliere
-Mellie
-Mellecker
-Mellage
-Mellady
-Melikyan
-Melford
-Meley
-Melencamp
-Meleen
-Melear
-Melchert
-Melaun
-Melaro
-Melady
-Mekonis
-Meisenburg
-Meireles
-Meinsen
-Meinershagen
-Meil
-Meihofer
-Mehrotra
-Mehlhaff
-Mehis
-Mehelich
-Mehdizadeh
-Mehdi
-Meharry
-Mehalko
-Megraw
-Megown
-Mego
-Megill
-Megia
-Meggison
-Meggett
-Meggerson
-Meetze
-Meeroff
-Meemken
-Meehleder
-Meeds
-Medure
-Medosch
-Medora
-Mednis
-Medling
-Medland
-Medious
-Medino
-Medin
-Medill
-Medieros
-Medi
-Medhus
-Medearis
-Medanich
-Medalion
-Meckel
-Meccia
-Mecardo
-Measheaw
-Measeck
-Mearing
-Meara
-Meakin
-Mcwilson
-Mcward
-Mcwalters
-Mcwade
-Mcvoy
-Mctush
-Mctiernan
-Mctarnaghan
-Mcswiggan
-Mcstay
-Mcritchie
-Mcrill
-Mcquiddy
-Mcqueeny
-Mcpharlane
-Mcphan
-Mcpartlin
-Mcnutty
-Mcnuh
-Mcnicoll
-Mcnicol
-Mcnevin
-Mcnespey
-Mcneme
-Mcnellie
-Mcnayr
-Mcmina
-Mcmenamy
-Mcmanigal
-Mcluckie
-Mclilly
-Mcleskey
-Mclearan
-Mclauchlen
-Mclatchy
-Mclaen
-Mckray
-Mckouen
-Mckoon
-Mckisson
-Mckinna
-Mckines
-Mckimmy
-Mckimley
-Mckewen
-Mckerrow
-Mckenzy
-Mckentie
-Mckemie
-Mckaskle
-Mckanic
-Mcintyde
-Mcinroy
-Mcinnish
-Mcilwaine
-Mciltrot
-Mchalffey
-Mcgurren
-Mcgurr
-Mcgunnis
-Mcgunnigle
-Mcgunagle
-Mcguinnes
-Mcguin
-Mcgrotha
-Mcgrogan
-Mcgraph
-Mcgoon
-Mcglothern
-Mcgloster
-Mcglohon
-Mcglockton
-Mcglawn
-Mcginnity
-Mcginister
-Mcgilberry
-Mcgiboney
-Mcghin
-Mcghaney
-Mcgeeney
-Mcgeady
-Mcgartland
-Mcgarraugh
-Mcgaffey
-Mcgafferty
-Mcgaffee
-Mcfeeley
-Mcfan
-Mceneny
-Mcelwine
-Mcelreavy
-Mcelpraug
-Mcelmeel
-Mceirath
-Mceady
-Mcdunn
-Mcdonnall
-Mcdewitt
-Mcdermett
-Mcdeavitt
-Mcdearmont
-Mccurine
-Mccunn
-Mccumbers
-Mccumbee
-Mccullors
-Mccullon
-Mccullogh
-Mccullock
-Mccuan
-Mccrate
-Mccra
-Mccoulskey
-Mccornack
-Mccormik
-Mccorkindale
-Mccorison
-Mcconnal
-Mccomack
-Mccole
-Mccoil
-Mccoard
-Mcclurken
-Mcclodden
-Mcclod
-Mcclimens
-Mccleveland
-Mcclenningham
-Mcclellon
-Mcclaugherty
-Mcclatcher
-Mcclarty
-Mcclamma
-Mcclaim
-Mcchain
-Mccelland
-Mccastle
-Mccarvill
-Mccarther
-Mccarr
-Mccarns
-Mccarn
-Mccard
-Mccandrew
-Mccandliss
-Mccalvin
-Mccalpin
-Mccalment
-Mccallun
-Mccallough
-Mccahan
-Mccaffree
-Mcbratney
-Mcaveney
-Mcausland
-Mcauly
-Mcarthun
-Mcanaw
-Mcall
-Mbamalu
-Mazzera
-Mazze
-Mazzawi
-Mazzaferro
-Mazzacano
-Mazuo
-Mazion
-Mazey
-Maywood
-Mayshack
-Mayrose
-Mayou
-Mayorca
-Mayoka
-Maynerich
-Maylone
-Mayhood
-Mayeshiba
-Maydew
-Maxi
-Maxell
-Mawhinney
-Mavropoulos
-Mavle
-Mavai
-Mautte
-Mauson
-Mausey
-Mauseth
-Mausbach
-Maurus
-Maurizio
-Maura
-Maupredi
-Maung
-Maultasch
-Mauleon
-Maud
-Matyi
-Matuszak
-Matushevsky
-Matusek
-Matuck
-Mattys
-Mattsey
-Mattione
-Mattias
-Matteis
-Matsu
-Matsoukas
-Matrey
-Matot
-Matlin
-Matkowsky
-Matise
-Mathwich
-Mathus
-Mathony
-Mathery
-Matherson
-Mathen
-Maten
-Matelich
-Matejek
-Matczak
-Matchen
-Matarrita
-Matakonis
-Mataka
-Matacale
-Masuyama
-Masure
-Masupha
-Masudi
-Masturzo
-Mastrocola
-Mastriano
-Mastrianni
-Mastrianna
-Mastrelli
-Massicotte
-Massetti
-Massella
-Massei
-Massee
-Massaquoi
-Masood
-Masom
-Maslowsky
-Masloski
-Maslonka
-Maski
-Maskaly
-Masiejczyk
-Masgalas
-Masero
-Masenten
-Masciantonio
-Masaya
-Masaracchia
-Marzocchi
-Marzili
-Marzigliano
-Marye
-Marusiak
-Marullo
-Marturano
-Martos
-Martorello
-Martineze
-Martillo
-Martignago
-Martiarena
-Marsters
-Marshalek
-Marsell
-Marsek
-Marseglia
-Marriot
-Marrion
-Marrington
-Marrietta
-Marrello
-Marreel
-Marrable
-Marquina
-Marque
-Marozzi
-Marovic
-Marotti
-Marose
-Marnett
-Marmolejos
-Markt
-Markson
-Marklund
-Markewich
-Marinoni
-Marinko
-Marinas
-Maril
-Mariello
-Marguardt
-Margreiter
-Margraf
-Margel
-Margaryan
-Margarita
-Margan
-Marevka
-Maresco
-Marero
-Marentez
-Maree
-Mardini
-Marcotrigiano
-Marcoguisepp
-Marcks
-Marcinka
-Marchizano
-Marchitto
-Marchiony
-Marchionese
-Marchesseault
-Marcheski
-Marchesano
-Marchall
-Marceaux
-Marbray
-Maratre
-Maratos
-Marashi
-Marasciulo
-Maras
-Marantz
-Marallo
-Maragni
-Maragh
-Marabella
-Maquis
-Maontesano
-Maobi
-Manzie
-Manzay
-Manvelito
-Manvel
-Manuell
-Mantik
-Mantele
-Mantegna
-Mansbridge
-Mansanares
-Manora
-Manolakis
-Manokey
-Mannine
-Mannheimer
-Mannebach
-Mannchen
-Manlito
-Mankoski
-Manivong
-Manheim
-Mangubat
-Manfra
-Manemann
-Manecke
-Mandry
-Mandler
-Mandi
-Mandap
-Mandahl
-Mancos
-Manciel
-Mancherian
-Manchel
-Manca
-Manby
-Manatt
-Manaker
-Mamone
-Mammano
-Malvern
-Malton
-Malsch
-Malovich
-Malouff
-Malory
-Maloff
-Malocha
-Malmanger
-Mallinger
-Mallinak
-Mallegni
-Mallat
-Malkoski
-Malinky
-Malinak
-Malichi
-Malgieri
-Maleszka
-Males
-Maleonado
-Malenke
-Malekan
-Malehorn
-Maleck
-Malcome
-Malay
-Malawy
-Malarkey
-Malanado
-Malama
-Malabey
-Makua
-Makhija
-Makel
-Makarem
-Majorga
-Majocka
-Majica
-Majic
-Majeau
-Maizes
-Mairot
-Maione
-Mainz
-Mainland
-Mainetti
-Mainero
-Maimone
-Maifeld
-Maiers
-Maiello
-Maidonado
-Maicus
-Mahung
-Mahula
-Mahrenholz
-Mahran
-Mahomly
-Mahin
-Mahe
-Mahall
-Mahal
-Magsby
-Magsayo
-Magrone
-Magraw
-Magrann
-Magpali
-Magouliotis
-Magorina
-Magobet
-Magnini
-Magnifico
-Magnie
-Magnett
-Maglioli
-Maggit
-Magg
-Magette
-Magdefrau
-Magdalena
-Magaziner
-Magathan
-Magalski
-Magaldi
-Magadan
-Mafua
-Maeno
-Maenaga
-Maedke
-Madziar
-Madre
-Madine
-Madin
-Madhavan
-Madge
-Madeja
-Maddoy
-Maddison
-Maddin
-Maddern
-Mad
-Macvicar
-Macurdy
-Macreno
-Macpartland
-Macoreno
-Macola
-Macnutt
-Macnevin
-Macmullan
-Maclain
-Mackstutis
-Macknair
-Macklem
-Mackillop
-Mackenthun
-Mackechnie
-Mackaman
-Macione
-Maciolek
-Maciarello
-Machover
-Machle
-Machi
-Machel
-Machak
-Macduffee
-Maccutcheon
-Macculloch
-Maccord
-Macconaghy
-Maccoll
-Macclellan
-Macclairty
-Maccini
-Macchiarella
-Maccheyne
-Maccarter
-Maccarino
-Maccarini
-Macandog
-Macanas
-Macalma
-Macabeo
-Maasen
-Maarx
-Lytell
-Lyson
-Lysher
-Lyngholm
-Lynchj
-Lynah
-Lyme
-Lyken
-Lyew
-Lydecker
-Lybert
-Lyberger
-Lybecker
-Lyau
-Lweis
-Luzi
-Luzell
-Luvianos
-Luvera
-Lutze
-Lutkus
-Luten
-Lusty
-Lustberg
-Lurye
-Lury
-Lurtz
-Luquette
-Lupiani
-Lupacchino
-Lunter
-Lunstrum
-Lungwitz
-Lungsford
-Lunemann
-Lunderman
-Lunch
-Luminati
-Lumbley
-Lumba
-Lumadue
-Lulas
-Lukow
-Lukianov
-Lukesh
-Lukander
-Luka
-Luing
-Luikart
-Lugabihl
-Lufborough
-Luette
-Luescher
-Lueschen
-Luersen
-Luensmann
-Luening
-Lueker
-Luedecke
-Lueckenbach
-Luebbering
-Ludovico
-Ludera
-Ludeker
-Ludecke
-Luczki
-Luco
-Luckinbill
-Lucis
-Lucik
-Lucie
-Lucic
-Luchterhand
-Luccous
-Lucash
-Luberger
-Lubbert
-Lubben
-Lubawy
-Lubahn
-Luangxay
-Luangrath
-Luangamath
-Luague
-Lozey
-Loyborg
-Loyack
-Loxton
-Loxtercamp
-Lownsbery
-Lowler
-Lowcks
-Lowa
-Lovstad
-Lovisone
-Lovfald
-Lovetinsky
-Lovet
-Lovero
-Loverdi
-Lovellette
-Loveberry
-Louwagie
-Lournes
-Louria
-Lourentzos
-Lourdes
-Louka
-Louil
-Loudermelt
-Louchen
-Loubier
-Lotto
-Lotridge
-Lothringer
-Lothridge
-Lota
-Lot
-Loszynski
-Lossius
-Losneck
-Loseth
-Losavio
-Losardo
-Losano
-Losado
-Losacco
-Losa
-Lorr
-Loron
-Lorincz
-Loria
-Loretz
-Lorentine
-Lordi
-Loraine
-Lopze
-Lopiccalo
-Lopey
-Loperfido
-Lope
-Lopata
-Lopas
-Loparco
-Loofbourrow
-Longwith
-Longhi
-Longenberger
-Longbine
-Longaker
-Longabaugh
-Lomonte
-Lomino
-Lominack
-Lomen
-Lombel
-Lombardino
-Lomago
-Loma
-Lokan
-Loiacona
-Lohry
-Lohrke
-Lohre
-Logoleo
-Loggens
-Logarbo
-Lofwall
-Lofty
-Lofts
-Lofthus
-Lofte
-Lofstrom
-Loforte
-Lofman
-Lofing
-Lofguist
-Loffier
-Loffelbein
-Loerwald
-Loeppky
-Loehrer
-Loehner
-Loecken
-Lockshaw
-Locknane
-Lockington
-Lockery
-Lockemer
-Lochrico
-Lobregat
-Lobley
-Lobello
-Lobell
-Lobalbo
-Lobach
-Llaneza
-Llanet
-Llams
-Livley
-Livinton
-Living
-Liversedge
-Livernois
-Livermon
-Liverance
-Liveoak
-Livecchi
-Livasy
-Liukkonen
-Litzenberger
-Litvak
-Littfin
-Litmanowicz
-Litchard
-Listi
-Listen
-Lisker
-Lisitano
-Lisena
-Lisbey
-Lipsie
-Lips
-Lippoldt
-Lippitt
-Lipper
-Lipoma
-Lipkovitch
-Lipira
-Lipan
-Linzan
-Linza
-Linsin
-Linsenmayer
-Linsdau
-Linnert
-Linman
-Linkon
-Lingner
-Lingley
-Lingerfelter
-Lingbeek
-Linero
-Lindorf
-Lindmeyer
-Lindinha
-Linderleaf
-Lindau
-Lindabury
-Linburg
-Linak
-Limmel
-Limle
-Limbert
-Limardi
-Lilyblade
-Lillehaug
-Likar
-Liiv
-Ligonis
-Ligler
-Lighthart
-Ligget
-Liftin
-Lifschitz
-Liewald
-Lievsay
-Lievens
-Lietzow
-Lierz
-Liegler
-Liedberg
-Lied
-Liebrecht
-Liebherr
-Lieberg
-Liebenthal
-Liebenow
-Liebeck
-Lidstone
-Lidie
-Lidge
-Lidder
-Licursi
-Licklider
-Lickfelt
-Lichota
-Lichenstein
-Liceaga
-Liccketto
-Libertini
-Libberton
-Leyton
-Leyh
-Leydecker
-Leyda
-Lexer
-Lewi
-Lewars
-Levreau
-Levra
-Levielle
-Levian
-Leveto
-Leversee
-Levers
-Leverone
-Leverance
-Levendoski
-Levee
-Levatino
-Levans
-Levandofsky
-Leuze
-Leutwiler
-Leuthe
-Leuhring
-Leuga
-Leuckel
-Leuasseur
-Lettsome
-Lettiere
-Letscher
-Letender
-Letchaw
-Leta
-Lestrange
-Lestourgeon
-Lestor
-Leston
-Lessner
-Lessmann
-Lessly
-Lespedes
-Leso
-Lesneski
-Leskovar
-Leskovac
-Lese
-Lesco
-Lesches
-Lesa
-Lerra
-Lerper
-Lerow
-Lero
-Lermon
-Lepretre
-Lepre
-Leppink
-Lepke
-Lepez
-Lepetich
-Leopardi
-Leonpacher
-Leonick
-Leonberger
-Leomiti
-Leny
-Lenski
-Lenorud
-Lenort
-Lennis
-Lennart
-Lennan
-Lenling
-Lenke
-Lenigan
-Lenhoff
-Lenharr
-Leners
-Lendt
-Lendor
-Lendo
-Lenczyk
-Lench
-Lenberg
-Lemoyne
-Lemmonds
-Lemmings
-Lemish
-Lemear
-Lembcke
-Lemansky
-Lemans
-Lellig
-Lekey
-Lekberg
-Lekan
-Lek
-Lejman
-Leitzinger
-Leithiser
-Leiper
-Leinwand
-Leimkuhler
-Leimberger
-Leilich
-Leigland
-Leichtenberge
-Leiberton
-Leho
-Lehning
-Lehneis
-Lehmer
-Lehenbauer
-Lehberger
-Legrotte
-Legro
-Legra
-Legat
-Legall
-Lefurgy
-Leflores
-Leffers
-Leffelman
-Lefeld
-Lefaver
-Leetham
-Leesman
-Leeker
-Leehan
-Leeber
-Ledsinger
-Ledermann
-Ledenbach
-Ledee
-Led
-Lecznar
-Leckband
-Lechleidner
-Lechelt
-Lecato
-Lecaros
-Lecain
-Lebroke
-Lebold
-Leblane
-Lebitski
-Lebish
-Leberte
-Lebedeff
-Lebby
-Lebaugh
-Lebarge
-Leavigne
-Leaven
-Leasor
-Leasher
-Leash
-Leanza
-Leanen
-Leaird
-Leahman
-Leadford
-Lazusky
-Lazurek
-Lazott
-Lazio
-Lazier
-Lazich
-Lazewski
-Lazares
-Layva
-Layell
-Laycox
-Lawsky
-Lawrentz
-Lawis
-Lawford
-Lawcewicz
-Lawbaugh
-Lawary
-Lawal
-Lavongsar
-Lavgle
-Lavezzo
-Lavelli
-Lave
-Lavani
-Lavander
-Lavagnino
-Lavadera
-Lautieri
-Lautaret
-Lausell
-Lauschus
-Laurole
-Lauretta
-Laureno
-Laureles
-Laurance
-Launiere
-Laundree
-Lauigne
-Laughon
-Laugen
-Laudeman
-Laudadio
-Lauckner
-Lauchaire
-Lauby
-Laubersheimer
-Latus
-Latourrette
-Latos
-Laton
-Lathrum
-Lather
-Lathe
-Latendresse
-Late
-Latassa
-Latam
-Lat
-Lastella
-Lassetter
-Laskosky
-Laskoskie
-Lasin
-Lasik
-Lashlee
-Lashier
-Laselle
-Laschinger
-Lascaro
-Lasane
-Lasagna
-Lasage
-Larusch
-Larrosa
-Larriviere
-Larralde
-Larr
-Larowe
-Larousse
-Larotta
-Laroia
-Laroe
-Larmett
-Larman
-Larkan
-Largena
-Laregina
-Lardone
-Larcom
-Larche
-Larbie
-Larbi
-Larason
-Laranjo
-Laragy
-Laraby
-Larabell
-Larabel
-Lapuerta
-Lappinga
-Lappi
-Laport
-Lapinta
-Lapila
-Laperuta
-Lapere
-Laper
-Lapek
-Lapari
-Lapalme
-Laorange
-Lanze
-Lanzarotta
-Lantry
-Lantgen
-Lantelme
-Lanteigne
-Lansey
-Lansberg
-Lannier
-Lannen
-Lanna
-Lankster
-Lanie
-Langrum
-Langness
-Langmo
-Langlitz
-Langi
-Langholdt
-Langhans
-Langgood
-Langanke
-Lanfor
-Lanen
-Laneaux
-Landu
-Landruth
-Landrie
-Landreville
-Landres
-Landquist
-Landolf
-Landmark
-Landini
-Landevos
-Landenberger
-Landan
-Lancz
-Lamudio
-Lampsas
-Lampl
-Lampinen
-Lamphiear
-Lampel
-Lamoree
-Lamoreau
-Lamoore
-Lamontagna
-Lammy
-Lammel
-Lamison
-Laming
-Lamie
-Lamia
-Lameda
-Lambuth
-Lambertus
-Lambermont
-Lamartina
-Lamango
-Lamaack
-Lalinde
-Lalich
-Lale
-Lakowski
-Lakhan
-Lajoye
-Lajoy
-Laios
-Lahne
-Laham
-Laguire
-Lagrenade
-Lagore
-Lagoo
-Lagonia
-Lagoni
-Laglie
-Laggan
-Lagesse
-Lagerstedt
-Lagergren
-Lagatta
-Lagard
-Lagant
-Lagamba
-Lagadinos
-Lafuze
-Lafrate
-Laforey
-Lafoon
-Lafontain
-Laflam
-Laffer
-Lafevre
-Lafemina
-Lafantano
-Laface
-Laessig
-Laehn
-Ladt
-Ladouce
-Ladonne
-Lado
-Ladika
-Ladick
-Ladebauche
-Lacz
-Lacusky
-Lacovara
-Lackett
-Lackage
-Lachino
-Lachiatto
-Lacharite
-Lacerenza
-Lacek
-Lacau
-Lacatena
-Lacaille
-Labovitch
-Labounta
-Labombar
-Laboissonnier
-Labo
-Labitan
-Labier
-Labeots
-Labarriere
-Labaro
-Labarbara
-Laatsch
-Laasaga
-Laake
-Kyseth
-Kypuros
-Kyper
-Kyner
-Kwilosz
-Kvzian
-Kvoeschen
-Kveton
-Kvek
-Kveen
-Kvaternik
-Kuziel
-Kuypers
-Kuykendoll
-Kuwana
-Kuwada
-Kutzer
-Kuty
-Kutlu
-Kuti
-Kutchie
-Kuszynski
-Kussmaul
-Kussel
-Kusnic
-Kusner
-Kusky
-Kushaney
-Kurzinski
-Kurtti
-Kurshuk
-Kurr
-Kurokawa
-Kurns
-Kuretich
-Kurasz
-Kurant
-Kura
-Kur
-Kupihea
-Kupferberg
-Kupersmith
-Kupchinsky
-Kunter
-Kunkleman
-Kuniyoshi
-Kunimitsu
-Kunich
-Kundanani
-Kunau
-Kummerow
-Kumlander
-Kumfer
-Kuman
-Kumalaa
-Kum
-Kulseth
-Kulbeth
-Kulbacki
-Kulback
-Kukura
-Kukler
-Kuklenski
-Kukauskas
-Kukahiko
-Kujat
-Kuiz
-Kuitu
-Kuick
-Kuhry
-Kuhlenschmidt
-Kuffa
-Kuepfer
-Kuehnhold
-Kuechler
-Kudro
-Kudrle
-Kuczma
-Kuckens
-Kuciemba
-Kuchinski
-Kuchem
-Kubley
-Kubler
-Kubesh
-Kubeck
-Kubasch
-Kub
-Kuanoni
-Krzewinski
-Krzesinski
-Krzan
-Kryston
-Krystek
-Krynicki
-Krylo
-Kruzel
-Kruyt
-Kruszewski
-Krusor
-Kruskie
-Krushansky
-Krush
-Kruppenbacher
-Krupinsky
-Krumroy
-Krumbein
-Krumbach
-Krukiel
-Kruizenga
-Kruis
-Kruiboesch
-Kruebbe
-Krucke
-Krotine
-Krostag
-Kropff
-Kropfelder
-Kroninger
-Kronau
-Krome
-Krolick
-Krokus
-Krog
-Krofta
-Krofft
-Kroesing
-Krochmal
-Krobath
-Krnach
-Krivanec
-Kristofferson
-Kristof
-Kristan
-Krissie
-Kriskovich
-Kriske
-Krishun
-Krishnamurthy
-Krishman
-Krinov
-Kriek
-Kriegshauser
-Krewer
-Kreutzbender
-Kreusch
-Kretzinger
-Kressler
-Kressin
-Kressierer
-Kresky
-Krepp
-Krenzke
-Krenning
-Krenik
-Kremple
-Kremmel
-Kremen
-Krejcik
-Kreissler
-Kreinhagen
-Krehel
-Kreese
-Krawitz
-Kravetsky
-Kravets
-Kravec
-Krausse
-Krausmann
-Krauel
-Kratowicz
-Kratchman
-Krasnici
-Krasnansky
-Kraskouskas
-Krasinski
-Kranwinkle
-Kranock
-Kramarczyk
-Krallman
-Krallis
-Krakowiak
-Krakauer
-Krainbucher
-Kraig
-Kraichely
-Krahulec
-Krahe
-Krah
-Kragt
-Kraetsch
-Krabel
-Krabbenhoft
-Kraasch
-Kraack
-Kozlovsky
-Kozlik
-Koziak
-Kozeyah
-Kozan
-Kowitz
-Kowalke
-Kowalec
-Koves
-Kovalaske
-Kovacik
-Koutras
-Koussa
-Kousonsavath
-Kounthong
-Kounthapanya
-Kounovsky
-Kounkel
-Kounick
-Koulavongsa
-Koulalis
-Kotyk
-Kotur
-Kottraba
-Kottlowski
-Kotterna
-Kotschevar
-Kotonski
-Kotlar
-Kotheimer
-Kotey
-Koterba
-Koteras
-Kotarski
-Kotaki
-Kosuta
-Kostrzewa
-Kostiv
-Kosters
-Kossey
-Kossen
-Kossak
-Kososky
-Kosorog
-Koso
-Koslan
-Kosiorek
-Koshi
-Koscielniak
-Kosareff
-Korzyniowski
-Korzybski
-Korynta
-Korwin
-Korwatch
-Kortemeier
-Korst
-Korsmeyer
-Korslund
-Koroch
-Kornn
-Kornfield
-Kornblatt
-Korkmas
-Koritko
-Korinta
-Koria
-Korewdit
-Kores
-Korenek
-Kordys
-Kordowski
-Kordiak
-Korbin
-Kopsho
-Koppy
-Kopke
-Kopin
-Kopicko
-Kopiasz
-Koperski
-Kopay
-Kopatz
-Kopan
-Koosman
-Koong
-Koolman
-Kool
-Konty
-Konow
-Konopski
-Konma
-Konishi
-Konger
-Konetchy
-Kone
-Konderla
-Konczewski
-Konarik
-Komula
-Kominski
-Komada
-Koma
-Kolwyck
-Kolupke
-Koltz
-Kolts
-Kolppa
-Koloc
-Kollross
-Kollos
-Kolkman
-Kolkhorst
-Kolikas
-Kolic
-Kolbusz
-Kolassa
-Kol
-Kokubun
-Kokoszka
-Kokko
-Kokenge
-Koitzsch
-Koiner
-Kohus
-Kohles
-Kohel
-Koguchi
-Kofoot
-Koers
-Koenitzer
-Koeninger
-Koenigsberg
-Koener
-Koenemund
-Koelbel
-Koehring
-Koeck
-Kody
-Kodera
-Koczwara
-Kocieda
-Kochkodin
-Kochen
-Kochanek
-Kobylski
-Kobylarz
-Kobylarczyk
-Kobold
-Knyzewski
-Knupke
-Knudsvig
-Knowiton
-Knowell
-Knous
-Knotowicz
-Knorp
-Knoflicek
-Knoeppel
-Knoepke
-Knoell
-Knoechel
-Knodel
-Knockaert
-Knobler
-Kniola
-Knill
-Knilands
-Kniesel
-Kniceley
-Kneuper
-Knetsch
-Kneser
-Knerien
-Knellinger
-Kneefe
-Knazs
-Knatt
-Knapko
-Knapick
-Knape
-Knap
-Knake
-Kmiotek
-Kment
-Kmatz
-Kman
-Klyn
-Klute
-Kluse
-Klumph
-Klukken
-Klukan
-Kluemper
-Kluber
-Klosky
-Kloppenburg
-Klonowski
-Klomp
-Klohs
-Klohe
-Kloeppel
-Kloeker
-Kloefkorn
-Kloeck
-Klobucar
-Kljucaric
-Klitzner
-Klitsch
-Kliskey
-Klinski
-Klinnert
-Klinich
-Klingner
-Klingenberger
-Klingberg
-Klingaman
-Klimo
-Klimavicius
-Klickman
-Klicka
-Klez
-Klevjer
-Klette
-Kletschka
-Kless
-Kleppen
-Klenovich
-Kleintop
-Kleinsasser
-Kleinfeld
-Kleifgen
-Kleid
-Kleftogiannis
-Kleefisch
-Kleck
-Klebes
-Klear
-Klawuhn
-Klawinski
-Klavon
-Klavetter
-Klarin
-Klappholz
-Klande
-Klancnik
-Klan
-Klamn
-Klamert
-Klaja
-Klaich
-Klafehn
-Klabunde
-Kjolseth
-Kjergaard
-Kjellsen
-Kjellman
-Kjeldgaard
-Kizzia
-Kizior
-Kivela
-Kitty
-Kitthikoune
-Kittelman
-Kitelinger
-Kitcher
-Kitchenman
-Kitanik
-Kisro
-Kisielewski
-Kiryakoza
-Kirsopp
-Kirshman
-Kirlin
-Kirkness
-Kirkling
-Kirkconnell
-Kirgan
-Kirchmann
-Kirchherr
-Kirchberg
-Kirchbaum
-Kirberger
-Kiracofe
-Kipple
-Kip
-Kious
-Kintopp
-Kintigh
-Kinsolving
-Kinsky
-Kinlin
-Kinlecheeny
-Kingwood
-Kingson
-Kinds
-Kindregan
-Kinderman
-Kinde
-Kimminau
-Kimbal
-Kilver
-Kiltie
-Kilstofte
-Kilogan
-Kilness
-Kilner
-Kilmister
-Killoren
-Killius
-Kilimnik
-Kilichowski
-Kildare
-Kiko
-Kijak
-Kiili
-Kihlstrom
-Kietzer
-Kiesser
-Kierzewski
-Kienbaum
-Kienast
-Kieke
-Kieck
-Kiebala
-Kiddle
-Kickel
-Kichline
-Kibbler
-Kiani
-Khubba
-Khora
-Khokher
-Khn
-Khlok
-Khilling
-Khensamphanh
-Khemmanivong
-Khazdozian
-Khazaleh
-Khauv
-Khairallah
-Kezele
-Keyon
-Keyl
-Kew
-Kevwitch
-Kevorkian
-Keveth
-Kevelin
-Kevan
-Keuper
-Ketzler
-Kettinger
-Ketterl
-Ketteringham
-Kettenring
-Ketchersid
-Kessans
-Kesey
-Kesek
-Kertzman
-Kertels
-Kerst
-Kerper
-Kernodle
-Kernighan
-Kernagis
-Kermes
-Kerens
-Kercheff
-Kerce
-Kerans
-Keppner
-Kepke
-Kepani
-Keovongxay
-Keoghan
-Keodalah
-Keobaunleuang
-Kenzie
-Kenson
-Kenoyer
-Kenouo
-Kennie
-Kenngott
-Kennaugh
-Kenik
-Keney
-Kenekham
-Kenealy
-Kendziora
-Kendal
-Kenaga
-Kempster
-Kemps
-Kempon
-Kempkens
-Kemmeries
-Kemerly
-Keltt
-Kellywood
-Kellish
-Kellem
-Keliipaakaua
-Kelau
-Keks
-Keisacker
-Keis
-Keinonen
-Keilholz
-Keilholtz
-Keihl
-Kehres
-Keetch
-Keetan
-Keet
-Keeser
-Keenom
-Keeman
-Keehner
-Keehan
-Kedra
-Kedia
-Kecskes
-Kecker
-Kebede
-Kebe
-Keba
-Keaty
-Keaten
-Keaser
-Kearsey
-Kearn
-Kazunas
-Kazimi
-Kazar
-Kazabi
-Kaza
-Kayat
-Kayastha
-Kawski
-Kawell
-Kawczynski
-Kawaiaea
-Kave
-Kavaney
-Kaut
-Kaushal
-Kausch
-Kauo
-Kaumans
-Kaui
-Kauder
-Kaucher
-Kaua
-Katzmann
-Katzaman
-Katterjohn
-Kattaura
-Katsaounis
-Katoh
-Katke
-Katis
-Katin
-Katie
-Kathleen
-Kathel
-Kataoka
-Kaszton
-Kaszinski
-Kasula
-Kasuba
-Kastens
-Kaspari
-Kasmarek
-Kasky
-Kashner
-Kasen
-Kasemeier
-Kasee
-Kasal
-Karz
-Karwowski
-Karstensen
-Karroach
-Karro
-Karrels
-Karpstein
-Karpe
-Karoly
-Karnath
-Karnas
-Karlinsky
-Karlgaard
-Kardux
-Karangelen
-Karamchandani
-Karagiannes
-Karageorge
-Karabin
-Kar
-Kapsner
-Kapperman
-Kappelmann
-Kapler
-Kapiloff
-Kapetanos
-Kanzenbach
-Kanwar
-Kantis
-Kantah
-Kanosh
-Kanoon
-Kanniard
-Kannan
-Kanjirathinga
-Kangleon
-Kaneta
-Kanekuni
-Kanealii
-Kand
-Kanakares
-Kamstra
-Kamradt
-Kampner
-Kamna
-Kammerzell
-Kamman
-Kamiya
-Kaminska
-Kamensky
-Kamber
-Kallhoff
-Kallfelz
-Kalley
-Kallestad
-Kallal
-Kalista
-Kalhorn
-Kalenak
-Kaldahl
-Kalberg
-Kalandek
-Kalan
-Kalamaras
-Kalafarski
-Kalaf
-Kakowski
-Kakeh
-Kakani
-Kajder
-Kaja
-Kaines
-Kaiktsian
-Kaid
-Kahookele
-Kahoohalphala
-Kahley
-Kahao
-Kahalehoe
-Kahal
-Kahae
-Kagimoto
-Kaewprasert
-Kaemingk
-Kadow
-Kadelak
-Kaczka
-Kacvinsky
-Kacprowski
-Kachmarsky
-Kabzinski
-Kabus
-Kabir
-Kabigting
-Kabala
-Kabacinski
-Kababik
-Kaarlela
-Kaanana
-Kaan
-Kaak
-Kaai
-Ka
-Juvenal
-Justian
-Juste
-Justak
-Jurries
-Jurney
-Jurkovich
-Jurist
-Jurin
-Jurgen
-Juray
-Junod
-Junkersfeld
-Junick
-Jumbo
-Julsrud
-Julitz
-Juliana
-Jukich
-Juengling
-Juen
-Juelich
-Judie
-Jubyna
-Jubran
-Jubeh
-Juback
-Juba
-Juanico
-Joynson
-Joyne
-Jover
-Journot
-Joto
-Jotblad
-Josic
-Jorrisch
-Jordt
-Jording
-Jondrow
-Jonah
-Jome
-Jollimore
-Joline
-Jolina
-Joler
-Joki
-Johnting
-Johnstonbaugh
-Johnikins
-Johniken
-Johe
-Johansing
-Johal
-Joganic
-Joerger
-Joelson
-Joehnck
-Jody
-Jodha
-Joanis
-Jirsa
-Jirak
-Jira
-Jingst
-Jhingree
-Jhanson
-Jews
-Jestis
-Jessica
-Jeskie
-Jesiolowski
-Jesenovec
-Jeschon
-Jermeland
-Jerkin
-Jericho
-Jerger
-Jergen
-Jerding
-Jepko
-Jens
-Jenovese
-Jennkie
-Jenderer
-Jenab
-Jempty
-Jemmings
-Jelome
-Jellings
-Jelden
-Jelarde
-Jeffryes
-Jeffirs
-Jedan
-Jecmenek
-Jecklin
-Jeck
-Jeanquart
-Jeanphilippe
-Jeannoel
-Jeanette
-Jeancy
-Jaysura
-Javis
-Javers
-Javed
-Jave
-Jaussen
-Jauhar
-Jastremski
-Jastrebski
-Jasmann
-Jaskolka
-Jasko
-Jaskiewicz
-Jasica
-Jasch
-Jarriett
-Jaroski
-Jarnutowski
-Jarmin
-Jaremka
-Jarema
-Jarels
-Jarecke
-Jarding
-Jardel
-Japak
-Janysek
-Janway
-Janowiec
-Janow
-Janofsky
-Janoff
-Jannise
-Jannett
-Jankoff
-Janeiro
-Jana
-Jaminet
-Jami
-Jamgochian
-Jamesson
-Jamer
-Jamel
-Jamason
-Jalovel
-Jalkut
-Jakubov
-Jaksic
-Jaksch
-Jakiela
-Jaji
-Jaiyesimi
-Jahosky
-Jahoda
-Jahaly
-Jagiello
-Jaggie
-Jafek
-Jafari
-Jae
-Jadoo
-Jaculina
-Jacquin
-Jacquelin
-Jacobsohn
-Jacobovits
-Jackso
-Jacksits
-Jackosn
-Jackett
-Jacinthe
-Jabbie
-Jabaut
-Jabali
-Jaarda
-Izak
-Izaguine
-Iwasko
-Iwashita
-Ivrin
-Ivener
-Iveans
-Ivancic
-Iuchs
-Itnyre
-Istorico
-Isiminger
-Isgur
-Isgro
-Isenbarger
-Iseman
-Isebrand
-Isaksen
-Isagba
-Isacson
-Isaack
-Irr
-Ironhorse
-Irigoyen
-Ireson
-Ipsen
-Iossa
-Inzano
-Introini
-Insognia
-Inserra
-Inostraza
-Innerst
-Innella
-Innarelli
-Innamorato
-Inkavesvanitc
-Ingvolostad
-Inguardsen
-Ingran
-Ingrahm
-Ingraffea
-Ingleton
-Inghem
-Ingersol
-Ingargiolo
-Inferrera
-Iner
-Induddi
-Indermuehle
-Indeck
-Indal
-Incomstanti
-Incera
-Incarnato
-Inbody
-Inabnit
-Imming
-Immerman
-Immediato
-Imholte
-Imeson
-Imbruglia
-Imbrock
-Imbriale
-Imbrenda
-Imam
-Imada
-Iltzsch
-Illovsky
-Illich
-Illas
-Illar
-Iliffe
-Ilg
-Ilarraza
-Ilaria
-Ilalio
-Ikzda
-Ikkela
-Ikenberry
-Ikemoto
-Ikemire
-Ikeard
-Ihnen
-Ihenyen
-Iheme
-Igus
-Iguina
-Ignoria
-Igles
-Igbinosun
-Ifie
-Ifft
-Ifeanyi
-Ifantides
-Iennaco
-Idrovo
-Idriss
-Idiart
-Ickert
-Icardo
-Ibric
-Ibdah
-Ibbotson
-Ibasitas
-Iarussi
-Iara
-Iannalo
-Iamiceli
-Iacuzio
-Iacobucci
-Iacobelli
-Hysquierdo
-Hyske
-Hydzik
-Hyberger
-Hyatte
-Huysman
-Huyna
-Hutyra
-Huttman
-Huttar
-Huter
-Husul
-Hustedt
-Hussy
-Hussong
-Hussian
-Huski
-Hushon
-Husein
-Husaini
-Hurtubise
-Hurta
-Hurni
-Hurme
-Hupy
-Huppenbauer
-Hunze
-Hunson
-Huner
-Hundertmark
-Hunderlach
-Humston
-Hummert
-Huminski
-Humerick
-Humbard
-Hulzing
-Hulshoff
-Hulmes
-Hukle
-Hujer
-Huitink
-Huirgs
-Hugus
-Huguet
-Hugghis
-Huffstutter
-Huerto
-Huertes
-Huenergardt
-Huemmer
-Huelle
-Huehn
-Huebsch
-Hudok
-Hudnut
-Hudlow
-Hudlin
-Hudes
-Huddy
-Huckabone
-Huckabaa
-Hubsch
-Hubl
-Hubertz
-Htwe
-Hsy
-Hrycko
-Hrna
-Hric
-Hribal
-Hrcka
-Hrbacek
-Hranchak
-Hradecky
-Hoysock
-Hoyne
-Hoylton
-Hoyal
-Hoxsie
-Howlingwolf
-Howett
-Howarter
-Hovnanian
-Hovard
-Hovantzi
-Hovanes
-Houzah
-Houtkooper
-Housner
-Housemate
-Hourihan
-Houltberg
-Houghtelling
-Houey
-Houchard
-Houben
-Hotter
-Hotten
-Hottell
-Hotek
-Hosoi
-Hosner
-Hosle
-Hoskyns
-Hoskey
-Hoshino
-Hosfield
-Hortein
-Horseford
-Horse
-Horridge
-Hornshaw
-Horns
-Hornlein
-Hornig
-Horneff
-Hormuth
-Horimoto
-Horesco
-Horenstein
-Horelick
-Hore
-Horbert
-Horabik
-Hoppenrath
-Hoppa
-Hopfauf
-Hoosock
-Hool
-Hoogheem
-Hoogendoorn
-Hoo
-Honus
-Honold
-Honokaupu
-Honigsberg
-Hongisto
-Hongeva
-Hones
-Honegger
-Hondros
-Hondel
-Honchul
-Honch
-Homza
-Homsey
-Homrighaus
-Hommer
-Homiak
-Homby
-Homans
-Holznecht
-Holzmiller
-Holzhueter
-Holzboog
-Holtmeier
-Holtmann
-Holthouse
-Holthoff
-Holtham
-Holtgrefe
-Holstad
-Holshovser
-Holquist
-Holmers
-Hollyday
-Hollo
-Hollner
-Hollinghurst
-Holleyman
-Hollett
-Hollerud
-Hollering
-Hollembaek
-Hollarn
-Hollamon
-Hollack
-Holihan
-Holibaugh
-Holgersen
-Holdy
-Holdgrafer
-Holdcraft
-Holdbrook
-Holcroft
-Holch
-Hokula
-Hokett
-Hojeij
-Hojczyk
-Hoivik
-Hoiseth
-Hoinacki
-Hohnson
-Hohney
-Hohmeier
-Hohm
-Hohlstein
-Hogstrum
-Hogon
-Hoglan
-Hogenmiller
-Hogains
-Hoga
-Hofstra
-Hofstadter
-Hofhine
-Hoffpavir
-Hoeser
-Hoerig
-Hoerger
-Hoelzel
-Hoelter
-Hoeller
-Hoek
-Hoehl
-Hoefflin
-Hoeffer
-Hodosy
-Hodnicki
-Hodermarsky
-Hodd
-Hockley
-Hochstine
-Hochfelder
-Hobstetter
-Hoblit
-Hobin
-Hoberek
-Hobb
-Hnot
-Hlywa
-Hlastala
-Hjermstad
-Hizkiya
-Hitzfelder
-Hiteman
-Hitchko
-Hitchingham
-Hissom
-Hismith
-Hiske
-Hirte
-Hirschmann
-Hirose
-Hirezi
-Hipsley
-Hippley
-Hipol
-Hintergardt
-Hinokawa
-Hinely
-Hindsman
-Hindmarsh
-Hinderaker
-Hindall
-Hinckson
-Hinajosa
-Himmelsbach
-Himmelright
-Hilyar
-Hilvers
-Hilu
-Hiltunen
-Hiltebeitel
-Hilsgen
-Hilovsky
-Hilo
-Hilmer
-Hillseth
-Hillered
-Hilleman
-Hillbrant
-Hillabush
-Hilla
-Hilkert
-Hilk
-Hildman
-Hilbner
-Hilbig
-Hilb
-Hila
-Hija
-Higy
-Hightshoe
-Higashida
-Hiens
-Hielscher
-Hidde
-Hidaka
-Hickley
-Hickingbotham
-Hickie
-Hiciano
-Hibble
-Hibbits
-Heziak
-Heynen
-Heykoop
-Heydenreich
-Heybrock
-Hevrin
-Hevessy
-Heugel
-Heuangvilay
-Hettes
-Hettenhausen
-Hetling
-Hetjonk
-Hethcox
-Hethcote
-Hetchman
-Hetcher
-Hesterly
-Hessman
-Hesselrode
-Hesselman
-Hesselbein
-Hesselbach
-Herzbrun
-Heryford
-Herwehe
-Hervol
-Hertle
-Herta
-Herskovic
-Hershnowitz
-Hershfield
-Herschaft
-Hersberger
-Herrud
-Herrnandez
-Herrlich
-Herritt
-Herrion
-Herrand
-Herran
-Herout
-Heroth
-Heronemus
-Hero
-Herny
-Hermus
-Herline
-Herley
-Hergenroeder
-Hergenreter
-Herena
-Herem
-Herek
-Hercman
-Heral
-Hequembourg
-Heppert
-Hepperly
-Heppel
-Heppding
-Henzler
-Hentrich
-Henter
-Hensle
-Hensdill
-Henschke
-Hennighausen
-Hennard
-Henkin
-Henges
-Henedia
-Hendson
-Hendsbee
-Hendrics
-Hendrickx
-Hencken
-Henchel
-Hencheck
-Hemsworth
-Hemry
-Hemperley
-Hemmig
-Hemmeter
-Hemmert
-Hemmelgarn
-Hemmeke
-Hemley
-Hemeyer
-Hemerly
-Hembre
-Hemans
-Hemanes
-Helwick
-Helvik
-Helphinstine
-Helphenstine
-Helowicz
-Helmert
-Helmen
-Helmbright
-Helliwell
-Helley
-Hellerman
-Hellenbrand
-Helferty
-Helfert
-Hekman
-Heitmuller
-Heitbrink
-Heisse
-Heisner
-Heir
-Heinzle
-Heinzerling
-Heino
-Heinig
-Heindl
-Heimerl
-Heimbuch
-Heilbrun
-Heilbron
-Heidtke
-Heidmann
-Heglund
-Heggins
-Heggestad
-Hegener
-Hegdahl
-Hefter
-Heffernen
-Heery
-Heebsh
-Hedrix
-Hedler
-Hedeiros
-Hedegaard
-Heddleson
-Heddins
-Hect
-Heckle
-Heckers
-Hebsch
-Hebrard
-Heberer
-Hebblethwaite
-Heaviland
-Heartley
-Hearston
-Heang
-Hean
-Heam
-Heagany
-Headlon
-Heading
-Hazouri
-Hazinski
-Hazekamp
-Hayword
-Haysbert
-Hayn
-Hayball
-Hawkings
-Havier
-Havermann
-Havekost
-Hauswald
-Haustein
-Hausteen
-Hauslein
-Hausher
-Haurin
-Hauptly
-Haulbrook
-Haukaas
-Haugaard
-Hauffe
-Hauben
-Hatzell
-Hatto
-Hattenbach
-Hatridge
-Hatlee
-Hathcox
-Hatchette
-Hatcherson
-Hatake
-Hassig
-Hasselvander
-Hasselkus
-Haslinger
-Haskamp
-Hashbarger
-Hasha
-Hasfjord
-Hasencamp
-Haseloff
-Haschke
-Hasbni
-Hasbell
-Hasak
-Harwin
-Harvley
-Harvilchuck
-Harvick
-Harutunian
-Hartzo
-Hartzheim
-Hartjen
-Hartgraves
-Hartgrave
-Hartgerink
-Hartenstein
-Harsy
-Harrisow
-Harrigton
-Harrellson
-Harralson
-Harrald
-Harradine
-Harraden
-Haroun
-Harnly
-Harnes
-Harnar
-Harnan
-Harnack
-Harlston
-Harlor
-Harleston
-Harkenreader
-Harkcom
-Harjochee
-Hargest
-Harges
-Harfert
-Harens
-Hardung
-Hardney
-Hardinson
-Hardigan
-Harby
-Harbus
-Harbough
-Harbottle
-Harbold
-Harary
-Haramoto
-Harader
-Harabedian
-Har
-Happney
-Happe
-Haper
-Hape
-Hanville
-Hanusey
-Hantzarides
-Hantula
-Hanstine
-Hansteen
-Hansson
-Hansrote
-Hansil
-Hanoharo
-Hanock
-Hannula
-Hanno
-Hannem
-Hanneken
-Hannegan
-Hanmore
-Hanisko
-Hanisco
-Hanify
-Hanhan
-Hanegan
-Handt
-Handshaw
-Handschumaker
-Handren
-Handlin
-Handing
-Handeland
-Hanagan
-Hanagami
-Hanafin
-Hanafan
-Hanacek
-Hamway
-Hampon
-Hamper
-Hamparian
-Hamor
-Hamontree
-Hamolik
-Hamnon
-Hamn
-Hammet
-Hammerstein
-Hammerstad
-Hammerlund
-Hammed
-Hammang
-Hameen
-Hamborsky
-Hamb
-Hamalak
-Hamai
-Halwood
-Halston
-Halpainy
-Halon
-Halmstead
-Halmick
-Hallstead
-Hallowich
-Hallio
-Hallie
-Hallerman
-Halleen
-Hallczuk
-Hallan
-Halgren
-Halechko
-Halcom
-Halbritter
-Halaliky
-Hal
-Hajdukiewicz
-Hait
-Haislett
-Hairster
-Hainsey
-Hainds
-Hailes
-Hagwell
-Hagon
-Haghighi
-Haggstrom
-Haggis
-Haggen
-Hageny
-Hagelgans
-Hagarty
-Hafenbrack
-Haessler
-Haessig
-Haerr
-Haener
-Haen
-Haeckel
-Hadson
-Hadland
-Hadian
-Haddaway
-Hackmeyer
-Hackethal
-Hackerd
-Hackenmiller
-Hackenbery
-Hacke
-Hackborn
-Hachette
-Habif
-Habermann
-Haberern
-Habbs
-Haakinson
-Haagensen
-Gzym
-Gyurko
-Gyllenband
-Gyaki
-Gwynes
-Gwenn
-Guzmdn
-Guziczek
-Guz
-Guyott
-Guyot
-Guyet
-Guttenberg
-Gutschow
-Gutreuter
-Gutrerrez
-Gutieres
-Gutiennez
-Guthorn
-Guthary
-Guterriez
-Gutenson
-Gussin
-Gushue
-Gusa
-Gurvine
-Gurtin
-Gurrad
-Gurne
-Guridi
-Gureczny
-Guralnick
-Gunzenhauser
-Gunthrop
-Gunkelman
-Gunagan
-Gun
-Gumphrey
-Gummersall
-Gumbert
-Gulnick
-Gullung
-Gullage
-Gulini
-Gulikers
-Guley
-Guldemond
-Gulde
-Gulbraa
-Gulati
-Guittennez
-Guitreau
-Guith
-Guitar
-Guirgis
-Guinle
-Guiltner
-Guilstorf
-Guillote
-Guillan
-Guilianelli
-Guilbe
-Guiffre
-Guiel
-Guidaboni
-Guiao
-Guialdo
-Guevana
-Guesman
-Guerrouxo
-Guerinot
-Gueretta
-Guenison
-Guenin
-Guempel
-Guemmer
-Guelpa
-Guelff
-Guelespe
-Guedesse
-Gudroe
-Gudat
-Guckes
-Gucciardi
-Gubser
-Gubitosi
-Gubernath
-Gubbins
-Guarracino
-Guarin
-Guariglio
-Guandique
-Guaman
-Gualdoni
-Guadalajara
-Grzywinski
-Grzywacz
-Grzyb
-Grzesiak
-Grygiel
-Gruzinsky
-Gruters
-Grusenmeyer
-Grupa
-Gruninger
-Grunin
-Grundon
-Gruhlke
-Gruett
-Gruesbeck
-Gruell
-Grueber
-Gruda
-Grubman
-Gruba
-Grovier
-Grothen
-Groszkiewicz
-Grossley
-Grossklaus
-Grosshans
-Grosky
-Groshek
-Grosenick
-Groscost
-Grosby
-Groombridge
-Gronvall
-Gromley
-Grollman
-Grohoske
-Groesser
-Groeber
-Grocott
-Grobstein
-Grix
-Grivna
-Gritsch
-Grit
-Gristede
-Grissam
-Grisostomo
-Grisom
-Grishan
-Grip
-Grinner
-Grinman
-Grines
-Grindel
-Grimlie
-Grimard
-Grillette
-Griggers
-Grigas
-Grigalonis
-Grigaliunas
-Grifin
-Griffins
-Griffes
-Griffel
-Grife
-Griesmeyer
-Griesi
-Griem
-Grham
-Grgurevic
-Greyovich
-Greydanus
-Greviston
-Gretzner
-Gretz
-Gretsch
-Greto
-Gresl
-Gresko
-Grengs
-Gremler
-Greist
-Greisser
-Greisiger
-Greiser
-Greiber
-Gregoroff
-Gregoreski
-Gregas
-Greenrose
-Greenlow
-Greenlees
-Greenfelder
-Greenen
-Greenbush
-Greeb
-Grebs
-Grebel
-Greaux
-Grdina
-Gravit
-Gravenstein
-Gravelin
-Grava
-Graul
-Graughard
-Graue
-Grat
-Grastorf
-Grassano
-Grasmuck
-Grashot
-Grasha
-Grappo
-Graper
-Granvil
-Granucci
-Grantier
-Granstaff
-Granroth
-Granizo
-Graniero
-Graniela
-Granelli
-Grandos
-Grandmont
-Gramza
-Graminski
-Gramberg
-Grahams
-Grago
-Graen
-Graefe
-Grae
-Gradle
-Graciani
-Graci
-Grabowiecki
-Grabauskas
-Gounder
-Gougeon
-Goudge
-Gouchie
-Gou
-Gottula
-Gottleber
-Gotthardt
-Gotowka
-Gotlib
-Gotimer
-Gothier
-Gothe
-Goswami
-Gostowski
-Gossin
-Gosserand
-Gossen
-Goshow
-Goshi
-Gosda
-Gosche
-Gorychka
-Gorri
-Gornikiewicz
-Gorlich
-Gorgo
-Gorglione
-Goretti
-Gorence
-Gorelik
-Goreczny
-Gordis
-Gorczynski
-Gorans
-Gootz
-Goosen
-Goonez
-Goolsbee
-Goolia
-Goodvin
-Goodpastor
-Goodgine
-Goodger
-Gooder
-Goodenberger
-Goodaker
-Goodacre
-Gonzolez
-Gonzaliz
-Gonsalues
-Gones
-Gone
-Gondran
-Gonda
-Gonazlez
-Gomzalez
-Gomey
-Gome
-Gomberg
-Golumski
-Goluba
-Goltry
-Goltra
-Golpe
-Golombecki
-Gollwitzer
-Gollogly
-Gollin
-Golkin
-Golk
-Goldware
-Goldrup
-Goldrich
-Goldhammer
-Goldhahn
-Goldfischer
-Goldfield
-Goldeman
-Goldak
-Golberg
-Golba
-Golanski
-Golabek
-Goick
-Gogocha
-Goglia
-Gogins
-Goetzke
-Goettman
-Goettig
-Goetjen
-Goeman
-Goeldner
-Goeken
-Goeden
-Godyn
-Godwyn
-Godown
-Godfray
-Goderich
-Gode
-Godde
-Goda
-Gockerell
-Gochnauer
-Gochie
-Gobrecht
-Gobeyn
-Gobern
-Gobea
-Gobbo
-Gobbi
-Gnagey
-Glugla
-Gluckman
-Gluc
-Glowski
-Glowka
-Glowinski
-Glow
-Glossner
-Gloff
-Gloe
-Glodich
-Gliwski
-Gliues
-Glise
-Glinkerman
-Glimp
-Glicher
-Glenny
-Glembocki
-Gleiss
-Gleichweit
-Gleghorn
-Glaviano
-Glauser
-Glaue
-Glaubke
-Glauberman
-Glathar
-Glasow
-Glashen
-Glasglow
-Glarson
-Glapion
-Glanden
-Glader
-Gladen
-Glacken
-Gjorven
-Gjokaj
-Gjesdal
-Gjelten
-Givliani
-Gitzlaff
-Gittere
-Gitlewski
-Gitchell
-Gissler
-Gisriel
-Gislason
-Girolami
-Girmazion
-Girellini
-Girauard
-Girardeau
-Girad
-Giove
-Gioriano
-Gionson
-Gioacchini
-Ginnetti
-Ginnery
-Ginanni
-Gillom
-Gillmer
-Gillerist
-Gillentine
-Gilhooley
-Gilfoy
-Gilespie
-Gildroy
-Gildore
-Gilcoine
-Gilarski
-Gihring
-Giggie
-Giessinger
-Gierling
-Gielstra
-Giehl
-Giegerich
-Giedlin
-Gieber
-Giebel
-Gidwani
-Gicker
-Gibes
-Gibbings
-Gibbard
-Gianopulos
-Gianola
-Giannell
-Giandelone
-Giancaspro
-Giancarlo
-Gian
-Giamichael
-Giagni
-Giacomazzi
-Giacoletti
-Giachino
-Ghramm
-Ghosten
-Ghiringhelli
-Ghiorso
-Ghil
-Ghia
-Gheza
-Ghekiere
-Gheewala
-Ghazvini
-Ghazi
-Ghazal
-Ghaor
-Ghane
-Ghanayem
-Ghamdi
-Gfroerer
-Geyette
-Gewinner
-Gewant
-Gevorkian
-Gevedon
-Geuder
-Getting
-Gettenberg
-Getschman
-Getachew
-Gestes
-Gesselli
-Geryol
-Gerych
-Gerty
-Gerton
-Gertken
-Gerster
-Gersch
-Gerpheide
-Geronime
-Gerondale
-Gerock
-Germinaro
-Germershausen
-Germer
-Gerlock
-Gerla
-Gerking
-Gerguson
-Geres
-Gerbs
-Gerbi
-Gerathy
-Gerardot
-Georgiana
-Georgales
-Geohagan
-Geoghan
-Geoffrey
-Genualdi
-Gentis
-Gennusa
-Gennaria
-Gennarelli
-Genin
-Genga
-Geng
-Geneseo
-Generous
-Generoso
-Genera
-Genberg
-Gemmel
-Gembe
-Gembarowski
-Gelzer
-Gelo
-Gellis
-Gellespie
-Gell
-Gelineau
-Gelger
-Geldrich
-Gelbach
-Geister
-Geissel
-Geisen
-Geiman
-Geils
-Gehrking
-Gehri
-Gehrett
-Gehred
-Gefroh
-Geerken
-Geelan
-Gedris
-Gedo
-Gechas
-Gecan
-Gebrayel
-Gebers
-Geasley
-Geanopulos
-Gdula
-Gbur
-Gazzillo
-Gazza
-Gazo
-Gaznes
-Gazdecki
-Gayoso
-Gayo
-Gaymes
-Gawlak
-Gavula
-Gavles
-Gaviria
-Gavinski
-Gavigan
-Gaves
-Gavell
-Gavalis
-Gautsch
-Gauron
-Gauntner
-Gaulzetti
-Gattie
-Gatski
-Gatch
-Gata
-Gastelun
-Gastellum
-Gastel
-Gasson
-Gassler
-Gasse
-Gasquet
-Gaspari
-Gasienica
-Gaseoma
-Gasch
-Garzone
-Garverick
-Garve
-Garthee
-Garrod
-Garriss
-Garrish
-Garraghty
-Garnet
-Garness
-Garnder
-Garlovsky
-Gariti
-Garich
-Garibaldo
-Garib
-Gargani
-Garfias
-Garff
-Garf
-Gares
-Garen
-Gardy
-Garder
-Garcelon
-Garced
-Garavelli
-Garala
-Garacci
-Ganze
-Gantewood
-Ganska
-Gannoe
-Ganji
-Ganja
-Ganibe
-Ganiban
-Ganguli
-Gangluff
-Gangadyal
-Gane
-Gandhy
-Gandarillia
-Gancio
-Gana
-Gamrath
-Gamewell
-Gamela
-Gamberini
-Gamberg
-Gambell
-Gambaiani
-Galvano
-Galva
-Galustian
-Galston
-Galstian
-Galson
-Gals
-Galon
-Galofaro
-Gallipo
-Gallery
-Galleno
-Gallegher
-Gallante
-Gallagos
-Gallaga
-Galjour
-Galinoo
-Galinol
-Galin
-Galietti
-Galhardo
-Galfayan
-Galetti
-Galetta
-Galecki
-Galauiz
-Galaska
-Galashaw
-Galarita
-Galanga
-Galacio
-Gailun
-Gailis
-Gaibler
-Gagon
-Gago
-Gagliardotto
-Gaetke
-Gaestel
-Gaekle
-Gadue
-Gades
-Gacusan
-Gacad
-Gabrel
-Gabouer
-Gabisi
-Gabino
-Gabbett
-Gabbay
-Gab
-Gaarsland
-Fyles
-Fventes
-Fusselman
-Fusik
-Fusi
-Fusha
-Fusca
-Furuyama
-Furubotten
-Furton
-Furrh
-Furne
-Furna
-Furlotte
-Furler
-Furkin
-Furfey
-Fure
-Furch
-Furay
-Fupocyupanqui
-Funderbunk
-Fundenberger
-Fulwiler
-Fulsom
-Fullwiler
-Fulliton
-Fulling
-Fuleki
-Fulda
-Fukuroku
-Fukada
-Fuhri
-Fuglsang
-Fugle
-Fugah
-Fuesting
-Fuents
-Fudacz
-Fucile
-Fuchser
-Frydman
-Fryday
-Fruusto
-Frutoz
-Frullate
-Fruchey
-Frossard
-Fross
-Froschheiser
-Froozy
-Fronduti
-Frondorf
-Fron
-Fromong
-Frometa
-Froiland
-Frohwein
-Frohock
-Froeliger
-Frodsham
-Fritzpatrick
-Frist
-Frisino
-Frisella
-Frischkorn
-Fringuello
-Frings
-Friling
-Frikken
-Frietsch
-Friest
-Friedstrom
-Friedhaber
-Friedenberg
-Friedeck
-Fridal
-Freytas
-Freydel
-Freudiger
-Freshley
-Frere
-Frenner
-Freniere
-Fremon
-Fremming
-Freme
-Freligh
-Freistuhler
-Freiser
-Freil
-Freifeld
-Freidkin
-Freidet
-Frehse
-Freguson
-Freerksen
-Freelon
-Freeley
-Freehoffer
-Freedland
-Fredrikson
-Fredric
-Fredline
-Fredicks
-Freddrick
-Frawkin
-Frauenkron
-Frati
-Franzeo
-Frantzich
-Frankina
-Frankford
-Frankenreiter
-Frankenfeld
-Franeo
-Frandeen
-Franculli
-Francolino
-Francoise
-Francisque
-Franciosa
-Francios
-Francione
-Franceski
-Franceschina
-Fram
-Fraine
-Fragassi
-Fracier
-Fraccola
-Frabotta
-Frabizio
-Fouyer
-Foux
-Foutain
-Fourre
-Fouracre
-Found
-Foules
-Foucha
-Fosso
-Fosser
-Fossa
-Fosburgh
-Forwood
-Fortado
-Forston
-Forsthoffer
-Forschner
-Forsch
-Fornkohl
-Fornerod
-Formhals
-Formey
-Formento
-Formato
-Forlani
-Forgy
-Forgach
-Fordon
-Forcino
-Forcell
-Forcade
-Forbish
-Forber
-Fontneau
-Fontelroy
-Fonteboa
-Fontanini
-Fonsecn
-Fondell
-Fon
-Follie
-Foller
-Folkins
-Folkens
-Folgar
-Foks
-Fogus
-Fogo
-Foerschler
-Foell
-Foecke
-Foderaro
-Foddrill
-Focks
-Flum
-Flugence
-Fluette
-Fluetsch
-Flueck
-Flournay
-Flotow
-Flota
-Florkowski
-Florestal
-Florance
-Floore
-Floerchinger
-Flodman
-Floch
-Flitton
-Flitt
-Flister
-Flinton
-Flinspach
-Flierl
-Flever
-Fleurissaint
-Fleurantin
-Flether
-Flennoy
-Fleitman
-Flegler
-Fleak
-Flautt
-Flaum
-Flasher
-Flaminio
-Fixari
-Fiumefreddo
-Fitzmier
-Fitzgerlad
-Fitzen
-Fittje
-Fitser
-Fitchette
-Fisichella
-Fisger
-Fischbein
-Fischang
-Fiscal
-Fisanick
-Firoozbakht
-Firlik
-Firkey
-Fiorenzi
-Fiora
-Finucan
-Finto
-Finona
-Finocan
-Finnley
-Finnin
-Finnila
-Finni
-Finnel
-Finne
-Finland
-Finkenbiner
-Finey
-Finders
-Filzen
-Filyan
-Filteau
-Filonuk
-Fillo
-Fillerup
-Filkey
-Filippides
-Filippello
-Filburn
-Filbrardt
-Filbey
-Filary
-Filarecki
-Filak
-Fijalkowski
-Figurelli
-Figone
-Figlioli
-Figlar
-Figary
-Figarsky
-Fiermonte
-Fierge
-Fiely
-Fieldstadt
-Fiedtkou
-Fiedorowicz
-Fiebich
-Fie
-Fidsky
-Fido
-Ficenec
-Feyler
-Fewless
-Feulner
-Feuerberg
-Fetui
-Fetrow
-Fesus
-Fesenbek
-Ferugson
-Ferster
-Ferrise
-Ferratt
-Ferratella
-Ferrarotti
-Ferrarini
-Ferrao
-Ferrandino
-Ferrall
-Ferracioli
-Feron
-Ferndez
-Fernandz
-Fermo
-Ferm
-Ferlic
-Ferjerang
-Feris
-Ferentz
-Fereday
-Ferdin
-Ferdico
-Ferderer
-Ferard
-Feramisco
-Fenti
-Fensel
-Fenoglio
-Fenoff
-Feno
-Fenniwald
-Fenger
-Fenceroy
-Felzien
-Felson
-Felsher
-Fellon
-Felli
-Fellhauer
-Fellenbaum
-Felleman
-Fellars
-Felks
-Felipa
-Felila
-Felico
-Felicione
-Felger
-Feldtman
-Feldner
-Feldker
-Feldhake
-Felciano
-Felcher
-Fekety
-Feindt
-Feinblatt
-Feilbach
-Feikles
-Feigh
-Feichtner
-Fehribach
-Fehnel
-Fehn
-Fegurgur
-Fego
-Fefer
-Feezor
-Feery
-Feerst
-Feeling
-Feekes
-Feduniewicz
-Feduccia
-Fedorka
-Fedoriw
-Fedorczyk
-Fedel
-Feddes
-Fedderly
-Fechtel
-Fecat
-Feazelle
-Feast
-Fearheller
-Fearen
-Feamster
-Fealy
-Fazzinga
-Fawell
-Favilla
-Favieri
-Favaron
-Favaro
-Faustman
-Faurot
-Faur
-Faulstick
-Faulstich
-Faulkes
-Faulkenbury
-Faulisi
-Faubus
-Fat
-Faster
-Fash
-Fasenmyer
-Fasci
-Fasbender
-Faruolo
-Farrin
-Farria
-Farrauto
-Farmsworth
-Farmar
-Farm
-Farlee
-Fariello
-Farid
-Farha
-Fardo
-Faraco
-Fantz
-Fanner
-Famy
-Famiano
-Fam
-Falu
-Faltz
-Falto
-Falson
-Fallie
-Fallick
-Falla
-Falknor
-Falkenthal
-Falis
-Falha
-Falge
-Falconeri
-Falcione
-Falchi
-Falb
-Falasco
-Falah
-Falack
-Falacco
-Faix
-Faisca
-Fairy
-Fairly
-Faigle
-Faichtinger
-Fahrenwald
-Fahrenbruck
-Fahner
-Fahlstedt
-Fagnoni
-Faglie
-Fagala
-Faehnle
-Fadri
-Fadei
-Facenda
-Fabus
-Fabroquez
-Fabello
-Fabeck
-Fabbozzi
-Ezernack
-Ezer
-Ezechu
-Ezdebski
-Eyubeh
-Eyermann
-Extine
-Expose
-Ewelike
-Evora
-Eviston
-Evertz
-Eversmann
-Everleth
-Evering
-Eveline
-Eveler
-Evanski
-Evanosky
-Evanoski
-Evanchyk
-Evanchalk
-Euton
-Euser
-Eurton
-Europe
-Ettl
-Ettison
-Etters
-Etoll
-Ethel
-Etchinson
-Esty
-Esteybar
-Estevane
-Esterson
-Esterling
-Estergard
-Estela
-Estaban
-Esshaki
-Essepian
-Esselman
-Essaid
-Essaff
-Esquiuel
-Esquerre
-Esquea
-Esposita
-Espenscheid
-Esparaza
-Esoimeme
-Esnard
-Eskuchen
-Eskelsen
-Eskeets
-Eskaran
-Eskaf
-Eshlerman
-Esenwein
-Escorza
-Escoe
-Escobeo
-Eschenbacher
-Eschenbach
-Eschborn
-Escarrega
-Escalet
-Esbensen
-Esannason
-Ervine
-Ervay
-Ertelt
-Erpenbach
-Ero
-Ernstrom
-Ernspiker
-Ernandez
-Ermogemous
-Ermita
-Erm
-Erlwein
-Erlanson
-Erixon
-Erice
-Erfert
-Ereth
-Erdmun
-Erdelt
-Erchul
-Ercek
-Erbentraut
-Erard
-Eracleo
-Equiluz
-Eppert
-Epperheimer
-Eppenger
-Epifano
-Eperson
-Enzenauer
-Entzi
-Entrup
-Entel
-Enote
-Enocencio
-Enny
-Ennist
-Ennels
-Ennaco
-Enkerud
-Enick
-Engwer
-Engleby
-Enget
-Engessor
-Engerman
-Engbretson
-Enfort
-Ends
-Endresen
-Endecott
-Encalade
-Emuka
-Emslander
-Emshoff
-Empleo
-Empfield
-Emperor
-Emo
-Emmrich
-Emlin
-Emigholz
-Emfield
-Emeru
-Emeche
-Emdee
-Emberlin
-Emberley
-Emberger
-Emayo
-Emanus
-Emami
-Elvert
-Elshair
-Elsensohn
-Elsbury
-Elsa
-Elroy
-Elquist
-Elofson
-Elmaghrabi
-Ellworths
-Ellifritt
-Ellies
-Elliem
-Ellerkamp
-Ellerbeck
-Ellenbee
-Ellena
-Ellebrecht
-Elldrege
-Ellanson
-Elko
-Elkayam
-Eliszewski
-Eliseo
-Elis
-Elion
-Elhosni
-Elhassan
-Elhaj
-Elhaddad
-Elgen
-Elgas
-Elgar
-Elg
-Elftman
-Elfering
-Elewa
-Eleveld
-Elefritz
-Elbogen
-Elbertson
-Elberson
-Elbahtity
-Elahi
-Ekstrum
-Eklov
-Ekis
-Ejide
-Eissinger
-Eirls
-Einfeldt
-Eilts
-Eilders
-Eilbert
-Eilbeck
-Eikmeier
-Eifler
-Eiesland
-Eichstadt
-Eichenmiller
-Eichenauer
-Eichelmann
-Ehr
-Ehorn
-Ehnis
-Ehmen
-Ehleiter
-Ehinger
-Ehiginator
-Ehigiator
-Egvirre
-Egure
-Eguizabal
-Ego
-Egidio
-Eggenberg
-Eggart
-Eget
-Egertson
-Egbe
-Efrati
-Eflin
-Eerkes
-Ee
-Edwads
-Edster
-Edralin
-Edmerson
-Edmeier
-Edleston
-Edlao
-Edith
-Edis
-Edeline
-Edeker
-Economus
-Economides
-Ecoffey
-Eckrote
-Eckmeyer
-Eckle
-Ecklar
-Eckis
-Echemendia
-Echavez
-Echaure
-Ebrani
-Ebo
-Ebilane
-Ebesugawa
-Eberting
-Ebersol
-Eberline
-Eberl
-Ebenstein
-Eben
-Ebbesen
-Ebach
-Easom
-Easlick
-Easker
-Easey
-Easdon
-Earman
-Earll
-Earlgy
-Earenfight
-Earehart
-Ealley
-Ealick
-Eagy
-Eafford
-Dziurawiec
-Dzierzanowski
-Dziegielewski
-Dziduch
-Dziadek
-Dzama
-Dyser
-Dys
-Dyreson
-Dymke
-Dyen
-Dwyar
-Dwornik
-Dwellingham
-Duxbury
-Duwhite
-Duverney
-Duvel
-Dutschmann
-Dutel
-Dute
-Dusak
-Durun
-Dursch
-Durrwachter
-Durousseau
-Durol
-Durig
-Durett
-Duresky
-Durelli
-Duree
-Dural
-Duraku
-Dupouy
-Duplin
-Duplesis
-Duplaga
-Dupaty
-Duonola
-Dunzelman
-Dunten
-Dunt
-Dunster
-Dunnahoo
-Dunmead
-Dunks
-Dunkentell
-Dunemn
-Duncker
-Dunckel
-Dunahoo
-Dummitt
-Dumez
-Dumag
-Dulberg
-Dulatre
-Dukhovny
-Dukeshire
-Dukeshier
-Duitscher
-Duitch
-Duh
-Dugmore
-Dughi
-Duffus
-Duffany
-Dufer
-Duesenberg
-Duerkson
-Duerkop
-Duenke
-Duel
-Dudleson
-Dudik
-Duderstadt
-Dudack
-Duchow
-Duchesney
-Duchatellier
-Ducceschi
-Ducayne
-Ducay
-Ducatelli
-Dubonnet
-Duberstein
-Dubej
-Dubeck
-Dubeau
-Dubbin
-Duban
-Duball
-Duartes
-Dsaachs
-Dryman
-Drybread
-Drumwright
-Drumheiser
-Drumgole
-Drullard
-Drue
-Drude
-Druckhammer
-Dru
-Drought
-Drossos
-Drossman
-Droski
-Drong
-Drones
-Dronen
-Droegmiller
-Drock
-Drisdelle
-Drinkall
-Drimmer
-Driggins
-Driesel
-Driere
-Drewski
-Dreps
-Dreka
-Dreith
-Dregrich
-Dreggs
-Drawy
-Drawec
-Dravland
-Drape
-Dramis
-Drainer
-Dragun
-Dragt
-Dragotta
-Dragaj
-Drafton
-Drafall
-Drader
-Draa
-Dozois
-Dozar
-Doyan
-Doxon
-Dowsett
-Dovenmuehler
-Douyon
-Douvier
-Douvia
-Douthart
-Doussan
-Dourado
-Doulani
-Douillet
-Dougharity
-Dougall
-Douet
-Dou
-Dotto
-Dottery
-Dotstry
-Doto
-Dotie
-Doswell
-Doskocil
-Doseck
-Dorweiler
-Dorvillier
-Dorvee
-Dortilla
-Dorsainvil
-Dorrian
-Dorpinghaus
-Dorph
-Dorosan
-Dornseif
-Dornhelm
-Dornellas
-Dorne
-Dornbos
-Dormanen
-Dormane
-Doriean
-Dorer
-Dorcent
-Dorat
-Dopf
-Dootson
-Doornbos
-Dooney
-Donten
-Dontas
-Donota
-Donohve
-Donning
-Donnellon
-Donne
-Donmore
-Donkor
-Donkervoet
-Donhoe
-Dongo
-Donelon
-Donchatz
-Donawa
-Donar
-Domnick
-Domkowski
-Domio
-Dominis
-Dominiquez
-Dominicus
-Dominico
-Domingus
-Domianus
-Domas
-Dolven
-Dolliver
-Doljac
-Doliveira
-Dolhon
-Dolgas
-Dolfay
-Dolcetto
-Dokuchitz
-Doino
-Doiel
-Doffing
-Doerflinger
-Doepner
-Doelling
-Dodich
-Doderer
-Dockray
-Dockett
-Docker
-Docimo
-Dobre
-Dobrasz
-Dobmeier
-Dobesh
-Dobberfuhl
-Dobb
-Dmitriev
-Dlobik
-Dlabaj
-Djuric
-Dizadare
-Divento
-Divan
-Diulio
-Ditti
-Dittbrenner
-Ditta
-Ditolla
-Ditchfield
-Distilo
-Distance
-Disponette
-Dispirito
-Dishinger
-Discon
-Disarufino
-Disabato
-Diruzzo
-Dirose
-Dirollo
-Dirado
-Dippery
-Dionisopoulos
-Diones
-Dinunzio
-Dinucci
-Dinovo
-Dinovi
-Dinola
-Dinho
-Dings
-Dinglasan
-Dingel
-Dinco
-Dimperio
-Dimoulakis
-Dimopoulos
-Dimmack
-Dimling
-Dimitriou
-Dimes
-Dilthey
-Dilox
-Dillworth
-Dillmore
-Dilligard
-Dilleshaw
-Dilgard
-Dilda
-Dilcher
-Dilchand
-Dikkers
-Diket
-Dikens
-Digrazia
-Digness
-Digiorgi
-Digiambattist
-Digesare
-Difiora
-Diffendal
-Diewold
-Dietsche
-Diestel
-Diesen
-Dien
-Diemoz
-Dielman
-Diegidio
-Diedricks
-Diebol
-Didlake
-Didamo
-Dickun
-Dickstein
-Dickirson
-Dickins
-Dicioccio
-Diciano
-Dichristopher
-Dicaro
-Dicara
-Dibrino
-Dibenedict
-Diamico
-Diak
-Diachenko
-Dhosane
-Dezell
-Dezayas
-Deyette
-Deyarmond
-Deyarmin
-Dewyer
-Dewulf
-Dewit
-Dewinne
-Dewaratanawan
-Devreese
-Devitto
-Devincenzi
-Devick
-Devey
-Devenecia
-Devel
-Deuschle
-Deuschel
-Deuman
-Deuermeyer
-Detz
-Deturenne
-Dettra
-Dettore
-Dettmering
-Dettmann
-Detterich
-Detorres
-Detlefs
-Detjen
-Detillier
-Dethomasis
-Detering
-Detar
-Desutter
-Destime
-Destephano
-Desrocher
-Desquare
-Desporte
-Desparrois
-Desort
-Desormo
-Desorbo
-Desolier
-Desmarias
-Desloge
-Deslaurier
-Desjardiws
-Desiyatnikov
-Desisles
-Desilvo
-Desiato
-Deshazior
-Desforges
-Deserres
-Deschomp
-Deschino
-Deschambeault
-Desautelle
-Desantigo
-Desan
-Deruso
-Derubeis
-Derriso
-Derricott
-Derrer
-Deroos
-Deroko
-Deroin
-Deroest
-Derobles
-Dernier
-Dermo
-Derkach
-Derizzio
-Deritis
-Derion
-Deriggi
-Dergurahian
-Dereu
-Derer
-Derenzis
-Derenthal
-Derensis
-Derendal
-Derenberger
-Deremiah
-Deraveniere
-Deramo
-Deralph
-Depsky
-Deprizio
-Deprince
-Deprez
-Depratt
-Depottey
-Depippo
-Depinho
-Depietro
-Depetris
-Deperte
-Depena
-Depaulis
-Depasse
-Depace
-Deonarian
-Deodato
-Denski
-Densieski
-Denoyelles
-Denofrio
-Denni
-Dennert
-Denna
-Deniken
-Denier
-Denice
-Denhartog
-Dench
-Dence
-Denburger
-Denafo
-Demyers
-Demulling
-Demuizon
-Demosthenes
-Demoney
-Demonett
-Demmon
-Demich
-Demian
-Demetris
-Demetree
-Demeris
-Demchok
-Dembosky
-Dembinski
-Dember
-Demauri
-Dematos
-Demasters
-Demarrais
-Demarini
-Demarc
-Demara
-Delvin
-Delveechio
-Delusia
-Deluney
-Deluccia
-Delre
-Delpiano
-Delosanglel
-Delosangeles
-Delon
-Delnegro
-Dellos
-Dellon
-Delling
-Dellibovi
-Dellasciucca
-Dellasanta
-Dellapina
-Dellajacono
-Dellagatta
-Dellaca
-Deliso
-Delinois
-Delilli
-Delilla
-Deliberato
-Delhomme
-Delguercio
-Delger
-Delgadilo
-Delfi
-Delfelder
-Deley
-Delevik
-Delettre
-Delessio
-Deleonardo
-Delellis
-Delehoy
-Delegeane
-Deldeo
-Delcine
-Delbusto
-Delbrune
-Delbrocco
-Delbo
-Delasko
-Delashaw
-Delasancha
-Delaremore
-Delaplane
-Delapenha
-Delanoche
-Delalla
-Delaguila
-Delaglio
-Dekuyper
-Dekort
-Dekorne
-Deklerk
-Dekine
-Dejoode
-Dejes
-Dejarme
-Dejager
-Deja
-Deischer
-Deir
-Deighton
-Deidrick
-Deida
-Deible
-Dehrer
-Dehombre
-Dehler
-Dehghani
-Dehan
-Dehaemers
-Degunya
-Deguise
-Degrella
-Degrazio
-Degrandpre
-Degori
-Degolyer
-Deglopper
-Deglanville
-Degado
-Defrates
-Defrancis
-Defranceschi
-Defouw
-Defiguero
-Defiglio
-Defide
-Defaria
-Deeters
-Dedominicis
-Dedo
-Dedier
-Dedek
-Deculus
-Decroo
-Decree
-Decourley
-Decomo
-Declouette
-Declet
-Declark
-Deckelman
-Dechart
-Dechamplain
-Decasanova
-Decardo
-Decardenas
-Decann
-Decaneo
-Debrita
-Debrie
-Debraga
-Debnar
-Debiew
-Debes
-Debenham
-Debello
-Debarba
-Deback
-Dearstyne
-Dearco
-Deanne
-Deanhardt
-Deamer
-Deaguero
-Daylong
-Daya
-Dawber
-Dawahoya
-Davydov
-Davtyan
-Davos
-Davirro
-Davidek
-Davide
-Davers
-Davensizer
-Davel
-Davda
-Dauzart
-Daurizio
-Dauila
-Daughetee
-Dauge
-Daufeldt
-Daudier
-Daubenmire
-Daty
-Datu
-Datte
-Dastoli
-Daste
-Dasso
-Daskam
-Dasinger
-Dasalia
-Daryanl
-Darvile
-Darsi
-Darsch
-Darrup
-Darnel
-Darm
-Darjean
-Dargenio
-Darey
-Dardashti
-Dardagnac
-Darbro
-Darbeau
-Daramola
-Daquip
-Dapvaala
-Danza
-Dantoni
-Dantes
-Danoski
-Danns
-Dannecker
-Danfield
-Danella
-Danczak
-Dancoes
-Damphousse
-Damoth
-Damoro
-Dammrich
-Dammad
-Damis
-Damerell
-Dambrozio
-Dama
-Daltorio
-Dalponte
-Dalomba
-Dalmida
-Dalmau
-Dallen
-Dalla
-Dalitz
-Dalio
-Dalhart
-Daleus
-Dalene
-Dalee
-Dalbeck
-Dalaq
-Dair
-Daimaru
-Daill
-Daichendt
-Dahood
-Dahlstedt
-Dahley
-Dahler
-Dagnone
-Dagnon
-Dagner
-Daggy
-Daer
-Dae
-Dadds
-Daddea
-Daddabbo
-Dad
-Dacres
-Dachs
-Dachelet
-Daber
-Czyrnik
-Czwakiel
-Czupryna
-Czubia
-Czosek
-Czernovski
-Czerno
-Czernik
-Czerniak
-Czekaj
-Czarniecki
-Cyler
-Cychosz
-Cuzzo
-Cuva
-Cutri
-Cutone
-Cutia
-Cutburth
-Cusworth
-Custa
-Cusmano
-Cushway
-Cushinberry
-Cusher
-Cushen
-Cushard
-Cusatis
-Curzi
-Curylo
-Curriere
-Currans
-Curra
-Curpupoz
-Curls
-Curleyhair
-Curella
-Cureau
-Curameng
-Cupe
-Cunningan
-Cunnane
-Cummisky
-Cummer
-Cumley
-Cumblidge
-Culotti
-Cullin
-Culajay
-Cujas
-Cuez
-Cuddihee
-Cudan
-Cuchiara
-Cuccinello
-Cucchiaro
-Cuartas
-Cuaresma
-Cuadro
-Csensich
-Cruthirds
-Cruthers
-Crutchev
-Crutch
-Crummedyo
-Crumlish
-Cruiz
-Cruey
-Cruel
-Croxford
-Croxen
-Crowin
-Croutch
-Croushorn
-Crotwell
-Crother
-Croslen
-Crookston
-Cronholm
-Cronauer
-Cromeens
-Crogier
-Croffie
-Crocitto
-Critzman
-Criton
-Critchelow
-Cristofaro
-Cristello
-Cristelli
-Crissinger
-Crispo
-Criqui
-Crickenberger
-Cressell
-Cresencio
-Creglow
-Creggett
-Creenan
-Creeley
-Credo
-Credille
-Crease
-Crawn
-Cravenho
-Cravatta
-Cration
-Crantz
-Cragar
-Cragan
-Cracolici
-Cracknell
-Craawford
-Craan
-Cozadd
-Coyier
-Cowser
-Cowns
-Cowder
-Covotta
-Covitt
-Covil
-Covarruvia
-Covarrubio
-Covarrubia
-Covar
-Cova
-Coutino
-Cousey
-Courtoy
-Courtad
-Couron
-Courneya
-Courie
-Couret
-Courchine
-Countis
-Counceller
-Cottillion
-Cottengim
-Cotroneo
-Cotreau
-Cotheran
-Cotey
-Coteat
-Cotant
-Coswell
-Costenive
-Costellowo
-Costeira
-Costanzi
-Cossaboon
-Cossaboom
-Cosimini
-Cosier
-Cosca
-Cosano
-Corvelli
-Corti
-Cortesi
-Corsilles
-Corsey
-Corseri
-Corron
-Corridoni
-Corrett
-Correo
-Corren
-Correau
-Corraro
-Corporon
-Corporal
-Corpeno
-Corolla
-Corolis
-Cornes
-Cornelson
-Cornea
-Cornacchio
-Cormican
-Cormia
-Coriz
-Coric
-Coriaty
-Coriano
-Corderman
-Cordel
-Corde
-Cordasco
-Corburn
-Corallo
-Coradi
-Coponen
-Coples
-Copier
-Copa
-Coopey
-Coonley
-Coomey
-Coolbrith
-Coolbeth
-Coolahan
-Cookey
-Coogen
-Cooey
-Cooch
-Conze
-Conzalez
-Contreros
-Contreres
-Contras
-Contraras
-Contopoulos
-Contofalsky
-Contino
-Consoli
-Consigli
-Conoly
-Connyer
-Conninghan
-Connette
-Connerty
-Connarton
-Conlans
-Conkrite
-Confrey
-Confair
-Coneys
-Conelly
-Conejo
-Condreay
-Condino
-Condell
-Condelario
-Concini
-Concilio
-Concho
-Conces
-Concepion
-Conceicao
-Conable
-Compres
-Compiseno
-Compeau
-Compean
-Comparoni
-Companie
-Compagna
-Comoletti
-Commes
-Comment
-Comeauy
-Colyott
-Columbres
-Colsch
-Colpaert
-Colpack
-Colorina
-Colopy
-Colonnese
-Colona
-Colomy
-Colombe
-Colomba
-Colmer
-Colly
-Collozo
-Collova
-Collora
-Collmeyer
-Collaco
-Colian
-Colglazier
-Colehour
-Colebrook
-Coldsmith
-Colden
-Colato
-Colasanti
-Colasamte
-Colarossi
-Colander
-Colaizzo
-Colaiacovo
-Coladonato
-Colacone
-Colabrese
-Cokins
-Cohoe
-Coho
-Cohlmia
-Cohagan
-Cogen
-Cofrancesco
-Cofran
-Codey
-Codeluppi
-Cocran
-Cocozza
-Cocoran
-Cocomazzi
-Cockrin
-Cockreham
-Cocking
-Cochis
-Cocherell
-Coccoli
-Cobio
-Cobane
-Coatley
-Coatie
-Coant
-Coaker
-Coachys
-Cmiel
-Clozza
-Cloughly
-Clothey
-Closovschi
-Closey
-Cloman
-Cloffi
-Cloepfil
-Clites
-Clinker
-Cleverly
-Cleve
-Clesen
-Clery
-Clerf
-Clemson
-Clemo
-Clemmon
-Clemmo
-Clemmey
-Cleark
-Clayter
-Clavey
-Clavelle
-Clausel
-Claud
-Claucherty
-Claton
-Clarson
-Clarendon
-Clarbour
-Clar
-Clap
-Clanin
-Clan
-Claman
-Clam
-Claes
-Civitello
-Civcci
-Civatte
-Civale
-Ciucci
-Cito
-Cisneroz
-Cislo
-Cisewski
-Cirioni
-Cirilli
-Cipullo
-Cippina
-Cipolone
-Cipolloni
-Cioni
-Cintra
-Cinkosky
-Cinalli
-Cimmiyotti
-Cimeno
-Cilva
-Cills
-Ciliento
-Cilibrasi
-Cilfone
-Ciesiolka
-Ciersezwski
-Cierpke
-Cierley
-Cieloha
-Cicio
-Cichosz
-Cichonski
-Cicconi
-Cibulskas
-Ciaramitaro
-Ciano
-Cianciotta
-Ciampanella
-Cialella
-Ciaccia
-Chwieroth
-Chwalek
-Chvilicek
-Chuyangher
-Churner
-Churchville
-Chuppa
-Chupik
-Chukri
-Chuh
-Chudzinski
-Chudzik
-Chudej
-Chrones
-Chroman
-Christoffer
-Christmau
-Christle
-Christaldi
-Christal
-Chrispen
-Chriscoe
-Chown
-Chowen
-Chowanec
-Chounlapane
-Choulnard
-Chott
-Chopelas
-Chomicki
-Chomali
-Choen
-Chodorov
-Chmelik
-Chludzinski
-Chivalette
-Chiv
-Chiumento
-Chittom
-Chisnall
-Chischilly
-Chisari
-Chirdon
-Chirasello
-Chipp
-Chiotti
-Chionchio
-Chioma
-Chinweze
-Chinskey
-Chinnis
-Chinni
-Chindlund
-Chimeno
-Chilinskas
-Childes
-Chikko
-Chihak
-Chiffriller
-Chieves
-Chieng
-Chiavaroli
-Chiara
-Chiapetto
-Chiaminto
-Chhor
-Chhon
-Chheng
-Chhabra
-Cheyney
-Chey
-Chevres
-Chetelat
-Chet
-Chestand
-Chessor
-Chesmore
-Chesick
-Chesanek
-Cherwinski
-Chervin
-Cherven
-Cherrie
-Chernick
-Chernay
-Cherchio
-Cheon
-Chenevey
-Chenet
-Chenauls
-Chenaille
-Chemin
-Chemell
-Chegwidden
-Cheffer
-Chefalo
-Chebret
-Chebahtah
-Cheas
-Chaven
-Chavayda
-Chautin
-Chauhdrey
-Chauffe
-Chaudet
-Chatterson
-Chatriand
-Chaton
-Chastant
-Chass
-Chasnoff
-Chars
-Charnoski
-Charleton
-Charle
-Charisse
-Charif
-Charfauros
-Chareunsri
-Chareunrath
-Charbonnel
-Chappan
-Chaples
-Chaplean
-Chapko
-Chaobal
-Chanthaumlsa
-Chantha
-Chanofsky
-Chanel
-Chandsawangbh
-Chandronnait
-Chandrasekhar
-Chandrasekara
-Chandier
-Chanchuan
-Chananie
-Chanady
-Champy
-Champany
-Chamley
-Chamers
-Chamble
-Chamberlian
-Chalow
-Chaloner
-Chalita
-Chalaban
-Chajon
-Chais
-Chaim
-Chaille
-Chaidy
-Chagollan
-Chafe
-Chadsey
-Chaderton
-Chabotte
-Cezil
-Cersey
-Cerritelli
-Ceronsky
-Ceroni
-Cernansky
-Cerenzia
-Cereghino
-Cerdan
-Cerchia
-Cerbantes
-Cerao
-Ceranski
-Centrone
-Centorino
-Censky
-Ceman
-Cely
-Celuch
-Cellupica
-Cellio
-Celani
-Cegla
-Cedars
-Ceasor
-Cearlock
-Cazzell
-Cazeault
-Caza
-Cavezon
-Cavalli
-Cavaleri
-Cavaco
-Cautillo
-Cauthorne
-Caulley
-Caughran
-Cauchon
-Catucci
-Cattladge
-Cattabriga
-Catillo
-Cathers
-Catenaccio
-Catena
-Catani
-Catalli
-Catacun
-Casumpang
-Casuat
-Castrovinci
-Castronova
-Castoral
-Castiola
-Castin
-Castillero
-Castillejo
-Castera
-Castellanoz
-Castellaneta
-Castelan
-Castanio
-Castanado
-Castagnier
-Cassis
-Cassion
-Cassello
-Casseday
-Cassase
-Cassarubias
-Cassard
-Cassaday
-Caspary
-Caspar
-Casoria
-Casilles
-Casile
-Casida
-Cashing
-Casgrove
-Caseman
-Caselton
-Casello
-Caselden
-Cascia
-Casario
-Casareno
-Casarella
-Casamayor
-Casaliggi
-Casalenda
-Casagranda
-Casabona
-Carza
-Caryk
-Carvett
-Carthew
-Carther
-Carthens
-Cartaya
-Cartan
-Carsno
-Carscallen
-Carrubba
-Carroca
-Carril
-Carrigg
-Carridine
-Carrelli
-Carraturo
-Carratura
-Carras
-Carransa
-Carrahan
-Carpente
-Carpenito
-Caroway
-Carota
-Caronna
-Caroline
-Carnoske
-Carnohan
-Carnighan
-Carnie
-Carnahiba
-Carmichel
-Carmello
-Carlsley
-Carlington
-Carleo
-Cariveau
-Caristo
-Carillion
-Carilli
-Caridine
-Cariaso
-Cardoni
-Cardish
-Cardino
-Cardinas
-Cardenos
-Cardejon
-Cardeiro
-Carco
-Carbal
-Caravalho
-Caraher
-Caradonna
-Caracso
-Caracciola
-Capshaws
-Caprice
-Capriccioso
-Capraro
-Cappaert
-Caposole
-Capitani
-Capinpin
-Capiga
-Capezzuto
-Capetl
-Capestany
-Capels
-Capellas
-Caparoula
-Caparelli
-Capalongan
-Capaldo
-Canu
-Cantre
-Cantoral
-Cantfield
-Cantabrana
-Canori
-Cannuli
-Canestro
-Canestrini
-Canerday
-Canellas
-Canella
-Candon
-Cancer
-Canatella
-Canak
-Cana
-Campolongo
-Campagnone
-Campagnini
-Campagne
-Camon
-Cammarn
-Caminita
-Camidge
-Cambronne
-Cambric
-Cambero
-Camaron
-Calzone
-Calzadilla
-Calver
-Calvent
-Calvelo
-Calvaruso
-Calvaresi
-Calpin
-Calonsag
-Calonne
-Caloca
-Calligy
-Callez
-Calleo
-Callaro
-Calixtro
-Caliguire
-Caligari
-Calicut
-Caler
-Calderson
-Caldarone
-Calchera
-Calcagino
-Calaycay
-Calamarino
-Calamari
-Calamare
-Cakanic
-Cajune
-Cajucom
-Cajero
-Cainion
-Cainglit
-Caiafa
-Cagey
-Cafourek
-Caffarel
-Cafarella
-Cafagno
-Cadoy
-Cadmen
-Cader
-Cademartori
-Cackett
-Cacibauda
-Caci
-Cacciola
-Cabrar
-Cabla
-Cabiya
-Cabido
-Cabeza
-Cabellon
-Cabeceira
-Cabanes
-Cabag
-Bzhyan
-Byther
-Byro
-Byrley
-Byrdsong
-Bynd
-Bylund
-Byant
-Bverger
-Buzzelle
-Buzzanca
-Buyes
-Buyak
-Buvens
-Buttino
-Buttimer
-Buttari
-Buttaccio
-Buther
-Butel
-Buszak
-Bustinza
-Bussom
-Busskohl
-Bussink
-Bussinger
-Bussert
-Busselberg
-Bussani
-Busl
-Buskohl
-Busie
-Bushie
-Busenius
-Buseck
-Buscarino
-Busacker
-Burwick
-Burtin
-Burriesci
-Burreson
-Burnum
-Burnet
-Burneisen
-Burnaman
-Burlette
-Burlando
-Burki
-Burker
-Burkel
-Burka
-Burigsay
-Burhanuddin
-Burgen
-Burgbacher
-Buretta
-Buress
-Burdsall
-Burdis
-Burdi
-Burdg
-Burbano
-Bur
-Buquo
-Buontempo
-Buonadonna
-Bunzey
-Bunyea
-Buntain
-Bunkers
-Bungy
-Bungart
-Bunetta
-Bunes
-Bundley
-Bundette
-Bumm
-Bumbray
-Bumba
-Bumatay
-Bulwinkle
-Bultron
-Bulnes
-Bullo
-Bullmore
-Bullerwell
-Bullert
-Bullara
-Bulland
-Bulkin
-Bulgarella
-Bulacan
-Bukrim
-Bukowinski
-Bujol
-Buja
-Buike
-Buhoveckey
-Buhite
-Bugtong
-Bugler
-Bugenhagen
-Bugayong
-Bugarewicz
-Bufton
-Buetti
-Buess
-Buerstatte
-Buergel
-Buerge
-Buer
-Buena
-Buegler
-Bueggens
-Buecher
-Budzyna
-Budz
-Budworth
-Budesa
-Buddle
-Budden
-Buddemeyer
-Buckridge
-Buckreis
-Buckmiller
-Bucke
-Buchser
-Buchsbaum
-Buchs
-Buchna
-Buchheim
-Buchberger
-Bucchin
-Bucanan
-Bubbico
-Buanno
-Bual
-Brzycki
-Brzostowski
-Bryum
-Brynga
-Brynestad
-Bryar
-Bruzewicz
-Bruyn
-Bruun
-Brutlag
-Bruson
-Bruski
-Bruse
-Brusco
-Bruscino
-Brunsting
-Brunskill
-Brunow
-Brunnemer
-Brunderman
-Brunckhorst
-Brunback
-Brumbley
-Bruh
-Brugal
-Bruenderman
-Bruegman
-Brucie
-Brozyna
-Brozell
-Brownsworth
-Brownsword
-Brownsberger
-Browley
-Brous
-Brounson
-Broumley
-Brostoff
-Brossmann
-Brosig
-Broschinsky
-Broomell
-Brookshier
-Brooklyn
-Bronikowski
-Brondyke
-Bromberek
-Brombach
-Brokins
-Broking
-Brojakowski
-Broich
-Brogren
-Brogglin
-Brodhurst
-Brodhag
-Brodey
-Brocklebank
-Brockie
-Brockell
-Brochure
-Brochhausen
-Broccolo
-Brixius
-Brittsan
-Brits
-Britnell
-Brisley
-Brisbone
-Briola
-Brintnall
-Bringman
-Bringas
-Bringantino
-Brinckerhoff
-Briguglio
-Briggerman
-Brigg
-Brigantino
-Briehl
-Brieger
-Bridson
-Bridjmohan
-Bridgford
-Bridget
-Bridgens
-Bridendolph
-Briden
-Briddick
-Bricknell
-Brickles
-Brichetto
-Briare
-Brez
-Brevitz
-Brevil
-Breutzmann
-Breuning
-Bretl
-Brethour
-Bretana
-Bresolin
-Breslawski
-Brentnall
-Brentano
-Brensnan
-Brensinger
-Brensel
-Brenowitz
-Brennenstuhl
-Brengle
-Brendlinger
-Brenda
-Brend
-Brence
-Brenaman
-Bremseth
-Bremme
-Breman
-Brelje
-Breitung
-Breitenfeldt
-Breitenbucher
-Breitenberg
-Breines
-Breiland
-Brehony
-Bregon
-Brege
-Bregantini
-Brefka
-Breeman
-Breehl
-Bredy
-Bredow
-Bredice
-Bredahl
-Brechbill
-Brearley
-Brdar
-Brazzi
-Brazler
-Braye
-Braver
-Bravender
-Bravard
-Braunsdorf
-Braunschweige
-Braught
-Brauchla
-Bratek
-Braskey
-Brasket
-Branske
-Branot
-Branine
-Braniff
-Brangan
-Branen
-Branecki
-Brandsrud
-Brandman
-Brandeland
-Brande
-Brandauer
-Brancazio
-Brancanto
-Branaugh
-Bramucci
-Brakstad
-Brais
-Braim
-Braig
-Brah
-Brage
-Bradtke
-Bradrick
-Bradon
-Bradicich
-Brackelsberg
-Brachman
-Brachle
-Bracetty
-Bracaloni
-Bozzell
-Bozovich
-Bozinovich
-Boyenga
-Bowring
-Bowlet
-Bowgren
-Bowersmith
-Bowels
-Bowcutt
-Bovio
-Boveja
-Bovain
-Boutchyard
-Bousson
-Bousqute
-Bousley
-Bourns
-Bourlier
-Bourgois
-Bourff
-Bourek
-Bourdeaux
-Bourdages
-Bourbonnais
-Boundy
-Bouliouris
-Boudrieau
-Boudin
-Bouchaert
-Botwin
-Bottomly
-Bottolfson
-Bottolene
-Bottiggi
-Botterbusch
-Botros
-Botras
-Botdorf
-Bostelman
-Bossenbroek
-Bossardet
-Bosowski
-Boschult
-Borycz
-Borwig
-Boruvka
-Bortignon
-Borsa
-Borromeo
-Borrolli
-Borries
-Borreta
-Borremans
-Borras
-Borr
-Borozny
-Borowiec
-Boronat
-Bornman
-Bormes
-Borlin
-Borguez
-Borgstede
-Borgese
-Borgert
-Borgers
-Borgella
-Borell
-Bordon
-Bordi
-Bordges
-Bordenkircher
-Borde
-Borbon
-Boratko
-Boque
-Boppre
-Boosalis
-Boorom
-Bookter
-Bookmiller
-Bookamer
-Bonzo
-Bonyai
-Bonugli
-Bonsu
-Bonsey
-Bonsell
-Bonsee
-Bonow
-Bonno
-Bonnlander
-Bonnin
-Bonnenfant
-Bonjorno
-Boniol
-Bongo
-Bonetto
-Bonepart
-Bondre
-Bonaventura
-Bonatti
-Bonapart
-Bonagurio
-Bonaguidi
-Bomzer
-Bompane
-Bomilla
-Bomia
-Bombino
-Bomaster
-Bollens
-Bollbach
-Bollaert
-Bolins
-Bolinder
-Bolig
-Bolian
-Bolfa
-Bolevice
-Boldwyn
-Bolduan
-Boldizsar
-Bolde
-Bokal
-Boitel
-Boin
-Boillot
-Boid
-Bohonik
-Bohnker
-Bohney
-Bohlsen
-Bohlman
-Bohlken
-Bogut
-Bognuda
-Bogguess
-Bogg
-Bofinger
-Boero
-Boerm
-Boeri
-Boera
-Boelk
-Boehnke
-Boege
-Bodyfelt
-Bodon
-Bodison
-Bodfish
-Boderick
-Bodenhagen
-Bodelson
-Bodary
-Bocskor
-Bockrath
-Bocklund
-Bockhorn
-Bockenstedt
-Bockelmann
-Bochicchio
-Boches
-Bochek
-Bocchieri
-Boccard
-Bobsin
-Bobrosky
-Bobowiec
-Boblak
-Bobet
-Boane
-Boamah
-Blyze
-Blute
-Blush
-Blunkall
-Blundo
-Blumkin
-Bluming
-Blumenschein
-Blumenkrantz
-Blumenberg
-Bluel
-Bloye
-Blott
-Blotsky
-Blossomgame
-Blosfield
-Bloomstrom
-Bloomstrand
-Bloomsburg
-Blonsky
-Blonigan
-Blomstrand
-Bloes
-Bloemker
-Bloedel
-Blochberger
-Blizard
-Blinebry
-Blindt
-Blihovde
-Blide
-Blicker
-Bleything
-Blevans
-Blessett
-Blesofsky
-Bleiler
-Bleichner
-Bleicher
-Bleeck
-Blee
-Blazon
-Blazing
-Blazich
-Blaydon
-Blaxland
-Blauw
-Blauman
-Blaszczyk
-Blasl
-Blashak
-Blasenhauer
-Blanscet
-Blanquet
-Blanquart
-Blannon
-Blanko
-Blankenbecler
-Blanga
-Blander
-Blakstad
-Blailock
-Blafield
-Blaeser
-Blaese
-Blady
-Bladt
-Blacock
-Blackwall
-Blackmoore
-Blackmar
-Blackington
-Blackbird
-Blacio
-Blachowski
-Bjornstrom
-Bjorn
-Bjerknes
-Bjerken
-Bjella
-Bizzard
-Bivans
-Bitzenhofer
-Bitar
-Bitah
-Bissol
-Bissel
-Bissada
-Bispham
-Bisikirski
-Bischel
-Biscari
-Bisanz
-Birthwright
-Birsner
-Bironas
-Birner
-Birnberg
-Birkmaier
-Birkenhagen
-Birely
-Birdon
-Bionda
-Binn
-Bininger
-Binet
-Binderup
-Binam
-Billus
-Billue
-Billotti
-Billinsley
-Billingsby
-Billigmeier
-Billiet
-Billiar
-Billesbach
-Bilchak
-Bilansky
-Bijan
-Bihler
-Bihl
-Bigusiak
-Bigony
-Bignell
-Biggard
-Biewald
-Biever
-Bietsch
-Biesenthal
-Biesecker
-Bierut
-Bierstedt
-Bierschbach
-Biersack
-Bierod
-Bierl
-Bierkortte
-Biener
-Bielser
-Bielke
-Bielefield
-Biedekapp
-Bidstrup
-Bidell
-Biddlecome
-Bicknase
-Bicking
-Bichoupan
-Bichoff
-Bibiloni
-Biastock
-Biasotti
-Bianchin
-Bhullar
-Bhaskar
-Bhamaraniyama
-Bhairo
-Bezenek
-Beyser
-Beyke
-Beyea
-Beydoun
-Beyale
-Beyal
-Bevevino
-Beuttel
-Beutnagel
-Beuthin
-Beuse
-Beurskens
-Beukema
-Beukelman
-Beuerle
-Beuchler
-Betzner
-Betzler
-Betzig
-Bettley
-Betry
-Betit
-Bethurem
-Betha
-Betenson
-Betak
-Bestwick
-Bestine
-Beste
-Bessone
-Bessinger
-Bessellieu
-Besong
-Besner
-Beskom
-Beshore
-Beser
-Besen
-Beseke
-Besares
-Besant
-Besanson
-Besancon
-Berzunza
-Berulie
-Bertrum
-Bertot
-Berto
-Bertman
-Berther
-Berth
-Bertella
-Bertao
-Bershadsky
-Bersaw
-Berrospe
-Berrocal
-Berray
-Bernstock
-Bernotas
-Bernos
-Bernmen
-Bernitsky
-Bernieri
-Berni
-Bernheim
-Berneri
-Bernell
-Bernbeck
-Bernaudo
-Bernau
-Bernatchez
-Bernarducci
-Bernardon
-Bernand
-Bernacki
-Berlingo
-Berley
-Berlandy
-Berlacher
-Berkovitch
-Berkenbile
-Berkbigler
-Berishaj
-Bering
-Bergstedt
-Bergsman
-Bergouignan
-Bergold
-Bergmeyer
-Bergfalk
-Bergenty
-Bergenstock
-Bergene
-Bergamine
-Bergami
-Berey
-Beresik
-Berentz
-Berenschot
-Bereda
-Berdux
-Berdar
-Berdahl
-Berczy
-Berchielli
-Bercher
-Berceir
-Berbig
-Berbereia
-Benzee
-Benwarc
-Benulis
-Bentzinger
-Bentrem
-Benthusen
-Benston
-Bennings
-Bennight
-Benneth
-Bennard
-Bennafield
-Benkosky
-Benker
-Benje
-Benisek
-Benintendi
-Bening
-Beninati
-Benimadho
-Benezra
-Beneuento
-Bendu
-Bending
-Bendell
-Benckendorf
-Benbenek
-Benanti
-Benamati
-Benafield
-Benach
-Benac
-Bembi
-Belwood
-Belvees
-Beltramo
-Belstad
-Belski
-Belschner
-Belscher
-Belovs
-Belousson
-Belous
-Belony
-Belonger
-Belluz
-Bellmore
-Bellitti
-Belliston
-Bellingtier
-Bellinder
-Bellhouse
-Bellflowers
-Bellen
-Bellehumeur
-Bellefontaine
-Bellar
-Bellantone
-Bellair
-Bellace
-Belken
-Belke
-Beliz
-Belina
-Belieu
-Belidor
-Beliard
-Belhumeur
-Belfy
-Belfort
-Belfi
-Belfast
-Belezos
-Belchior
-Belarmino
-Belanich
-Belancer
-Bejil
-Bejger
-Bejerano
-Beja
-Beiswenger
-Beissel
-Beilstein
-Beilinson
-Beilfuss
-Beile
-Behner
-Behizadeh
-Behimer
-Beherns
-Behanan
-Behal
-Begun
-Beguhl
-Begonia
-Begolli
-Begnoche
-Begen
-Beese
-Beerle
-Beemon
-Beelar
-Beedoo
-Beedles
-Beedham
-Beeckman
-Beebout
-Bedre
-Bedocs
-Bednarowicz
-Bedlion
-Bedillion
-Beder
-Bedenfield
-Bedee
-Bedaw
-Bedatsky
-Bedar
-Beckor
-Becklin
-Beckes
-Beckelheimer
-Beaureguard
-Beauparlant
-Beau
-Beattle
-Beatson
-Beath
-Beards
-Bearded
-Beandoin
-Beady
-Beachman
-Beachell
-Bayus
-Baysden
-Bayouth
-Bayon
-Bayn
-Bayani
-Baxtor
-Bawks
-Bawer
-Bawcombe
-Baves
-Bautiste
-Baute
-Baurer
-Baumohl
-Baumli
-Baumkirchner
-Baumiester
-Baumgartel
-Baumgarn
-Baumfalk
-Bauchspies
-Bauce
-Batzri
-Battisto
-Batter
-Battenhouse
-Batteiger
-Batrich
-Batra
-Batlle
-Batlis
-Batliner
-Batkin
-Batchellor
-Bastick
-Bastardi
-Bassiti
-Basore
-Basone
-Baskow
-Basini
-Basila
-Bashline
-Baseley
-Bascas
-Barvosa
-Barvick
-Barus
-Bartuska
-Bartula
-Bartosik
-Bartosch
-Bartoli
-Bartmes
-Bartlette
-Bartkus
-Bartkiewicz
-Bartholomeu
-Barte
-Bartch
-Barsegyan
-Barschdoor
-Barscewski
-Barsamian
-Barryman
-Barrowman
-Barrois
-Barrish
-Barriault
-Barrete
-Barree
-Barran
-Baronne
-Barninger
-Barners
-Barnebey
-Barnak
-Barnacle
-Barlup
-Barlock
-Barlau
-Barlak
-Barken
-Barkema
-Barjenbruch
-Barillo
-Barill
-Barientos
-Baria
-Bargstadt
-Bargmann
-Bargeron
-Baresi
-Barera
-Barends
-Bardos
-Bardoner
-Bardill
-Bardell
-Barck
-Barcik
-Barchus
-Barchacky
-Barberr
-Barbaza
-Barbarito
-Barbare
-Barbalich
-Barbadillo
-Baranga
-Barahana
-Baradi
-Barad
-Barach
-Barabin
-Baquero
-Banwarth
-Bansmer
-Banse
-Banowski
-Bannett
-Bankos
-Bangura
-Banerji
-Banek
-Bandyk
-Bandura
-Bandasak
-Bandarra
-Bancourt
-Banco
-Bancks
-Banbury
-Bamforth
-Bambas
-Bambace
-Balzotti
-Balzarine
-Balza
-Balwinski
-Baltruweit
-Baltazor
-Balsis
-Baloy
-Balow
-Balock
-Balo
-Balm
-Balluch
-Ballowe
-Ballmann
-Ballez
-Balletto
-Ballesterous
-Ballena
-Ballejos
-Ballar
-Ballan
-Ballagas
-Balitas
-Balish
-Baligod
-Balich
-Baldwyn
-Balduzzi
-Baldos
-Balderree
-Baldearena
-Balda
-Balcos
-Balasko
-Balangatan
-Balak
-Baladejo
-Bakalars
-Bajko
-Bajek
-Baitner
-Baison
-Bairo
-Baiotto
-Bainey
-Bailleu
-Bailado
-Baibak
-Bahri
-Bahde
-Bahadue
-Bagwill
-Bagu
-Bagron
-Bagnaschi
-Baffa
-Baff
-Baeskens
-Baerg
-Baenziger
-Baena
-Baell
-Badzinski
-Badruddin
-Badlam
-Badey
-Badertscher
-Badenoch
-Badagliacca
-Bacone
-Bacman
-Backhuus
-Bacino
-Bachmeyer
-Bachinski
-Bachas
-Bachan
-Bacerra
-Bacayo
-Babson
-Bablak
-Babinski
-Babilon
-Babikian
-Babicz
-Babey
-Babbish
-Baarts
-Baack
-Azznara
-Azuma
-Azor
-Azatyan
-Azapinto
-Azahar
-Ayyad
-Aytes
-Aysien
-Aymar
-Aylock
-Ayhens
-Ayele
-Aydin
-Axtman
-Axman
-Awyie
-Aw
-Avona
-Avner
-Avison
-Avenia
-Aveles
-Avarbuch
-Avancena
-Autullo
-Autovino
-Autobee
-Auther
-Auter
-Austino
-Austine
-Auster
-Auslam
-Aurrichio
-Aun
-Auls
-Aulder
-Aufiero
-Audrey
-Audibert
-Audelhuk
-Auckley
-Auces
-Aubel
-Auala
-Atzinger
-Atzhorn
-Attwell
-Attles
-Attilio
-Attia
-Atthowe
-Atteburg
-Atmore
-Atma
-Atleh
-Atkisson
-Athy
-Atherholt
-Athanasiou
-Atengco
-Atamanczyk
-Astillero
-Astafan
-Assum
-Assis
-Assing
-Assenmacher
-Assalone
-Assael
-Asrari
-Aspri
-Aspley
-Asperheim
-Aspell
-Asnicar
-Asner
-Askiew
-Askia
-Aske
-Ask
-Ashly
-Ashkettle
-Ashing
-Ashbourne
-Ashbach
-Ashaf
-Asenjo
-Aseng
-Aseltine
-Ascol
-Aschbacher
-Asamoah
-Arzt
-Arzabala
-Arview
-Arvez
-Arvanitis
-Arva
-Arunachalam
-Arton
-Arties
-Artibee
-Arthun
-Artez
-Arters
-Arsham
-Arseneault
-Arroyd
-Arroyano
-Arrospide
-Arrocho
-Arrisola
-Arrindel
-Arrigone
-Arrellin
-Arredla
-Arrand
-Arrance
-Arquelles
-Arosemena
-Arollo
-Aroca
-Arntzen
-Arnsberger
-Arnitz
-Arnerich
-Arndell
-Arnaudet
-Arnao
-Arnaldo
-Army
-Armout
-Armold
-Armocida
-Armlin
-Armiso
-Armesto
-Armen
-Armada
-Arkontaky
-Arking
-Aristizabal
-Arisa
-Arildsen
-Arichabala
-Ariail
-Argulewicz
-Argudin
-Argro
-Argie
-Argenziano
-Argenti
-Arendash
-Arendall
-Arendale
-Arelleano
-Arehano
-Ards
-Ardeneaux
-Ardelean
-Ardaly
-Arciola
-Arcieri
-Archiopoli
-Archdale
-Archbell
-Arbon
-Arbolida
-Arbetman
-Arbertha
-Arau
-Arashiro
-Araneo
-Arancibia
-Araldi
-Aragones
-Aragao
-Arabajian
-Aquas
-Apthorpe
-Apshire
-Aprill
-Aprigliano
-Applonie
-Appl
-Appia
-Appana
-Aponta
-Aplington
-Apley
-Apker
-Apelian
-Apadaca
-Aono
-Ao
-Anzideo
-Anway
-Antronica
-Antosh
-Antonovich
-Antoniak
-Antolak
-Antila
-Antignani
-Anthes
-Antao
-Ansoategui
-Ansloan
-Anreozzi
-Anos
-Anolick
-Anoe
-Annuzzi
-Anning
-Annarino
-Annal
-Annable
-Annabel
-Anitok
-Aninion
-Animashaun
-Anidi
-Angocicco
-Angland
-Angiolelli
-Angileri
-Angilello
-Angier
-Angermeier
-Angelozzi
-Angelou
-Angellotti
-Angelillo
-Angelica
-Angalich
-Aney
-Anewalt
-Anetsberger
-Anesi
-Aneshansley
-Anene
-Anecelle
-Andrzejczyk
-Andrzejczak
-Andruszkiewic
-Andrson
-Androde
-Andriopulos
-Andrino
-Andrich
-Andreola
-Andregg
-Andreessen
-Andrango
-Andradez
-Andrades
-Andrachak
-Andoh
-Andina
-Anderst
-Anderholm
-Andere
-Andalora
-Anciso
-Ancic
-Ancel
-Ancar
-Ancalade
-Anawaty
-Anawalt
-Amys
-Amstrong
-Amspaugh
-Amous
-Amott
-Amoros
-Amormino
-Amoriello
-Amorello
-Amoe
-Amodt
-Ammonds
-Ammirata
-Ammer
-Amlin
-Amith
-Amistadi
-Amill
-Amigo
-Amerio
-American
-Amentler
-Amemiya
-Amela
-Amejorado
-Amedro
-Amedeo
-Amburgy
-Ambroziak
-Ambrister
-Amboree
-Amboise
-Ambert
-Ambagis
-Amauty
-Amat
-Amas
-Amarian
-Amara
-Amalong
-Alwin
-Alwazan
-Alvirez
-Alvero
-Alverado
-Alty
-Altstatt
-Altsisi
-Altmark
-Altimus
-Altamiruno
-Alson
-Alsing
-Alsaqri
-Alrod
-Alquesta
-Alpis
-Alpheaus
-Alperin
-Aloy
-Alosta
-Aloan
-Alnoor
-Almsteadt
-Almstead
-Almos
-Almgren
-Almarza
-Almajhoub
-Allyne
-Allsbrooks
-Allon
-Allinger
-Alliman
-Alliance
-Allgire
-Allevato
-Alleshouse
-Alleruzzo
-Allerton
-Allder
-Allcock
-Allbert
-Allanson
-Allabaugh
-Alkins
-Alkema
-Alkana
-Aljemal
-Alisauskas
-Alimo
-Alimento
-Alie
-Alicer
-Alias
-Alhusseini
-Alhameed
-Alhambra
-Alhaddad
-Alfredo
-Alfiero
-Aleyandrez
-Alexidor
-Alexandropoul
-Alexanders
-Alexakis
-Alesse
-Alesna
-Alepin
-Alejandrez
-Aldworth
-Aldrow
-Aldrige
-Aldonza
-Alcine
-Alcantas
-Albu
-Albrough
-Albor
-Albe
-Albarracin
-Albarazi
-Alatosse
-Alarcone
-Alanko
-Aland
-Alamia
-Alameida
-Alambar
-Alai
-Akwei
-Aksoy
-Ako
-Akley
-Akinrefon
-Akimseu
-Akhavan
-Akhand
-Akery
-Akawanzie
-Akapo
-Akamiro
-Akal
-Ajoku
-Ajani
-Aiuto
-Aiudi
-Airth
-Aipperspach
-Aiporlani
-Aipopo
-Aiola
-Aini
-Ailsworth
-Aills
-Ailiff
-Aievoli
-Aid
-Aiava
-Ahyet
-Ahrenholz
-Ahnell
-Ahlo
-Ahlfield
-Ahlemeyer
-Ahimud
-Ahia
-Ahhee
-Ahaus
-Ahalt
-Agustino
-Agustine
-Agurs
-Agumga
-Aguele
-Agresto
-Agreda
-Agpaoa
-Agosti
-Agoro
-Agonoy
-Agoff
-Aggers
-Agemy
-Ageboi
-Agbisit
-Afurong
-Afshar
-Affronti
-Afflick
-Affeltranger
-Afable
-Aeillo
-Adule
-Adrion
-Adolphe
-Adolfson
-Adner
-Adloff
-Adling
-Adickes
-Adib
-Adelsperger
-Adelmund
-Adelizzi
-Addeo
-Adamsonis
-Adamsen
-Adamowski
-Adamos
-Adamec
-Adalja
-Acosto
-Acors
-Acorda
-Acock
-Acly
-Ackah
-Achin
-Aceveda
-Acerra
-Acerno
-Aceituno
-Acee
-Accala
-Acal
-Abusufait
-Abugn
-Abuel
-Absalon
-Abriola
-Abrey
-Abrell
-Abramovitz
-Abramoff
-Abramian
-Abrahamian
-Abousaleh
-Aboshihata
-Abolafia
-Ableman
-Abkemeier
-Abington
-Abina
-Abigantus
-Abide
-Abeta
-Abercombie
-Abdulmuniem
-Abdulaziz
-Abdou
-Abdelmuti
-Abdelaziz
-Abdelal
-Abbington
-Abbatiello
-Abajian
-Abaja
-Aarsvold
-Aarhus
-Aardema
-Aarant
-Aanderud
-Aalund
-Aalderink
diff --git a/hyracks-storage-am-lsm-btree/pom.xml b/hyracks-storage-am-lsm-btree/pom.xml
deleted file mode 100644
index afef819..0000000
--- a/hyracks-storage-am-lsm-btree/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-lsm-btree</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>    
-    <dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency> 
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-bloomfilter</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency> 
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-lsm-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelper.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelper.java
deleted file mode 100644
index 06f06c6..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelper.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeUtils;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-
-public class LSMBTreeDataflowHelper extends AbstractLSMIndexDataflowHelper {
-
-    public LSMBTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
-                ioScheduler, ioOpCallbackProvider);
-    }
-
-    public LSMBTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider);
-    }
-
-    @Override
-    public ITreeIndex createIndexInstance() throws HyracksDataException {
-        AbstractTreeIndexOperatorDescriptor treeOpDesc = (AbstractTreeIndexOperatorDescriptor) opDesc;
-        ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
-        IInMemoryBufferCache memBufferCache = new InMemoryBufferCache(new HeapBufferAllocator(), memPageSize,
-                memNumPages, new TransientFileMapManager());
-        IInMemoryFreePageManager memFreePageManager = new InMemoryFreePageManager(memNumPages, metaDataFrameFactory);
-        return LSMBTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ctx.getIOManager(), file, opDesc
-                .getStorageManager().getBufferCache(ctx), opDesc.getStorageManager().getFileMapProvider(ctx),
-                treeOpDesc.getTreeIndexTypeTraits(), treeOpDesc.getTreeIndexComparatorFactories(), treeOpDesc
-                        .getTreeIndexBloomFilterKeyFields(), mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider, partition);
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelperFactory.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelperFactory.java
deleted file mode 100644
index ebf4bc0..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelperFactory.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelperFactory;
-
-public class LSMBTreeDataflowHelperFactory extends AbstractLSMIndexDataflowHelperFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    public LSMBTreeDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
-        super(mergePolicyProvider, opTrackerFactory, ioSchedulerProvider, ioOpCallbackProvider, memPageSize,
-                memNumPages);
-    }
-
-    @Override
-    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        return new LSMBTreeDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages,
-                mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory, ioSchedulerProvider.getIOScheduler(ctx),
-                ioOpCallbackProvider);
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/AntimatterAwareTupleAcceptor.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/AntimatterAwareTupleAcceptor.java
deleted file mode 100644
index e88aea6..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/AntimatterAwareTupleAcceptor.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.ITupleAcceptor;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.tuples.LSMBTreeTupleReference;
-
-public enum AntimatterAwareTupleAcceptor implements ITupleAcceptor {
-    INSTANCE;
-
-    @Override
-    public boolean accept(ITupleReference tuple) {
-        if (tuple == null) {
-            return true;
-        }
-        return ((LSMBTreeTupleReference) tuple).isAntimatter();
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
deleted file mode 100644
index 3596466..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ /dev/null
@@ -1,640 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.io.File;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomCalculations;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterSpecification;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeBulkLoader;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.tuples.LSMBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BlockingIOOperationCallbackWrapper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMBTree extends AbstractLSMIndex implements ITreeIndex {
-
-    // In-memory components.   
-    private final LSMBTreeMutableComponent mutableComponent;
-
-    // For creating BTree's used in flush and merge.
-    private final LSMBTreeImmutableComponentFactory componentFactory;
-    // For creating BTree's used in bulk load. Different from diskBTreeFactory
-    // because it should have a different tuple writer in it's leaf frames.
-    private final LSMBTreeImmutableComponentFactory bulkLoadComponentFactory;
-
-    // Common for in-memory and on-disk components.
-    private final ITreeIndexFrameFactory insertLeafFrameFactory;
-    private final ITreeIndexFrameFactory deleteLeafFrameFactory;
-    private final IBinaryComparatorFactory[] cmpFactories;
-
-    public LSMBTree(IInMemoryBufferCache memBufferCache, IInMemoryFreePageManager memFreePageManager,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory insertLeafFrameFactory,
-            ITreeIndexFrameFactory deleteLeafFrameFactory, ILSMIndexFileManager fileManager,
-            TreeIndexFactory<BTree> diskBTreeFactory, TreeIndexFactory<BTree> bulkLoadBTreeFactory,
-            BloomFilterFactory bloomFilterFactory, IFileMapProvider diskFileMapProvider, int fieldCount,
-            IBinaryComparatorFactory[] cmpFactories, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(memFreePageManager, diskBTreeFactory.getBufferCache(), fileManager, diskFileMapProvider, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        mutableComponent = new LSMBTreeMutableComponent(new BTree(memBufferCache,
-                ((InMemoryBufferCache) memBufferCache).getFileMapProvider(), memFreePageManager, interiorFrameFactory,
-                insertLeafFrameFactory, cmpFactories, fieldCount, new FileReference(new File("membtree"))),
-                memFreePageManager);
-        this.insertLeafFrameFactory = insertLeafFrameFactory;
-        this.deleteLeafFrameFactory = deleteLeafFrameFactory;
-        this.cmpFactories = cmpFactories;
-        componentFactory = new LSMBTreeImmutableComponentFactory(diskBTreeFactory, bloomFilterFactory);
-        bulkLoadComponentFactory = new LSMBTreeImmutableComponentFactory(bulkLoadBTreeFactory, bloomFilterFactory);
-    }
-
-    @Override
-    public synchronized void create() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to create the index since it is activated.");
-        }
-
-        fileManager.deleteDirs();
-        fileManager.createDirs();
-        componentsRef.get().clear();
-    }
-
-    @Override
-    public synchronized void activate() throws HyracksDataException {
-        if (isActivated) {
-            return;
-        }
-
-        ((InMemoryBufferCache) mutableComponent.getBTree().getBufferCache()).open();
-        mutableComponent.getBTree().create();
-        mutableComponent.getBTree().activate();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        immutableComponents.clear();
-        List<LSMComponentFileReferences> validFileReferences;
-        try {
-            validFileReferences = fileManager.cleanupAndGetValidFiles();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-        for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
-            LSMBTreeImmutableComponent component;
-            try {
-                component = createDiskComponent(componentFactory,
-                        lsmComonentFileReference.getInsertIndexFileReference(),
-                        lsmComonentFileReference.getBloomFilterFileReference(), false);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-            immutableComponents.add(component);
-        }
-        isActivated = true;
-    }
-
-    @Override
-    public synchronized void deactivate(boolean flushOnExit) throws HyracksDataException {
-        if (!isActivated) {
-            return;
-        }
-
-        if (flushOnExit) {
-            BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(
-                    ioOpCallbackProvider.getIOOperationCallback(this));
-            ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            accessor.scheduleFlush(cb);
-            try {
-                cb.waitForIO();
-            } catch (InterruptedException e) {
-                throw new HyracksDataException(e);
-            }
-        }
-
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMBTreeImmutableComponent component = (LSMBTreeImmutableComponent) c;
-            BTree btree = component.getBTree();
-            BloomFilter bloomFilter = component.getBloomFilter();
-            btree.deactivate();
-            bloomFilter.deactivate();
-        }
-        mutableComponent.getBTree().deactivate();
-        mutableComponent.getBTree().destroy();
-        ((InMemoryBufferCache) mutableComponent.getBTree().getBufferCache()).close();
-        isActivated = false;
-    }
-
-    @Override
-    public synchronized void deactivate() throws HyracksDataException {
-        deactivate(true);
-    }
-
-    @Override
-    public void destroy() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to destroy the index since it is activated.");
-        }
-
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMBTreeImmutableComponent component = (LSMBTreeImmutableComponent) c;
-            component.getBTree().destroy();
-            component.getBloomFilter().destroy();
-        }
-        mutableComponent.getBTree().destroy();
-        fileManager.deleteDirs();
-    }
-
-    @Override
-    public void clear() throws HyracksDataException {
-        if (!isActivated) {
-            throw new HyracksDataException("Failed to clear the index since it is not activated.");
-        }
-
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        mutableComponent.getBTree().clear();
-        for (ILSMComponent c : immutableComponents) {
-            LSMBTreeImmutableComponent component = (LSMBTreeImmutableComponent) c;
-            component.getBloomFilter().deactivate();
-            component.getBTree().deactivate();
-            component.getBloomFilter().destroy();
-            component.getBTree().destroy();
-        }
-        immutableComponents.clear();
-    }
-
-    @Override
-    public void getOperationalComponents(ILSMIndexOperationContext ctx) {
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
-        operationalComponents.clear();
-        switch (ctx.getOperation()) {
-            case UPDATE:
-            case UPSERT:
-            case PHYSICALDELETE:
-            case FLUSH:
-            case DELETE:
-                operationalComponents.add(mutableComponent);
-                break;
-            case SEARCH:
-            case INSERT:
-                operationalComponents.add(mutableComponent);
-                operationalComponents.addAll(immutableComponents);
-                break;
-            case MERGE:
-                operationalComponents.addAll(immutableComponents);
-                break;
-            default:
-                throw new UnsupportedOperationException("Operation " + ctx.getOperation() + " not supported.");
-        }
-    }
-
-    @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException, IndexException {
-        LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx;
-        switch (ctx.getOperation()) {
-            case PHYSICALDELETE:
-                ctx.memBTreeAccessor.delete(tuple);
-                break;
-            case INSERT:
-                insert(tuple, ctx);
-                break;
-            default:
-                ctx.memBTreeAccessor.upsert(tuple);
-                break;
-        }
-    }
-
-    private boolean insert(ITupleReference tuple, LSMBTreeOpContext ctx) throws HyracksDataException, IndexException {
-        MultiComparator comparator = MultiComparator.createIgnoreFieldLength(mutableComponent.getBTree()
-                .getComparatorFactories());
-        LSMBTreePointSearchCursor searchCursor = new LSMBTreePointSearchCursor(ctx);
-        IIndexCursor memCursor = new BTreeRangeSearchCursor(ctx.memBTreeOpCtx.leafFrame, false);
-        RangePredicate predicate = new RangePredicate(tuple, tuple, true, true, comparator, comparator);
-
-        // first check the inmemory component
-        ctx.memBTreeAccessor.search(memCursor, predicate);
-        try {
-            if (memCursor.hasNext()) {
-                memCursor.next();
-                LSMBTreeTupleReference lsmbtreeTuple = (LSMBTreeTupleReference) memCursor.getTuple();
-                if (!lsmbtreeTuple.isAntimatter()) {
-                    throw new BTreeDuplicateKeyException("Failed to insert key since key already exists.");
-                } else {
-                    memCursor.close();
-                    ctx.memBTreeAccessor.upsertIfConditionElseInsert(tuple, AntimatterAwareTupleAcceptor.INSTANCE);
-                    return true;
-                }
-            }
-        } finally {
-            memCursor.close();
-        }
-
-        // TODO: Can we just remove the above code that search the mutable component and do it together with the search call below? i.e. instead of passing false to the lsmHarness.search(), we pass true to include the mutable component?
-        // the key was not in the inmemory component, so check the disk components
-        search(ctx, searchCursor, predicate);
-        try {
-            if (searchCursor.hasNext()) {
-                throw new BTreeDuplicateKeyException("Failed to insert key since key already exists.");
-            }
-        } finally {
-            searchCursor.close();
-        }
-        ctx.memBTreeAccessor.upsertIfConditionElseInsert(tuple, AntimatterAwareTupleAcceptor.INSTANCE);
-
-        return true;
-    }
-
-    @Override
-    public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
-        LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx;
-        List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
-        int numBTrees = operationalComponents.size();
-        assert numBTrees > 0;
-
-        boolean includeMutableComponent = operationalComponents.get(0) == mutableComponent;
-        LSMBTreeCursorInitialState initialState = new LSMBTreeCursorInitialState(numBTrees, insertLeafFrameFactory,
-                ctx.cmp, ctx.bloomFilterCmp, includeMutableComponent, lsmHarness, ctx.memBTreeAccessor, pred,
-                ctx.searchCallback, operationalComponents);
-        cursor.open(initialState, pred);
-    }
-
-    @Override
-    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException {
-        LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
-        LSMBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        assert ctx.getComponentHolder().size() == 1;
-        ILSMComponent flushingComponent = ctx.getComponentHolder().get(0);
-        opCtx.setOperation(IndexOperation.FLUSH);
-        opCtx.getComponentHolder().add(flushingComponent);
-        ILSMIndexAccessorInternal flushAccessor = new LSMBTreeAccessor(lsmHarness, opCtx);
-        ioScheduler.scheduleOperation(new LSMBTreeFlushOperation(flushAccessor, flushingComponent, componentFileRefs
-                .getInsertIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), callback));
-    }
-
-    @Override
-    public ILSMComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        LSMBTreeFlushOperation flushOp = (LSMBTreeFlushOperation) operation;
-        LSMBTreeMutableComponent flushingComponent = (LSMBTreeMutableComponent) flushOp.getFlushingComponent();
-        IIndexAccessor accessor = flushingComponent.getBTree().createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-
-        RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
-        IIndexCursor countingCursor = ((BTreeAccessor) accessor).createCountingSearchCursor();
-        accessor.search(countingCursor, nullPred);
-        long numElements = 0L;
-        try {
-            while (countingCursor.hasNext()) {
-                countingCursor.next();
-                ITupleReference countTuple = countingCursor.getTuple();
-                numElements = IntegerSerializerDeserializer.getInt(countTuple.getFieldData(0),
-                        countTuple.getFieldStart(0));
-            }
-        } finally {
-            countingCursor.close();
-        }
-
-        int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
-        BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
-                MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
-
-        LSMBTreeImmutableComponent component = createDiskComponent(componentFactory, flushOp.getBTreeFlushTarget(),
-                flushOp.getBloomFilterFlushTarget(), true);
-        IIndexBulkLoader bulkLoader = component.getBTree().createBulkLoader(1.0f, false, numElements);
-        IIndexBulkLoader builder = component.getBloomFilter().createBuilder(numElements,
-                bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements());
-
-        IIndexCursor scanCursor = accessor.createSearchCursor();
-        accessor.search(scanCursor, nullPred);
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                builder.add(scanCursor.getTuple());
-                bulkLoader.add(scanCursor.getTuple());
-            }
-        } finally {
-            scanCursor.close();
-            builder.end();
-        }
-        bulkLoader.end();
-        return component;
-    }
-
-    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
-        LSMBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
-        opCtx.getComponentHolder().addAll(mergingComponents);
-        ITreeIndexCursor cursor = new LSMBTreeRangeSearchCursor(opCtx);
-        RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
-        search(opCtx, cursor, rangePred);
-
-        opCtx.setOperation(IndexOperation.MERGE);
-        BTree firstBTree = (BTree) ((LSMBTreeImmutableComponent) mergingComponents.get(0)).getBTree();
-        BTree lastBTree = (BTree) ((LSMBTreeImmutableComponent) mergingComponents.get(mergingComponents.size() - 1))
-                .getBTree();
-        FileReference firstFile = diskFileMapProvider.lookupFileName(firstBTree.getFileId());
-        FileReference lastFile = diskFileMapProvider.lookupFileName(lastBTree.getFileId());
-        LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(firstFile.getFile()
-                .getName(), lastFile.getFile().getName());
-        ILSMIndexAccessorInternal accessor = new LSMBTreeAccessor(lsmHarness, opCtx);
-        ioScheduler.scheduleOperation(new LSMBTreeMergeOperation(accessor, mergingComponents, cursor, relMergeFileRefs
-                .getInsertIndexFileReference(), relMergeFileRefs.getBloomFilterFileReference(), callback));
-    }
-
-    @Override
-    public ILSMComponent merge(List<ILSMComponent> mergedComponents, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
-        LSMBTreeMergeOperation mergeOp = (LSMBTreeMergeOperation) operation;
-        ITreeIndexCursor cursor = mergeOp.getCursor();
-        mergedComponents.addAll(mergeOp.getMergingComponents());
-
-        long numElements = 0L;
-        for (int i = 0; i < mergedComponents.size(); ++i) {
-            numElements += ((LSMBTreeImmutableComponent) mergedComponents.get(i)).getBloomFilter().getNumElements();
-        }
-
-        int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
-        BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
-                MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
-        LSMBTreeImmutableComponent mergedComponent = createDiskComponent(componentFactory,
-                mergeOp.getBTreeMergeTarget(), mergeOp.getBloomFilterMergeTarget(), true);
-
-        IIndexBulkLoader bulkLoader = mergedComponent.getBTree().createBulkLoader(1.0f, false, numElements);
-        IIndexBulkLoader builder = mergedComponent.getBloomFilter().createBuilder(numElements,
-                bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements());
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-                ITupleReference frameTuple = cursor.getTuple();
-                builder.add(frameTuple);
-                bulkLoader.add(frameTuple);
-            }
-        } finally {
-            cursor.close();
-            builder.end();
-        }
-        bulkLoader.end();
-        return mergedComponent;
-    }
-
-    private LSMBTreeImmutableComponent createDiskComponent(LSMBTreeImmutableComponentFactory factory,
-            FileReference btreeFileRef, FileReference bloomFilterFileRef, boolean createComponent)
-            throws HyracksDataException, IndexException {
-        // Create new BTree instance.
-        LSMBTreeImmutableComponent component = (LSMBTreeImmutableComponent) factory
-                .createLSMComponentInstance(new LSMComponentFileReferences(btreeFileRef, null, bloomFilterFileRef));
-        if (createComponent) {
-            component.getBTree().create();
-            component.getBloomFilter().create();
-        }
-        // BTree will be closed during cleanup of merge().
-        component.getBTree().activate();
-        component.getBloomFilter().activate();
-        return component;
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint)
-            throws TreeIndexException {
-        try {
-            return new LSMBTreeBulkLoader(fillLevel, verifyInput, numElementsHint);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
-    }
-
-    private ILSMComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
-        LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
-        return createDiskComponent(bulkLoadComponentFactory, componentFileRefs.getInsertIndexFileReference(),
-                componentFileRefs.getBloomFilterFileReference(), true);
-    }
-
-    @Override
-    public void markAsValid(ILSMComponent lsmComponent) throws HyracksDataException {
-        // The order of forcing the dirty page to be flushed is critical. The bloom filter must be always done first.
-        LSMBTreeImmutableComponent component = (LSMBTreeImmutableComponent) lsmComponent;
-        // Flush the bloom filter first.
-        int fileId = component.getBloomFilter().getFileId();
-        IBufferCache bufferCache = component.getBTree().getBufferCache();
-        int startPage = 0;
-        int maxPage = component.getBloomFilter().getNumPages();
-        forceFlushDirtyPages(bufferCache, fileId, startPage, maxPage);
-        forceFlushDirtyPages(component.getBTree());
-        markAsValidInternal(component.getBTree());
-    }
-
-    public class LSMBTreeBulkLoader implements IIndexBulkLoader {
-        private final ILSMComponent component;
-        private final BTreeBulkLoader bulkLoader;
-        private final IIndexBulkLoader builder;
-        private boolean endHasBeenCalled = false;
-
-        public LSMBTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-                throws TreeIndexException, HyracksDataException {
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException e) {
-                throw new TreeIndexException(e);
-            } catch (IndexException e) {
-                throw new TreeIndexException(e);
-            }
-            bulkLoader = (BTreeBulkLoader) ((LSMBTreeImmutableComponent) component).getBTree().createBulkLoader(
-                    fillFactor, verifyInput, numElementsHint);
-
-            int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElementsHint);
-            BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
-                    MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
-            builder = ((LSMBTreeImmutableComponent) component).getBloomFilter().createBuilder(numElementsHint,
-                    bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements());
-        }
-
-        @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
-            try {
-                bulkLoader.add(tuple);
-                builder.add(tuple);
-            } catch (IndexException e) {
-                handleException();
-                throw e;
-            } catch (HyracksDataException e) {
-                handleException();
-                throw e;
-            } catch (RuntimeException e) {
-                handleException();
-                throw e;
-            }
-        }
-
-        protected void handleException() throws HyracksDataException, IndexException {
-            if (!endHasBeenCalled) {
-                builder.end();
-            }
-            ((LSMBTreeImmutableComponent) component).getBTree().deactivate();
-            ((LSMBTreeImmutableComponent) component).getBTree().destroy();
-            ((LSMBTreeImmutableComponent) component).getBloomFilter().deactivate();
-            ((LSMBTreeImmutableComponent) component).getBloomFilter().destroy();
-        }
-
-        @Override
-        public void end() throws HyracksDataException, IndexException {
-            bulkLoader.end();
-            builder.end();
-            endHasBeenCalled = true;
-            lsmHarness.addBulkLoadedComponent(component);
-        }
-
-    }
-
-    public LSMBTreeOpContext createOpContext(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new LSMBTreeOpContext(mutableComponent.getBTree(), insertLeafFrameFactory, deleteLeafFrameFactory,
-                modificationCallback, searchCallback, componentFactory.getBloomFilterKeyFields().length);
-    }
-
-    @Override
-    public ILSMIndexAccessorInternal createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new LSMBTreeAccessor(lsmHarness, createOpContext(modificationCallback, searchCallback));
-    }
-
-    public class LSMBTreeAccessor extends LSMTreeIndexAccessor {
-        public LSMBTreeAccessor(ILSMHarness lsmHarness, ILSMIndexOperationContext ctx) {
-            super(lsmHarness, ctx);
-        }
-
-        @Override
-        public IIndexCursor createSearchCursor() {
-            return new LSMBTreeSearchCursor(ctx);
-        }
-
-        public MultiComparator getMultiComparator() {
-            LSMBTreeOpContext concreteCtx = (LSMBTreeOpContext) ctx;
-            return concreteCtx.cmp;
-        }
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return diskBufferCache;
-    }
-
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        return cmpFactories;
-    }
-
-    @Override
-    public ITreeIndexFrameFactory getInteriorFrameFactory() {
-        return mutableComponent.getBTree().getInteriorFrameFactory();
-    }
-
-    @Override
-    public int getFieldCount() {
-        return mutableComponent.getBTree().getFieldCount();
-    }
-
-    @Override
-    public int getFileId() {
-        return mutableComponent.getBTree().getFileId();
-    }
-
-    @Override
-    public IFreePageManager getFreePageManager() {
-        return mutableComponent.getBTree().getFreePageManager();
-    }
-
-    @Override
-    public ITreeIndexFrameFactory getLeafFrameFactory() {
-        return mutableComponent.getBTree().getLeafFrameFactory();
-    }
-
-    @Override
-    public long getMemoryAllocationSize() {
-        InMemoryBufferCache memBufferCache = (InMemoryBufferCache) mutableComponent.getBTree().getBufferCache();
-        return memBufferCache.getNumPages() * memBufferCache.getPageSize();
-    }
-
-    @Override
-    public int getRootPageId() {
-        return mutableComponent.getBTree().getRootPageId();
-    }
-
-    public boolean isEmptyIndex() throws HyracksDataException {
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        return immutableComponents.isEmpty()
-                && mutableComponent.getBTree().isEmptyTree(
-                        mutableComponent.getBTree().getInteriorFrameFactory().createFrame());
-    }
-
-    @Override
-    public void validate() throws HyracksDataException {
-        mutableComponent.getBTree().validate();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            BTree btree = (BTree) ((LSMBTreeImmutableComponent) c).getBTree();
-            btree.validate();
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeCursorInitialState.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeCursorInitialState.java
deleted file mode 100644
index 2b7029b..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeCursorInitialState.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class LSMBTreeCursorInitialState implements ICursorInitialState {
-
-    private final int numBTrees;
-    private final ITreeIndexFrameFactory leafFrameFactory;
-    private MultiComparator cmp;
-    private final MultiComparator bloomFilterCmp;
-    private final boolean includeMemComponent;
-    private final ILSMHarness lsmHarness;
-
-    private final IIndexAccessor memBtreeAccessor;
-    private final ISearchPredicate predicate;
-    private ISearchOperationCallback searchCallback;
-
-    private final List<ILSMComponent> operationalComponents;
-
-    public LSMBTreeCursorInitialState(int numBTrees, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp,
-            MultiComparator bloomFilterCmp, boolean includeMemComponent, ILSMHarness lsmHarness,
-            IIndexAccessor memBtreeAccessor, ISearchPredicate predicate, ISearchOperationCallback searchCallback,
-            List<ILSMComponent> operationalComponents) {
-        this.numBTrees = numBTrees;
-        this.leafFrameFactory = leafFrameFactory;
-        this.cmp = cmp;
-        this.bloomFilterCmp = bloomFilterCmp;
-        this.includeMemComponent = includeMemComponent;
-        this.lsmHarness = lsmHarness;
-        this.searchCallback = searchCallback;
-        this.memBtreeAccessor = memBtreeAccessor;
-        this.predicate = predicate;
-        this.operationalComponents = operationalComponents;
-    }
-
-    public int getNumBTrees() {
-        return numBTrees;
-    }
-
-    public ITreeIndexFrameFactory getLeafFrameFactory() {
-        return leafFrameFactory;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return null;
-    }
-
-    @Override
-    public void setPage(ICachedPage page) {
-    }
-
-    public boolean getIncludeMemComponent() {
-        return includeMemComponent;
-    }
-
-    public ILSMHarness getLSMHarness() {
-        return lsmHarness;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return searchCallback;
-    }
-
-    @Override
-    public void setSearchOperationCallback(ISearchOperationCallback searchCallback) {
-        this.searchCallback = searchCallback;
-    }
-
-    public List<ILSMComponent> getOperationalComponents() {
-        return operationalComponents;
-    }
-
-    public IIndexAccessor getMemBTreeAccessor() {
-        return memBtreeAccessor;
-    }
-
-    public ISearchPredicate getSearchPredicate() {
-        return predicate;
-    }
-
-    public MultiComparator getBloomFilterComparator() {
-        return bloomFilterCmp;
-    }
-
-    @Override
-    public MultiComparator getOriginalKeyComparator() {
-        return cmp;
-    }
-
-    @Override
-    public void setOriginialKeyComparator(MultiComparator originalCmp) {
-        this.cmp = originalCmp;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
deleted file mode 100644
index 38766c3..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMBTreeFileManager extends AbstractLSMIndexFileManager {
-    private static final String BTREE_STRING = "b";
-
-    private final TreeIndexFactory<? extends ITreeIndex> btreeFactory;
-
-    public LSMBTreeFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
-            TreeIndexFactory<? extends ITreeIndex> btreeFactory, int startIODeviceIndex) {
-        super(ioManager, fileMapProvider, file, null, startIODeviceIndex);
-        this.btreeFactory = btreeFactory;
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelFlushFileReference() {
-        Date date = new Date();
-        String ts = formatter.format(date);
-        String baseName = baseDir + ts + SPLIT_STRING + ts;
-        // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), null,
-                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelMergeFileReference(String firstFileName, String lastFileName)
-            throws HyracksDataException {
-        String[] firstTimestampRange = firstFileName.split(SPLIT_STRING);
-        String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
-
-        String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
-        // Get the range of timestamps by taking the earliest and the latest timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + BTREE_STRING), null,
-                createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
-    }
-
-    private static FilenameFilter btreeFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".") && name.endsWith(BTREE_STRING);
-        }
-    };
-
-    @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
-        List<LSMComponentFileReferences> validFiles = new ArrayList<LSMComponentFileReferences>();
-        ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<ComparableFileName>();
-        ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<ComparableFileName>();
-
-        // Gather files from all IODeviceHandles.
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            cleanupAndGetValidFilesInternal(dev, bloomFilterFilter, null, allBloomFilterFiles);
-            HashSet<String> bloomFilterFilesSet = new HashSet<String>();
-            for (ComparableFileName cmpFileName : allBloomFilterFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                bloomFilterFilesSet.add(cmpFileName.fileName.substring(0, index));
-            }
-            // List of valid BTree files that may or may not have a bloom filter buddy. Will check for buddies below.
-            ArrayList<ComparableFileName> tmpAllBTreeFiles = new ArrayList<ComparableFileName>();
-            cleanupAndGetValidFilesInternal(dev, btreeFilter, btreeFactory, tmpAllBTreeFiles);
-            // Look for buddy bloom filters for all valid BTrees. 
-            // If no buddy is found, delete the file, otherwise add the BTree to allBTreeFiles. 
-            for (ComparableFileName cmpFileName : tmpAllBTreeFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                String file = cmpFileName.fileName.substring(0, index);
-                if (bloomFilterFilesSet.contains(file)) {
-                    allBTreeFiles.add(cmpFileName);
-                } else {
-                    // Couldn't find the corresponding bloom filter file; thus, delete
-                    // the BTree file.
-                    File invalidBTreeFile = new File(cmpFileName.fullPath);
-                    invalidBTreeFile.delete();
-                }
-            }
-        }
-        // Sanity check.
-        if (allBTreeFiles.size() != allBloomFilterFiles.size()) {
-            throw new HyracksDataException(
-                    "Unequal number of valid BTree and bloom filter files found. Aborting cleanup.");
-        }
-
-        // Trivial cases.
-        if (allBTreeFiles.isEmpty() || allBloomFilterFiles.isEmpty()) {
-            return validFiles;
-        }
-
-        if (allBTreeFiles.size() == 1 && allBloomFilterFiles.size() == 1) {
-            validFiles.add(new LSMComponentFileReferences(allBTreeFiles.get(0).fileRef, null, allBloomFilterFiles
-                    .get(0).fileRef));
-            return validFiles;
-        }
-
-        // Sorts files names from earliest to latest timestamp.
-        Collections.sort(allBTreeFiles);
-        Collections.sort(allBloomFilterFiles);
-
-        List<ComparableFileName> validComparableBTreeFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastBTree = allBTreeFiles.get(0);
-        validComparableBTreeFiles.add(lastBTree);
-
-        List<ComparableFileName> validComparableBloomFilterFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastBloomFilter = allBloomFilterFiles.get(0);
-        validComparableBloomFilterFiles.add(lastBloomFilter);
-
-        for (int i = 1; i < allBTreeFiles.size(); i++) {
-            ComparableFileName currentBTree = allBTreeFiles.get(i);
-            ComparableFileName currentBloomFilter = allBloomFilterFiles.get(i);
-            // Current start timestamp is greater than last stop timestamp.
-            if (currentBTree.interval[0].compareTo(lastBTree.interval[1]) > 0
-                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[1]) > 0) {
-                validComparableBTreeFiles.add(currentBTree);
-                validComparableBloomFilterFiles.add(currentBloomFilter);
-                lastBTree = currentBTree;
-                lastBloomFilter = currentBloomFilter;
-            } else if (currentBTree.interval[0].compareTo(lastBTree.interval[0]) >= 0
-                    && currentBTree.interval[1].compareTo(lastBTree.interval[1]) <= 0
-                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[0]) >= 0
-                    && currentBloomFilter.interval[1].compareTo(lastBloomFilter.interval[1]) <= 0) {
-                // Invalid files are completely contained in last interval.
-                File invalidBTreeFile = new File(currentBTree.fullPath);
-                invalidBTreeFile.delete();
-                File invalidBloomFilterFile = new File(currentBloomFilter.fullPath);
-                invalidBloomFilterFile.delete();
-            } else {
-                // This scenario should not be possible.
-                throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
-            }
-        }
-
-        // Sort valid files in reverse lexicographical order, such that newer
-        // files come first.
-        Collections.sort(validComparableBTreeFiles, recencyCmp);
-        Collections.sort(validComparableBloomFilterFiles, recencyCmp);
-
-        Iterator<ComparableFileName> btreeFileIter = validComparableBTreeFiles.iterator();
-        Iterator<ComparableFileName> bloomFilterFileIter = validComparableBloomFilterFiles.iterator();
-        while (btreeFileIter.hasNext() && bloomFilterFileIter.hasNext()) {
-            ComparableFileName cmpBTreeFileName = btreeFileIter.next();
-            ComparableFileName cmpBloomFilterFileName = bloomFilterFileIter.next();
-            validFiles.add(new LSMComponentFileReferences(cmpBTreeFileName.fileRef, null,
-                    cmpBloomFilterFileName.fileRef));
-        }
-
-        return validFiles;
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
deleted file mode 100644
index dfda07b..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
+++ /dev/null
@@ -1,71 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-
-public class LSMBTreeFlushOperation implements ILSMIOOperation {
-
-    private final ILSMIndexAccessorInternal accessor;
-    private final ILSMComponent flushingComponent;
-    private final FileReference btreeFlushTarget;
-    private final FileReference bloomFilterFlushTarget;
-    private final ILSMIOOperationCallback callback;
-
-    public LSMBTreeFlushOperation(ILSMIndexAccessorInternal accessor, ILSMComponent flushingComponent,
-            FileReference btreeFlushTarget, FileReference bloomFilterFlushTarget, ILSMIOOperationCallback callback) {
-        this.accessor = accessor;
-        this.flushingComponent = flushingComponent;
-        this.btreeFlushTarget = btreeFlushTarget;
-        this.bloomFilterFlushTarget = bloomFilterFlushTarget;
-        this.callback = callback;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getReadDevices() {
-        return Collections.emptySet();
-    }
-
-    @Override
-    public Set<IODeviceHandle> getWriteDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        devs.add(btreeFlushTarget.getDeviceHandle());
-        devs.add(bloomFilterFlushTarget.getDeviceHandle());
-        return devs;
-    }
-
-    @Override
-    public void perform() throws HyracksDataException, IndexException {
-        accessor.flush(this);
-    }
-
-    @Override
-    public ILSMIOOperationCallback getCallback() {
-        return callback;
-    }
-
-    public FileReference getBTreeFlushTarget() {
-        return btreeFlushTarget;
-    }
-
-    public FileReference getBloomFilterFlushTarget() {
-        return bloomFilterFlushTarget;
-    }
-
-    public ILSMIndexAccessorInternal getAccessor() {
-        return accessor;
-    }
-
-    public ILSMComponent getFlushingComponent() {
-        return flushingComponent;
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeImmutableComponent.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeImmutableComponent.java
deleted file mode 100644
index daa86d9..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeImmutableComponent.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractImmutableLSMComponent;
-
-public class LSMBTreeImmutableComponent extends AbstractImmutableLSMComponent {
-    private final BTree btree;
-    private final BloomFilter bloomFilter;
-
-    public LSMBTreeImmutableComponent(BTree btree, BloomFilter bloomFilter) {
-        this.btree = btree;
-        this.bloomFilter = bloomFilter;
-    }
-
-    @Override
-    public void destroy() throws HyracksDataException {
-        btree.deactivate();
-        btree.destroy();
-        bloomFilter.deactivate();
-        bloomFilter.destroy();
-    }
-
-    public BTree getBTree() {
-        return btree;
-    }
-
-    public BloomFilter getBloomFilter() {
-        return bloomFilter;
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeImmutableComponentFactory.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeImmutableComponentFactory.java
deleted file mode 100644
index e9da5a5..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeImmutableComponentFactory.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponentFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class LSMBTreeImmutableComponentFactory implements ILSMComponentFactory {
-    private final TreeIndexFactory<BTree> btreeFactory;
-    private final BloomFilterFactory bloomFilterFactory;
-
-    public LSMBTreeImmutableComponentFactory(TreeIndexFactory<BTree> btreeFactory, BloomFilterFactory bloomFilterFactory) {
-        this.btreeFactory = btreeFactory;
-        this.bloomFilterFactory = bloomFilterFactory;
-    }
-
-    @Override
-    public ILSMComponent createLSMComponentInstance(LSMComponentFileReferences cfr) throws IndexException,
-            HyracksDataException {
-        return new LSMBTreeImmutableComponent(btreeFactory.createIndexInstance(cfr.getInsertIndexFileReference()),
-                bloomFilterFactory.createBloomFiltertInstance(cfr.getBloomFilterFileReference()));
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return btreeFactory.getBufferCache();
-    }
-
-    public int[] getBloomFilterKeyFields() {
-        return bloomFilterFactory.getBloomFilterKeyFields();
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
deleted file mode 100644
index 180fb9a..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-
-public class LSMBTreeMergeOperation implements ILSMIOOperation {
-
-    private final ILSMIndexAccessorInternal accessor;
-    private final List<ILSMComponent> mergingComponents;
-    private final ITreeIndexCursor cursor;
-    private final FileReference btreeMergeTarget;
-    private final FileReference bloomFilterMergeTarget;
-    private final ILSMIOOperationCallback callback;
-
-    public LSMBTreeMergeOperation(ILSMIndexAccessorInternal accessor, List<ILSMComponent> mergingComponents,
-            ITreeIndexCursor cursor, FileReference btreeMergeTarget, FileReference bloomFilterMergeTarget,
-            ILSMIOOperationCallback callback) {
-        this.accessor = accessor;
-        this.mergingComponents = mergingComponents;
-        this.cursor = cursor;
-        this.btreeMergeTarget = btreeMergeTarget;
-        this.bloomFilterMergeTarget = bloomFilterMergeTarget;
-        this.callback = callback;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getReadDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        for (ILSMComponent o : mergingComponents) {
-            LSMBTreeImmutableComponent component = (LSMBTreeImmutableComponent) o;
-            devs.add(component.getBTree().getFileReference().getDeviceHandle());
-            devs.add(component.getBloomFilter().getFileReference().getDeviceHandle());
-        }
-        return devs;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getWriteDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        devs.add(btreeMergeTarget.getDeviceHandle());
-        devs.add(bloomFilterMergeTarget.getDeviceHandle());
-        return devs;
-    }
-
-    @Override
-    public void perform() throws HyracksDataException, IndexException {
-        accessor.merge(this);
-    }
-
-    @Override
-    public ILSMIOOperationCallback getCallback() {
-        return callback;
-    }
-
-    public FileReference getBTreeMergeTarget() {
-        return btreeMergeTarget;
-    }
-
-    public FileReference getBloomFilterMergeTarget() {
-        return bloomFilterMergeTarget;
-    }
-
-    public ITreeIndexCursor getCursor() {
-        return cursor;
-    }
-
-    public List<ILSMComponent> getMergingComponents() {
-        return mergingComponents;
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeMutableComponent.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeMutableComponent.java
deleted file mode 100644
index 30e79b4..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeMutableComponent.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractMutableLSMComponent;
-
-public class LSMBTreeMutableComponent extends AbstractMutableLSMComponent {
-
-    private final BTree btree;
-    private final IInMemoryFreePageManager mfpm;
-
-    public LSMBTreeMutableComponent(BTree btree, IInMemoryFreePageManager mfpm) {
-        this.btree = btree;
-        this.mfpm = mfpm;
-    }
-
-    public BTree getBTree() {
-        return btree;
-    }
-
-    @Override
-    protected boolean isFull() {
-        return mfpm.isFull();
-    }
-
-    @Override
-    protected void reset() throws HyracksDataException {
-        btree.clear();
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
deleted file mode 100644
index 9400d2d..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-
-public final class LSMBTreeOpContext implements ILSMIndexOperationContext {
-
-    public ITreeIndexFrameFactory insertLeafFrameFactory;
-    public ITreeIndexFrameFactory deleteLeafFrameFactory;
-    public IBTreeLeafFrame insertLeafFrame;
-    public IBTreeLeafFrame deleteLeafFrame;
-    public final BTree memBTree;
-    public BTree.BTreeAccessor memBTreeAccessor;
-    public BTreeOpContext memBTreeOpCtx;
-    public IndexOperation op;
-    public final MultiComparator cmp;
-    public final MultiComparator bloomFilterCmp;
-    public final IModificationOperationCallback modificationCallback;
-    public final ISearchOperationCallback searchCallback;
-    private final List<ILSMComponent> componentHolder;
-
-    public LSMBTreeOpContext(BTree memBTree, ITreeIndexFrameFactory insertLeafFrameFactory,
-            ITreeIndexFrameFactory deleteLeafFrameFactory, IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback, int numBloomFilterKeyFields) {
-        IBinaryComparatorFactory cmpFactories[] = memBTree.getComparatorFactories();
-        if (cmpFactories[0] != null) {
-            this.cmp = MultiComparator.createIgnoreFieldLength(memBTree.getComparatorFactories());
-        } else {
-            this.cmp = null;
-        }
-
-        bloomFilterCmp = MultiComparator.createIgnoreFieldLength(memBTree.getComparatorFactories(), 0,
-                numBloomFilterKeyFields);
-
-        this.memBTree = memBTree;
-        this.insertLeafFrameFactory = insertLeafFrameFactory;
-        this.deleteLeafFrameFactory = deleteLeafFrameFactory;
-        this.insertLeafFrame = (IBTreeLeafFrame) insertLeafFrameFactory.createFrame();
-        this.deleteLeafFrame = (IBTreeLeafFrame) deleteLeafFrameFactory.createFrame();
-        if (insertLeafFrame != null && this.cmp != null) {
-            insertLeafFrame.setMultiComparator(cmp);
-        }
-        if (deleteLeafFrame != null && this.cmp != null) {
-            deleteLeafFrame.setMultiComparator(cmp);
-        }
-        this.componentHolder = new LinkedList<ILSMComponent>();
-        this.modificationCallback = modificationCallback;
-        this.searchCallback = searchCallback;
-    }
-
-    @Override
-    public void setOperation(IndexOperation newOp) {
-        reset();
-        this.op = newOp;
-        switch (newOp) {
-            case SEARCH:
-                setMemBTreeAccessor();
-                break;
-            case DISKORDERSCAN:
-            case UPDATE:
-                // Attention: It is important to leave the leafFrame and
-                // leafFrameFactory of the memBTree as is when doing an update.
-                // Update will only be set if a previous attempt to delete or
-                // insert failed, so we must preserve the semantics of the
-                // previously requested operation.
-                setMemBTreeAccessor();
-                return;
-            case UPSERT:
-            case INSERT:
-                setInsertMode();
-                break;
-            case PHYSICALDELETE:
-            case DELETE:
-                setDeleteMode();
-                break;
-        }
-    }
-
-    private void setMemBTreeAccessor() {
-        if (memBTreeAccessor == null) {
-            memBTreeAccessor = (BTree.BTreeAccessor) memBTree.createAccessor(modificationCallback,
-                    NoOpOperationCallback.INSTANCE);
-            memBTreeOpCtx = memBTreeAccessor.getOpContext();
-        }
-    }
-
-    public void setInsertMode() {
-        setMemBTreeAccessor();
-        memBTreeOpCtx.leafFrame = insertLeafFrame;
-        memBTreeOpCtx.leafFrameFactory = insertLeafFrameFactory;
-    }
-
-    public void setDeleteMode() {
-        setMemBTreeAccessor();
-        memBTreeOpCtx.leafFrame = deleteLeafFrame;
-        memBTreeOpCtx.leafFrameFactory = deleteLeafFrameFactory;
-    }
-
-    @Override
-    public void reset() {
-        componentHolder.clear();
-    }
-
-    public IndexOperation getOperation() {
-        return op;
-    }
-
-    @Override
-    public List<ILSMComponent> getComponentHolder() {
-        return componentHolder;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return searchCallback;
-    }
-
-    @Override
-    public IModificationOperationCallback getModificationCallback() {
-        return modificationCallback;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
deleted file mode 100644
index 0204833..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.util.ListIterator;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BloomFilterAwareBTreePointSearchCursor;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class LSMBTreePointSearchCursor implements ITreeIndexCursor {
-
-    private IIndexCursor[] rangeCursors;
-    private final ILSMIndexOperationContext opCtx;
-    private ISearchOperationCallback searchCallback;
-    private RangePredicate predicate;
-    private IIndexAccessor memBTreeAccessor;
-    private boolean includeMemComponent;
-    private int numBTrees;
-    private IIndexAccessor[] bTreeAccessors;
-    private ILSMHarness lsmHarness;
-    private boolean nextHasBeenCalled;
-    private boolean foundTuple;
-    private ITupleReference frameTuple;
-
-    public LSMBTreePointSearchCursor(ILSMIndexOperationContext opCtx) {
-        this.opCtx = opCtx;
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        if (nextHasBeenCalled) {
-            return false;
-        } else if (foundTuple) {
-            return true;
-        }
-        boolean reconciled = false;
-        for (int i = 0; i < numBTrees; ++i) {
-            bTreeAccessors[i].search(rangeCursors[i], predicate);
-            if (rangeCursors[i].hasNext()) {
-                rangeCursors[i].next();
-                // We use the predicate's to lock the key instead of the tuple that we get from cursor to avoid copying the tuple when we do the "unlatch dance"
-                if (reconciled || searchCallback.proceed(predicate.getLowKey())) {
-                    // if proceed is successful, then there's no need for doing the "unlatch dance"
-                    if (((ILSMTreeTupleReference) rangeCursors[i].getTuple()).isAntimatter()) {
-                        searchCallback.cancel(predicate.getLowKey());
-                        rangeCursors[i].close();
-                        return false;
-                    } else {
-                        frameTuple = rangeCursors[i].getTuple();
-                        foundTuple = true;
-                        return true;
-                    }
-                }
-                if (i == 0 && includeMemComponent) {
-                    // unlatch/unpin
-                    rangeCursors[i].reset();
-                    searchCallback.reconcile(predicate.getLowKey());
-                    reconciled = true;
-
-                    // retraverse
-                    memBTreeAccessor.search(rangeCursors[i], predicate);
-                    if (rangeCursors[i].hasNext()) {
-                        rangeCursors[i].next();
-                        if (((ILSMTreeTupleReference) rangeCursors[i].getTuple()).isAntimatter()) {
-                            searchCallback.cancel(predicate.getLowKey());
-                            rangeCursors[i].close();
-                            return false;
-                        } else {
-                            frameTuple = rangeCursors[i].getTuple();
-                            foundTuple = true;
-                            return true;
-                        }
-                    } else {
-                        rangeCursors[i].close();
-                    }
-                } else {
-                    frameTuple = rangeCursors[i].getTuple();
-                    searchCallback.reconcile(frameTuple);
-                    foundTuple = true;
-                    return true;
-                }
-            } else {
-                rangeCursors[i].close();
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public void reset() throws HyracksDataException, IndexException {
-        if (rangeCursors != null) {
-            for (int i = 0; i < rangeCursors.length; ++i) {
-                rangeCursors[i].reset();
-            }
-        }
-        rangeCursors = null;
-        nextHasBeenCalled = false;
-        foundTuple = false;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        LSMBTreeCursorInitialState lsmInitialState = (LSMBTreeCursorInitialState) initialState;
-        includeMemComponent = lsmInitialState.getIncludeMemComponent();
-        lsmHarness = lsmInitialState.getLSMHarness();
-        searchCallback = lsmInitialState.getSearchOperationCallback();
-        memBTreeAccessor = lsmInitialState.getMemBTreeAccessor();
-        predicate = (RangePredicate) lsmInitialState.getSearchPredicate();
-
-        numBTrees = lsmInitialState.getNumBTrees();
-        rangeCursors = new IIndexCursor[numBTrees];
-        int i = 0;
-        if (includeMemComponent) {
-            // No need for a bloom filter for the in-memory BTree.
-            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getLeafFrameFactory().createFrame();
-            rangeCursors[i] = new BTreeRangeSearchCursor(leafFrame, false);
-            ++i;
-        }
-        for (; i < numBTrees; ++i) {
-            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getLeafFrameFactory().createFrame();
-            rangeCursors[i] = new BloomFilterAwareBTreePointSearchCursor(leafFrame, false,
-                    ((LSMBTreeImmutableComponent) lsmInitialState.getOperationalComponents().get(i)).getBloomFilter());
-        }
-
-        bTreeAccessors = new IIndexAccessor[numBTrees];
-        int cursorIx = 0;
-        ListIterator<ILSMComponent> btreesIter = lsmInitialState.getOperationalComponents().listIterator();
-        if (includeMemComponent) {
-            bTreeAccessors[cursorIx] = memBTreeAccessor;
-            ++cursorIx;
-            btreesIter.next();
-        }
-
-        while (btreesIter.hasNext()) {
-            BTree diskBTree = ((LSMBTreeImmutableComponent) btreesIter.next()).getBTree();
-            bTreeAccessors[cursorIx] = diskBTree.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            cursorIx++;
-        }
-        nextHasBeenCalled = false;
-        foundTuple = false;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        nextHasBeenCalled = true;
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (lsmHarness != null) {
-            try {
-                for (int i = 0; i < rangeCursors.length; i++) {
-                    rangeCursors[i].close();
-                }
-                rangeCursors = null;
-            } finally {
-                lsmHarness.endSearch(opCtx);
-            }
-        }
-        nextHasBeenCalled = false;
-        foundTuple = false;
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return frameTuple;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        // do nothing
-        return null;
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        // do nothing
-
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        // do nothing
-
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return false;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
deleted file mode 100644
index 4255dbf..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import java.util.Iterator;
-import java.util.ListIterator;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMIndexSearchCursor;
-
-public class LSMBTreeRangeSearchCursor extends LSMIndexSearchCursor {
-    private final ArrayTupleReference copyTuple;
-    private final RangePredicate reusablePred;
-
-    private ISearchOperationCallback searchCallback;
-    private RangePredicate predicate;
-    private IIndexAccessor memBTreeAccessor;
-    private ArrayTupleBuilder tupleBuilder;
-    private boolean proceed = true;
-
-    public LSMBTreeRangeSearchCursor(ILSMIndexOperationContext opCtx) {
-        super(opCtx);
-        this.copyTuple = new ArrayTupleReference();
-        this.reusablePred = new RangePredicate(null, null, true, true, null, null);
-    }
-
-    @Override
-    public void reset() throws HyracksDataException, IndexException {
-        super.reset();
-        proceed = true;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        outputElement = outputPriorityQueue.poll();
-        needPush = true;
-        proceed = false;
-    }
-
-    protected void checkPriorityQueue() throws HyracksDataException, IndexException {
-        while (!outputPriorityQueue.isEmpty() || needPush == true) {
-            if (!outputPriorityQueue.isEmpty()) {
-                PriorityQueueElement checkElement = outputPriorityQueue.peek();
-                if (proceed && !searchCallback.proceed(checkElement.getTuple())) {
-                    if (includeMemComponent) {
-                        PriorityQueueElement inMemElement = null;
-                        boolean inMemElementFound = false;
-                        // scan the PQ for the in-memory component's element
-                        Iterator<PriorityQueueElement> it = outputPriorityQueue.iterator();
-                        while (it.hasNext()) {
-                            inMemElement = it.next();
-                            if (inMemElement.getCursorIndex() == 0) {
-                                inMemElementFound = true;
-                                it.remove();
-                                break;
-                            }
-                        }
-                        if (inMemElementFound) {
-                            // copy the in-mem tuple
-                            if (tupleBuilder == null) {
-                                tupleBuilder = new ArrayTupleBuilder(cmp.getKeyFieldCount());
-                            }
-                            TupleUtils.copyTuple(tupleBuilder, inMemElement.getTuple(), cmp.getKeyFieldCount());
-                            copyTuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-
-                            // unlatch/unpin
-                            rangeCursors[0].reset();
-
-                            // reconcile
-                            if (checkElement.getCursorIndex() == 0) {
-                                searchCallback.reconcile(copyTuple);
-                            } else {
-                                searchCallback.reconcile(checkElement.getTuple());
-                            }
-                            // retraverse
-                            reusablePred.setLowKey(copyTuple, true);
-                            memBTreeAccessor.search(rangeCursors[0], reusablePred);
-                            pushIntoPriorityQueue(inMemElement);
-                            if (cmp.compare(copyTuple, inMemElement.getTuple()) != 0) {
-                                searchCallback.cancel(copyTuple);
-                                continue;
-                            }
-                        } else {
-                            // the in-memory cursor is exhausted
-                            searchCallback.reconcile(checkElement.getTuple());
-                        }
-                    } else {
-                        searchCallback.reconcile(checkElement.getTuple());
-                    }
-                }
-                // If there is no previous tuple or the previous tuple can be ignored
-                if (outputElement == null) {
-                    if (isDeleted(checkElement)) {
-                        // If the key has been deleted then pop it and set needPush to true.
-                        // We cannot push immediately because the tuple may be
-                        // modified if hasNext() is called
-                        outputElement = outputPriorityQueue.poll();
-                        searchCallback.cancel(checkElement.getTuple());
-                        needPush = true;
-                        proceed = false;
-                    } else {
-                        break;
-                    }
-                } else {
-                    // Compare the previous tuple and the head tuple in the PQ
-                    if (compare(cmp, outputElement.getTuple(), checkElement.getTuple()) == 0) {
-                        // If the previous tuple and the head tuple are
-                        // identical
-                        // then pop the head tuple and push the next tuple from
-                        // the tree of head tuple
-
-                        // the head element of PQ is useless now
-                        PriorityQueueElement e = outputPriorityQueue.poll();
-                        pushIntoPriorityQueue(e);
-                    } else {
-                        // If the previous tuple and the head tuple are different
-                        // the info of previous tuple is useless
-                        if (needPush == true) {
-                            pushIntoPriorityQueue(outputElement);
-                            needPush = false;
-                        }
-                        proceed = true;
-                        outputElement = null;
-                    }
-                }
-            } else {
-                // the priority queue is empty and needPush
-                pushIntoPriorityQueue(outputElement);
-                needPush = false;
-                outputElement = null;
-                proceed = true;
-            }
-        }
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException,
-            IndexException {
-        LSMBTreeCursorInitialState lsmInitialState = (LSMBTreeCursorInitialState) initialState;
-        cmp = lsmInitialState.getOriginalKeyComparator();
-        includeMemComponent = lsmInitialState.getIncludeMemComponent();
-        operationalComponents = lsmInitialState.getOperationalComponents();
-        lsmHarness = lsmInitialState.getLSMHarness();
-        searchCallback = lsmInitialState.getSearchOperationCallback();
-        memBTreeAccessor = lsmInitialState.getMemBTreeAccessor();
-        predicate = (RangePredicate) lsmInitialState.getSearchPredicate();
-        reusablePred.setLowKeyComparator(cmp);
-        reusablePred.setHighKey(predicate.getHighKey(), predicate.isHighKeyInclusive());
-        reusablePred.setHighKeyComparator(predicate.getHighKeyComparator());
-
-        int numBTrees = lsmInitialState.getNumBTrees();
-        rangeCursors = new IIndexCursor[numBTrees];
-        for (int i = 0; i < numBTrees; i++) {
-            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getLeafFrameFactory().createFrame();
-            rangeCursors[i] = new BTreeRangeSearchCursor(leafFrame, false);
-        }
-        setPriorityQueueComparator();
-
-        int cursorIx = 0;
-        ListIterator<ILSMComponent> btreesIter = operationalComponents.listIterator();
-        if (includeMemComponent) {
-            // Open cursor of in-memory BTree at index 0.
-            memBTreeAccessor.search(rangeCursors[cursorIx], searchPred);
-            // Skip 0 because it is the in-memory BTree.
-            ++cursorIx;
-            btreesIter.next();
-        }
-
-        // Open cursors of on-disk BTrees.
-        int numDiskComponents = includeMemComponent ? numBTrees - 1 : numBTrees;
-        ITreeIndexAccessor[] diskBTreeAccessors = new ITreeIndexAccessor[numDiskComponents];
-        int diskBTreeIx = 0;
-        while (btreesIter.hasNext()) {
-            BTree diskBTree = (BTree) ((LSMBTreeImmutableComponent) btreesIter.next()).getBTree();
-            diskBTreeAccessors[diskBTreeIx] = diskBTree.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            diskBTreeAccessors[diskBTreeIx].search(rangeCursors[cursorIx], searchPred);
-            cursorIx++;
-            diskBTreeIx++;
-        }
-        initPriorityQueue();
-        proceed = true;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
deleted file mode 100644
index 6872520..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class LSMBTreeSearchCursor implements ITreeIndexCursor {
-
-    public enum LSMBTreeSearchType {
-        POINT,
-        RANGE
-    }
-
-    private final LSMBTreePointSearchCursor pointCursor;
-    private final LSMBTreeRangeSearchCursor rangeCursor;
-    private ITreeIndexCursor currentCursor;
-
-    public LSMBTreeSearchCursor(ILSMIndexOperationContext opCtx) {
-        pointCursor = new LSMBTreePointSearchCursor(opCtx);
-        rangeCursor = new LSMBTreeRangeSearchCursor(opCtx);
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
-
-        LSMBTreeCursorInitialState lsmInitialState = (LSMBTreeCursorInitialState) initialState;
-
-        LSMBTreeSearchType searchType = LSMBTreeSearchType.RANGE;
-        RangePredicate btreePred = (RangePredicate) searchPred;
-        if (btreePred.getLowKey() != null && btreePred.getHighKey() != null) {
-            if (btreePred.isLowKeyInclusive() && btreePred.isHighKeyInclusive()) {
-                if (btreePred.getLowKeyComparator().getKeyFieldCount() == btreePred.getHighKeyComparator()
-                        .getKeyFieldCount()) {
-                    if (btreePred.getLowKeyComparator().getKeyFieldCount() == lsmInitialState
-                            .getOriginalKeyComparator().getKeyFieldCount()) {
-                        if (lsmInitialState.getOriginalKeyComparator().compare(btreePred.getLowKey(),
-                                btreePred.getHighKey()) == 0) {
-                            searchType = LSMBTreeSearchType.POINT;
-                        }
-                    }
-                }
-            }
-        }
-        switch (searchType) {
-            case POINT:
-                currentCursor = pointCursor;
-                break;
-            case RANGE:
-                currentCursor = rangeCursor;
-                break;
-            default:
-                throw new HyracksDataException("Wrong search type");
-        }
-        currentCursor.open(lsmInitialState, searchPred);
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        return currentCursor.hasNext();
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        currentCursor.next();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (currentCursor != null) {
-            currentCursor.close();
-        }
-        currentCursor = null;
-    }
-
-    @Override
-    public void reset() throws HyracksDataException, IndexException {
-        if (currentCursor != null) {
-            currentCursor.reset();
-        }
-        currentCursor = null;
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return currentCursor.getTuple();
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return currentCursor.getPage();
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        currentCursor.setBufferCache(bufferCache);
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        currentCursor.setFileId(fileId);
-
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return currentCursor.exclusiveLatchNodes();
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
deleted file mode 100644
index 2a66644..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public class LSMBTreeCopyTupleWriter extends LSMBTreeTupleWriter {
-	public LSMBTreeCopyTupleWriter(ITypeTraits[] typeTraits, int numKeyFields){
-		// Third parameter is never used locally, just give false.
-	    super(typeTraits, numKeyFields, false);
-	}
-	
-	@Override
-    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
-		int tupleSize = bytesRequired(tuple);
-		byte[] buf = tuple.getFieldData(0);
-		int tupleStartOff = ((LSMBTreeTupleReference)tuple).getTupleStart();
-		System.arraycopy(buf, tupleStartOff, targetBuf, targetOff, tupleSize);
-        return tupleSize;
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriterFactory.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriterFactory.java
deleted file mode 100644
index b73e9af..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriterFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-
-public class LSMBTreeCopyTupleWriterFactory extends TypeAwareTupleWriterFactory {
-	private static final long serialVersionUID = 1L;
-	private final ITypeTraits[] typeTraits;
-	private final int numKeyFields;
-	
-	public LSMBTreeCopyTupleWriterFactory(ITypeTraits[] typeTraits, int numKeyFields) {
-		super(typeTraits);
-		this.typeTraits = typeTraits;
-		this.numKeyFields = numKeyFields;
-	}
-
-	@Override
-	public ITreeIndexTupleWriter createTupleWriter() {
-		return new LSMBTreeCopyTupleWriter(typeTraits, numKeyFields);
-	}
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleReference.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleReference.java
deleted file mode 100644
index 2c55792..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleReference.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.tuples;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMTreeTupleReference;
-
-public class LSMBTreeTupleReference extends TypeAwareTupleReference implements ILSMTreeTupleReference {
-
-    // Indicates whether the last call to setFieldCount() was initiated by
-    // by the outside or whether it was called internally to set up an
-    // antimatter tuple.
-    private boolean resetFieldCount = false;
-    private final int numKeyFields;
-    
-    public LSMBTreeTupleReference(ITypeTraits[] typeTraits, int numKeyFields) {
-		super(typeTraits);
-		this.numKeyFields = numKeyFields;
-	}
-
-    public void setFieldCount(int fieldCount) {
-        super.setFieldCount(fieldCount);
-        // Don't change the fieldCount in reset calls.
-        resetFieldCount = false;
-    }
-
-    @Override
-    public void setFieldCount(int fieldStartIndex, int fieldCount) {
-        super.setFieldCount(fieldStartIndex, fieldCount);
-        // Don't change the fieldCount in reset calls.
-        resetFieldCount = false;
-    }
-    
-    @Override
-    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
-        this.buf = buf;
-        this.tupleStartOff = tupleStartOff;
-        if (numKeyFields != typeTraits.length) {
-            if (isAntimatter()) {
-                setFieldCount(numKeyFields);
-                // Reset the original field count for matter tuples.
-                resetFieldCount = true;
-            } else {
-                if (resetFieldCount) {
-                    setFieldCount(typeTraits.length);
-                }
-            }
-        }
-        super.resetByTupleOffset(buf, tupleStartOff);
-    }
-    
-    @Override
-    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
-        resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
-    }
-    
-	@Override
-	protected int getNullFlagsBytes() {
-		// +1.0 is for matter/antimatter bit.
-		return (int) Math.ceil((fieldCount + 1.0) / 8.0);
-    }
-
-	@Override
-	public boolean isAntimatter() {
-	      // Check if the leftmost bit is 0 or 1.
-		final byte mask = (byte) (1 << 7);
-		if ((buf.array()[tupleStartOff] & mask) != 0) {
-		    return true;
-		}
-		return false;
-	}
-	
-    public int getTupleStart() {
-    	return tupleStartOff;
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleWriter.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleWriter.java
deleted file mode 100644
index 12aca6f..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleWriter.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-
-public class LSMBTreeTupleWriter extends TypeAwareTupleWriter {
-    private final boolean isAntimatter;
-    private final int numKeyFields;
-
-    public LSMBTreeTupleWriter(ITypeTraits[] typeTraits, int numKeyFields, boolean isAntimatter) {
-        super(typeTraits);
-        this.numKeyFields = numKeyFields;
-        this.isAntimatter = isAntimatter;
-    }
-
-    @Override
-    public int bytesRequired(ITupleReference tuple) {
-        if (isAntimatter) {
-            // Only requires space for the key fields.
-            return super.bytesRequired(tuple, 0, numKeyFields);
-        } else {
-            return super.bytesRequired(tuple);
-        }
-    }
-
-    @Override
-    public int getCopySpaceRequired(ITupleReference tuple) {
-        return super.bytesRequired(tuple);
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return new LSMBTreeTupleReference(typeTraits, numKeyFields);
-    }
-
-    @Override
-    protected int getNullFlagsBytes(int numFields) {
-        // +1.0 is for matter/antimatter bit.
-        return (int) Math.ceil(((double) numFields + 1.0) / 8.0);
-    }
-
-    @Override
-    protected int getNullFlagsBytes(ITupleReference tuple) {
-        // +1.0 is for matter/antimatter bit.
-        return (int) Math.ceil(((double) tuple.getFieldCount() + 1.0) / 8.0);
-    }
-
-    @Override
-    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
-        int bytesWritten = -1;
-        if (isAntimatter) {
-            bytesWritten = super.writeTupleFields(tuple, 0, numKeyFields, targetBuf, targetOff);
-            setAntimatterBit(targetBuf, targetOff);
-        } else {
-            bytesWritten = super.writeTuple(tuple, targetBuf, targetOff);
-        }
-        return bytesWritten;
-    }
-
-    private void setAntimatterBit(byte[] targetBuf, int targetOff) {
-        // Set leftmost bit to 1.
-        targetBuf[targetOff] = (byte) (targetBuf[targetOff] | (1 << 7));
-    }
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleWriterFactory.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleWriterFactory.java
deleted file mode 100644
index 8eb24a0..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTupleWriterFactory.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-
-public class LSMBTreeTupleWriterFactory extends TypeAwareTupleWriterFactory {
-
-	private static final long serialVersionUID = 1L;
-	private final ITypeTraits[] typeTraits;
-	private final int numKeyFields;
-	private final boolean isDelete;
-	
-	public LSMBTreeTupleWriterFactory(ITypeTraits[] typeTraits, int numKeyFields, boolean isDelete) {
-		super(typeTraits);
-		this.typeTraits = typeTraits;
-		this.numKeyFields = numKeyFields;
-		this.isDelete = isDelete;
-	}
-
-	@Override
-	public ITreeIndexTupleWriter createTupleWriter() {
-		return new LSMBTreeTupleWriter(typeTraits, numKeyFields, isDelete);
-	}
-}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeUtils.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeUtils.java
deleted file mode 100644
index ac20b6d..0000000
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeUtils.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.util;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.impls.LSMBTree;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.impls.LSMBTreeFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.tuples.LSMBTreeCopyTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.tuples.LSMBTreeTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMBTreeUtils {
-
-    public static LSMBTree createLSMTree(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        return createLSMTree(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache, diskFileMapProvider,
-                typeTraits, cmpFactories, bloomFilterKeyFields, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider, 0);
-    }
-
-    public static LSMBTree createLSMTree(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider, int startIODeviceIndex) {
-        LSMBTreeTupleWriterFactory insertTupleWriterFactory = new LSMBTreeTupleWriterFactory(typeTraits,
-                cmpFactories.length, false);
-        LSMBTreeTupleWriterFactory deleteTupleWriterFactory = new LSMBTreeTupleWriterFactory(typeTraits,
-                cmpFactories.length, true);
-        LSMBTreeCopyTupleWriterFactory copyTupleWriterFactory = new LSMBTreeCopyTupleWriterFactory(typeTraits,
-                cmpFactories.length);
-        ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
-        ITreeIndexFrameFactory copyTupleLeafFrameFactory = new BTreeNSMLeafFrameFactory(copyTupleWriterFactory);
-        ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-        IFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
-                metaFrameFactory);
-
-        TreeIndexFactory<BTree> diskBTreeFactory = new BTreeFactory(diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, interiorFrameFactory, copyTupleLeafFrameFactory, cmpFactories,
-                typeTraits.length);
-        TreeIndexFactory<BTree> bulkLoadBTreeFactory = new BTreeFactory(diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, interiorFrameFactory, insertLeafFrameFactory, cmpFactories, typeTraits.length);
-
-        BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
-                bloomFilterKeyFields);
-
-        ILSMIndexFileManager fileNameManager = new LSMBTreeFileManager(ioManager, diskFileMapProvider, file,
-                diskBTreeFactory, startIODeviceIndex);
-
-        LSMBTree lsmTree = new LSMBTree(memBufferCache, memFreePageManager, interiorFrameFactory,
-                insertLeafFrameFactory, deleteLeafFrameFactory, fileNameManager, diskBTreeFactory,
-                bulkLoadBTreeFactory, bloomFilterFactory, diskFileMapProvider, typeTraits.length, cmpFactories,
-                mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        return lsmTree;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/pom.xml b/hyracks-storage-am-lsm-common/pom.xml
deleted file mode 100644
index 94ed2f4..0000000
--- a/hyracks-storage-am-lsm-common/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-lsm-common</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-bloomfilter</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/IInMemoryBufferCache.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/IInMemoryBufferCache.java
deleted file mode 100644
index 082ad2f..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/IInMemoryBufferCache.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCacheInternal;
-
-public interface IInMemoryBufferCache extends IBufferCacheInternal {
-    public void open();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMComponent.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMComponent.java
deleted file mode 100644
index fa00f85..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMComponent.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMOperationType;
-
-public interface ILSMComponent {
-    public boolean threadEnter(LSMOperationType opType) throws InterruptedException;
-
-    public void threadExit(LSMOperationType opType, boolean failedOperation) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMComponentFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMComponentFactory.java
deleted file mode 100644
index 1f3a2b7..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMComponentFactory.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public interface ILSMComponentFactory {
-    public ILSMComponent createLSMComponentInstance(LSMComponentFileReferences cfr) throws IndexException,
-            HyracksDataException;
-
-    public IBufferCache getBufferCache();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMHarness.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMHarness.java
deleted file mode 100644
index 04d7fa3..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMHarness.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-public interface ILSMHarness {
-
-    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException,
-            IndexException;
-
-    public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
-            throws HyracksDataException, IndexException;
-
-    public void noOp(ILSMIndexOperationContext ctx) throws HyracksDataException;
-
-    public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException;
-
-    public void endSearch(ILSMIndexOperationContext ctx) throws HyracksDataException;
-
-    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException;
-
-    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-            IndexException;
-
-    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException;
-
-    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-            IndexException;
-
-    public void addBulkLoadedComponent(ILSMComponent index) throws HyracksDataException, IndexException;
-
-    public ILSMOperationTracker getOperationTracker();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
deleted file mode 100644
index 15fdc6e..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-public interface ILSMIOOperation {
-    public Set<IODeviceHandle> getReadDevices();
-
-    public Set<IODeviceHandle> getWriteDevices();
-
-    public void perform() throws HyracksDataException, IndexException;
-
-    public ILSMIOOperationCallback getCallback();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallback.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallback.java
deleted file mode 100644
index bf12583..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallback.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface ILSMIOOperationCallback {
-    public void beforeOperation() throws HyracksDataException;
-
-    public void afterOperation(List<ILSMComponent> oldComponents, ILSMComponent newComponent)
-            throws HyracksDataException;
-
-    public void afterFinalize(ILSMComponent newComponent) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallbackFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallbackFactory.java
deleted file mode 100644
index 52361ee..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallbackFactory.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.io.Serializable;
-
-public interface ILSMIOOperationCallbackFactory extends Serializable {
-    public ILSMIOOperationCallback createIOOperationCallback(Object syncObj);
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallbackProvider.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallbackProvider.java
deleted file mode 100644
index b5c9741..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationCallbackProvider.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-public interface ILSMIOOperationCallbackProvider {
-    public ILSMIOOperationCallback getIOOperationCallback(ILSMIndex index);
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationScheduler.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationScheduler.java
deleted file mode 100644
index 6d96562..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationScheduler.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface ILSMIOOperationScheduler {
-    public void scheduleOperation(ILSMIOOperation operation) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationSchedulerProvider.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationSchedulerProvider.java
deleted file mode 100644
index 9737728..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIOOperationSchedulerProvider.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-
-public interface ILSMIOOperationSchedulerProvider extends Serializable {
-    public ILSMIOOperationScheduler getIOScheduler(IHyracksTaskContext ctx);
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndex.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndex.java
deleted file mode 100644
index cff47bb..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndex.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMHarness;
-
-/**
- * Methods to be implemented by an LSM index, which are called from {@link LSMHarness}.
- * The implementations of the methods below should be thread agnostic.
- * Synchronization of LSM operations like updates/searches/flushes/merges are
- * done by the {@link LSMHarness}. For example, a flush() implementation should only
- * create and return the new on-disk component, ignoring the fact that
- * concurrent searches/updates/merges may be ongoing.
- */
-public interface ILSMIndex extends IIndex {
-    
-    public void deactivate(boolean flushOnExit) throws HyracksDataException;
-
-    public ILSMIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback);
-
-    public boolean getFlushStatus(ILSMIndex index);
-
-    public ILSMOperationTracker getOperationTracker();
-
-    public ILSMIOOperationScheduler getIOScheduler();
-
-    public List<ILSMComponent> getImmutableComponents();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
deleted file mode 100644
index a85c24b..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-/**
- * Client handle for performing operations
- * (insert/delete/update/search/diskorderscan/merge/flush) on an {@link ILSMHarness}.
- * An {@link ILSMIndexAccessor} is not thread safe, but different {@link ILSMIndexAccessor}s
- * can concurrently operate on the same {@link ILSMIndex} (i.e., the {@link ILSMIndex} must allow
- * concurrent operations).
- */
-public interface ILSMIndexAccessor extends IIndexAccessor {
-    public void scheduleFlush(ILSMIOOperationCallback callback) throws HyracksDataException;
-
-    public void scheduleMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException;
-
-    /**
-     * Deletes the tuple from the memory component only.
-     * 
-     * @throws HyracksDataException
-     * @throws IndexException
-     */
-    public void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    /**
-     * Attempts to insert the given tuple.
-     * If the insert would have to wait for a flush to complete, then this method returns false to
-     * allow the caller to avoid potential deadlock situations.
-     * Otherwise, returns true (insert was successful).
-     * 
-     * @param tuple
-     *            Tuple to be inserted.
-     * @throws HyracksDataException
-     *             If the BufferCache throws while un/pinning or un/latching.
-     * @throws IndexException
-     *             If an index-specific constraint is violated, e.g., the key
-     *             already exists.
-     */
-    public boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    /**
-     * Attempts to delete the given tuple.
-     * If the delete would have to wait for a flush to complete, then this method returns false to
-     * allow the caller to avoid potential deadlock situations.
-     * Otherwise, returns true (delete was successful).
-     * 
-     * @param tuple
-     *            Tuple to be deleted.
-     * @throws HyracksDataException
-     *             If the BufferCache throws while un/pinning or un/latching.
-     * @throws IndexException
-     *             If there is no matching tuple in the index.
-     */
-    public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    /**
-     * Attempts to update the given tuple.
-     * If the update would have to wait for a flush to complete, then this method returns false to
-     * allow the caller to avoid potential deadlock situations.
-     * Otherwise, returns true (update was successful).
-     * 
-     * @param tuple
-     *            Tuple whose match in the index is to be update with the given
-     *            tuples contents.
-     * @throws HyracksDataException
-     *             If the BufferCache throws while un/pinning or un/latching.
-     * @throws IndexException
-     *             If there is no matching tuple in the index.
-     */
-    public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    /**
-     * This operation is only supported by indexes with the notion of a unique key.
-     * If tuple's key already exists, then this operation attempts to performs an update.
-     * Otherwise, it attempts to perform an insert.
-     * If the operation would have to wait for a flush to complete, then this method returns false to
-     * allow the caller to avoid potential deadlock situations.
-     * Otherwise, returns true (insert/update was successful).
-     * 
-     * @param tuple
-     *            Tuple to be deleted.
-     * @throws HyracksDataException
-     *             If the BufferCache throws while un/pinning or un/latching.
-     * @throws IndexException
-     *             If there is no matching tuple in the index.
-     */
-    public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    public void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    /**
-     * This method can be used to increase the number of 'active' operations of an index artificially,
-     * without actually modifying the index.
-     * This method does not block and is guaranteed to trigger the {@link ILSMOperationTracker}'s beforeOperation
-     * and afterOperation calls.
-     * 
-     * @throws HyracksDataException
-     */
-    public void noOp() throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexAccessorInternal.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexAccessorInternal.java
deleted file mode 100644
index 8c28d47..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexAccessorInternal.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-
-public interface ILSMIndexAccessorInternal extends ILSMIndexAccessor {
-
-    /**
-     * Force a flush of the in-memory component.
-     * 
-     * @throws HyracksDataException
-     * @throws TreeIndexException
-     */
-    public void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException;
-
-    /**
-     * Merge all on-disk components.
-     * 
-     * @throws HyracksDataException
-     * @throws TreeIndexException
-     */
-    public void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
deleted file mode 100644
index bc922fe..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.util.Comparator;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-
-/**
- * Provides file names for LSM on-disk components. Also cleans up invalid files.
- * There are separate methods to get file names for merge and flush because we
- * need to guarantee the correct order of on-disk components (i.e., the
- * components produced by flush are always newer than those produced by a
- * merge).
- */
-public interface ILSMIndexFileManager {
-    public void createDirs();
-
-    public void deleteDirs();
-
-    public LSMComponentFileReferences getRelFlushFileReference();
-
-    public LSMComponentFileReferences getRelMergeFileReference(String firstFileName, String lastFileName)
-            throws HyracksDataException;
-
-    public String getBaseDir();
-
-    // Deletes invalid files, and returns list of valid files from baseDir.
-    // The returned valid files are correctly sorted (based on the recency of data). 
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException;
-
-    public Comparator<String> getFileNameComparator();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexInternal.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexInternal.java
deleted file mode 100644
index e98165b..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexInternal.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-public interface ILSMIndexInternal extends ILSMIndex {
-    public ILSMIndexAccessorInternal createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback);
-
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException, IndexException;
-
-    public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException;
-
-    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException;
-
-    public ILSMComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException;
-
-    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException;
-
-    public ILSMComponent merge(List<ILSMComponent> mergedComponents, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException;
-
-    public void addComponent(ILSMComponent index);
-
-    public void subsumeMergedComponents(ILSMComponent newComponent, List<ILSMComponent> mergedComponents);
-
-    /**
-     * Populates the context's component holder with a snapshot of the components involved in the operation.
-     * 
-     * @param ctx
-     *            - the operation's context
-     */
-    public void getOperationalComponents(ILSMIndexOperationContext ctx);
-
-    public IInMemoryFreePageManager getInMemoryFreePageManager();
-
-    public List<ILSMComponent> getImmutableComponents();
-
-    public void markAsValid(ILSMComponent lsmComponent) throws HyracksDataException;
-
-    public void setFlushStatus(ILSMIndex index, boolean needsFlush);
-
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
deleted file mode 100644
index 864d0e7..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-
-public interface ILSMIndexOperationContext extends IIndexOperationContext {
-    public List<ILSMComponent> getComponentHolder();
-
-    public ISearchOperationCallback getSearchOperationCallback();
-
-    public IModificationOperationCallback getModificationCallback();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
deleted file mode 100644
index 877c6ff..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-public interface ILSMMergePolicy {
-    public void diskComponentAdded(ILSMIndex index, int totalNumDiskComponents) throws HyracksDataException, IndexException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMMergePolicyProvider.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMMergePolicyProvider.java
deleted file mode 100644
index 57a9609..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMMergePolicyProvider.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-
-public interface ILSMMergePolicyProvider extends Serializable {
-    public ILSMMergePolicy getMergePolicy(IHyracksTaskContext ctx);
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMOperationTracker.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMOperationTracker.java
deleted file mode 100644
index c3f1f3e..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMOperationTracker.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMOperationType;
-
-/**
- * This interface exposes methods for tracking and setting the status of operations for the purpose
- * of coordinating flushes/merges in {@link ILSMIndex}.
- * Note that 'operation' below refers to {@link IIndexAccessor} methods.
- * 
- * @author zheilbron
- */
-public interface ILSMOperationTracker {
-
-    /**
-     * An {@link ILSMIndex} will call this method before an operation enters it,
-     * i.e., before any latches are taken.
-     * If tryOperation is true, and the operation would have to wait for a flush,
-     * then this method does not block and returns false.
-     * Otherwise, this method returns true, and the operation is considered 'active' in the index.
-     */
-    public void beforeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-            IModificationOperationCallback modificationCallback) throws HyracksDataException;
-
-    /**
-     * An {@link ILSMIndex} will call this method after an operation has left the index,
-     * i.e., after all relevant latches have been released.
-     * After this method has been called, the operation is still considered 'active',
-     * until the issuer of the operation declares it completed by calling completeOperation().
-     */
-    public void afterOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-            IModificationOperationCallback modificationCallback) throws HyracksDataException;
-
-    /**
-     * This method must be called by whoever is requesting the index operation through an {@link IIndexAccessor}.
-     * The use of this method indicates that the operation is no longer 'active'
-     * for the purpose of coordinating flushes/merges.
-     */
-    public void completeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-            IModificationOperationCallback modificationCallback) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMOperationTrackerFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMOperationTrackerFactory.java
deleted file mode 100644
index db7ff6c..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMOperationTrackerFactory.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import java.io.Serializable;
-
-public interface ILSMOperationTrackerFactory extends Serializable {
-    public ILSMOperationTracker createOperationTracker(ILSMIndex index);
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMTreeTupleReference.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMTreeTupleReference.java
deleted file mode 100644
index 8d82673..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMTreeTupleReference.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.api;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-
-public interface ILSMTreeTupleReference extends ITreeIndexTupleReference {
-    public boolean isAntimatter();
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelper.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelper.java
deleted file mode 100644
index ea7c3b4..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelper.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-
-public abstract class AbstractLSMIndexDataflowHelper extends IndexDataflowHelper {
-
-    protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
-    protected static int DEFAULT_MEM_NUM_PAGES = 1000;
-
-    protected final int memPageSize;
-    protected final int memNumPages;
-
-    protected final ILSMMergePolicy mergePolicy;
-    protected final ILSMIOOperationScheduler ioScheduler;
-    protected final ILSMOperationTrackerFactory opTrackerFactory;
-    protected final ILSMIOOperationCallbackProvider ioOpCallbackProvider;
-
-    public AbstractLSMIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
-                ioScheduler, ioOpCallbackProvider);
-    }
-
-    public AbstractLSMIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(opDesc, ctx, partition);
-        this.memPageSize = memPageSize;
-        this.memNumPages = memNumPages;
-        this.mergePolicy = mergePolicy;
-        this.opTrackerFactory = opTrackerFactory;
-        this.ioScheduler = ioScheduler;
-        this.ioOpCallbackProvider = ioOpCallbackProvider;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelperFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelperFactory.java
deleted file mode 100644
index a2f2a11..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelperFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.dataflow;
-
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-
-public abstract class AbstractLSMIndexDataflowHelperFactory implements IIndexDataflowHelperFactory {
-    protected static final long serialVersionUID = 1L;
-
-    protected final ILSMMergePolicyProvider mergePolicyProvider;
-    protected final ILSMOperationTrackerFactory opTrackerFactory;
-    protected final ILSMIOOperationSchedulerProvider ioSchedulerProvider;
-    protected final ILSMIOOperationCallbackProvider ioOpCallbackProvider;
-    protected final int memPageSize;
-    protected final int memNumPages;
-
-    public AbstractLSMIndexDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
-        this.mergePolicyProvider = mergePolicyProvider;
-        this.opTrackerFactory = opTrackerFactory;
-        this.ioSchedulerProvider = ioSchedulerProvider;
-        this.ioOpCallbackProvider = ioOpCallbackProvider;
-        this.memPageSize = memPageSize;
-        this.memNumPages = memNumPages;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMIndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMIndexInsertUpdateDeleteOperatorNodePushable.java
deleted file mode 100644
index baa9648..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMIndexInsertUpdateDeleteOperatorNodePushable.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.common.dataflow;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexInsertUpdateDeleteOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-
-public class LSMIndexInsertUpdateDeleteOperatorNodePushable extends IndexInsertUpdateDeleteOperatorNodePushable {
-
-    protected FrameTupleAppender appender;
-
-    public LSMIndexInsertUpdateDeleteOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider, IndexOperation op) {
-        super(opDesc, ctx, partition, fieldPermutation, recordDescProvider, op);
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        ILSMIndexAccessor lsmAccessor = (ILSMIndexAccessor) indexAccessor;
-        int lastFlushedTupleIndex = 0;
-        int tupleCount = accessor.getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            try {
-                if (tupleFilter != null) {
-                    frameTuple.reset(accessor, i);
-                    if (!tupleFilter.accept(frameTuple)) {
-                        lsmAccessor.noOp();
-                        continue;
-                    }
-                }
-                tuple.reset(accessor, i);
-
-                switch (op) {
-                    case INSERT: {
-                        if (!lsmAccessor.tryInsert(tuple)) {
-                            flushPartialFrame(lastFlushedTupleIndex, i);
-                            lastFlushedTupleIndex = (i == 0) ? 0 : i - 1;
-                            lsmAccessor.insert(tuple);
-                        }
-                        break;
-                    }
-                    case DELETE: {
-                        if (!lsmAccessor.tryDelete(tuple)) {
-                            flushPartialFrame(lastFlushedTupleIndex, i);
-                            lastFlushedTupleIndex = (i == 0) ? 0 : i - 1;
-                            lsmAccessor.delete(tuple);
-                        }
-                        break;
-                    }
-                    case UPSERT: {
-                        if (!lsmAccessor.tryUpsert(tuple)) {
-                            flushPartialFrame(lastFlushedTupleIndex, i);
-                            lastFlushedTupleIndex = (i == 0) ? 0 : i - 1;
-                            lsmAccessor.upsert(tuple);
-                        }
-                        break;
-                    }
-                    case UPDATE: {
-                        if (!lsmAccessor.tryUpdate(tuple)) {
-                            flushPartialFrame(lastFlushedTupleIndex, i);
-                            lastFlushedTupleIndex = (i == 0) ? 0 : i - 1;
-                            lsmAccessor.update(tuple);
-                        }
-                        break;
-                    }
-                    default: {
-                        throw new HyracksDataException("Unsupported operation " + op
-                                + " in tree index InsertUpdateDelete operator");
-                    }
-                }
-            } catch (HyracksDataException e) {
-                throw e;
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-        }
-        if (lastFlushedTupleIndex == 0) {
-            // No partial flushing was necessary. Forward entire frame.
-            System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0, buffer.capacity());
-            FrameUtils.flushFrame(writeBuffer, writer);
-        } else {
-            // Flush remaining partial frame.
-            flushPartialFrame(lastFlushedTupleIndex, tupleCount);
-        }
-    }
-
-    private void flushPartialFrame(int startTupleIndex, int endTupleIndex) throws HyracksDataException {
-        if (appender == null) {
-            appender = new FrameTupleAppender(ctx.getFrameSize());
-        }
-        appender.reset(writeBuffer, true);
-        for (int i = startTupleIndex; i < endTupleIndex; i++) {
-            if (!appender.append(accessor, i)) {
-                throw new IllegalStateException("Failed to append tuple into frame.");
-            }
-        }
-        FrameUtils.flushFrame(writeBuffer, writer);
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMTreeIndexInsertUpdateDeleteOperatorDescriptor.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMTreeIndexInsertUpdateDeleteOperatorDescriptor.java
deleted file mode 100644
index 6297576..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMTreeIndexInsertUpdateDeleteOperatorDescriptor.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class LSMTreeIndexInsertUpdateDeleteOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int[] fieldPermutation;
-    private final IndexOperation op;
-
-    public LSMTreeIndexInsertUpdateDeleteOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields, int[] fieldPermutation,
-            IndexOperation op, IIndexDataflowHelperFactory dataflowHelperFactory,
-            ITupleFilterFactory tupleFilterFactory, IModificationOperationCallbackFactory modificationOpCallbackProvider) {
-        super(spec, 1, 1, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, bloomFilterKeyFields, dataflowHelperFactory, tupleFilterFactory, false,
-                NoOpLocalResourceFactoryProvider.INSTANCE, NoOpOperationCallbackFactory.INSTANCE,
-                modificationOpCallbackProvider);
-        this.fieldPermutation = fieldPermutation;
-        this.op = op;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new LSMIndexInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
-                recordDescProvider, op);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/DualIndexInMemoryBufferCache.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/DualIndexInMemoryBufferCache.java
deleted file mode 100644
index c24c473..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/DualIndexInMemoryBufferCache.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.freepage;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-
-/**
- * In-memory buffer cache that supports two tree indexes.
- * We assume that the tree indexes have 2 fixed pages, one at index 0 (metadata page), and one at index 1 (root page).
- */
-public class DualIndexInMemoryBufferCache extends InMemoryBufferCache {
-
-    public DualIndexInMemoryBufferCache(ICacheMemoryAllocator allocator, int pageSize, int numPages) {
-        super(allocator, pageSize, numPages, new TransientFileMapManager());
-    }
-
-    @Override
-    public ICachedPage pin(long dpid, boolean newPage) {
-        int pageId = BufferedFileHandle.getPageId(dpid);
-        int fileId = BufferedFileHandle.getFileId(dpid);
-        if (pageId < pages.length) {
-            // Common case: Return regular page.
-            if (pageId == 0 || pageId == 1) {
-                return pages[pageId + 2 * fileId];
-            } else {
-                return pages[pageId];
-            }
-        } else {
-            // Rare case: Return overflow page, possibly expanding overflow
-            // array.
-            synchronized (overflowPages) {
-                int numNewPages = pageId - pages.length - overflowPages.size() + 1;
-                if (numNewPages > 0) {
-                    ByteBuffer[] buffers = allocator.allocate(pageSize, numNewPages);
-                    for (int i = 0; i < numNewPages; i++) {
-                        CachedPage overflowPage = new CachedPage(pages.length + overflowPages.size(), buffers[i]);
-                        overflowPages.add(overflowPage);
-                    }
-                }
-                return overflowPages.get(pageId - pages.length);
-            }
-        }
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/DualIndexInMemoryFreePageManager.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/DualIndexInMemoryFreePageManager.java
deleted file mode 100644
index 7a2be7f..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/DualIndexInMemoryFreePageManager.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.freepage;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-
-/**
- * In-memory free page manager that supports two tree indexes.
- * We assume that the tree indexes have 2 fixed pages, one at index 0 (metadata page), and one at index 1 (root page).
- */
-public class DualIndexInMemoryFreePageManager extends InMemoryFreePageManager {
-
-    public DualIndexInMemoryFreePageManager(int capacity, ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
-        super(capacity, metaDataFrameFactory);
-        // We start the currentPageId from 3, because the RTree uses
-        // the first page as metadata page, and the second page as root page.
-        // And the BTree uses the third page as metadata, and the third page as root page 
-        // (when returning free pages we first increment, then get)
-        currentPageId.set(3);
-    }
-
-    @Override
-    public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException {
-        currentPageId.set(3);
-    }
-
-    public int getCapacity() {
-        return capacity - 4;
-    }
-
-    public void reset() {
-        currentPageId.set(3);
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/InMemoryBufferCache.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/InMemoryBufferCache.java
deleted file mode 100644
index 66d8ec2..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/InMemoryBufferCache.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.freepage;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPageInternal;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class InMemoryBufferCache implements IInMemoryBufferCache {
-    protected final ICacheMemoryAllocator allocator;
-    protected final IFileMapManager fileMapManager;
-    protected final int pageSize;
-    protected final int numPages;
-    protected final List<CachedPage> overflowPages = new ArrayList<CachedPage>();
-    protected CachedPage[] pages;
-
-    public InMemoryBufferCache(ICacheMemoryAllocator allocator, int pageSize, int numPages,
-            IFileMapManager fileMapManager) {
-        this.allocator = allocator;
-        this.fileMapManager = fileMapManager;
-        this.pageSize = pageSize;
-        this.numPages = numPages;
-    }
-
-    public void open() {
-        pages = new CachedPage[numPages];
-        ByteBuffer[] buffers = allocator.allocate(pageSize, numPages);
-        for (int i = 0; i < buffers.length; ++i) {
-            pages[i] = new CachedPage(i, buffers[i]);
-        }
-    }
-
-    @Override
-    public ICachedPage pin(long dpid, boolean newPage) {
-        int pageId = BufferedFileHandle.getPageId(dpid);
-        if (pageId < pages.length) {
-            // Common case: Return regular page.
-            return pages[pageId];
-        } else {
-            // Rare case: Return overflow page, possibly expanding overflow array.
-            synchronized (overflowPages) {
-                int numNewPages = pageId - pages.length - overflowPages.size() + 1;
-                if (numNewPages > 0) {
-                    ByteBuffer[] buffers = allocator.allocate(pageSize, numNewPages);
-                    for (int i = 0; i < numNewPages; i++) {
-                        CachedPage overflowPage = new CachedPage(pages.length + overflowPages.size(), buffers[i]);
-                        overflowPages.add(overflowPage);
-                    }
-                }
-                return overflowPages.get(pageId - pages.length);
-            }
-        }
-    }
-
-    @Override
-    public ICachedPage tryPin(long dpid) throws HyracksDataException {
-        return pin(dpid, false);
-    }
-
-    @Override
-    public int getPageSize() {
-        return pageSize;
-    }
-
-    @Override
-    public int getNumPages() {
-        return numPages;
-    }
-
-    @Override
-    public ICachedPageInternal getPage(int cpid) {
-        return pages[cpid];
-    }
-
-    public int getNumOverflowPages() {
-        return overflowPages.size();
-    }
-
-    @Override
-    public void createFile(FileReference fileRef) throws HyracksDataException {
-        synchronized (fileMapManager) {
-            fileMapManager.registerFile(fileRef);
-        }
-    }
-
-    @Override
-    public void openFile(int fileId) throws HyracksDataException {
-        // Do nothing.
-    }
-
-    @Override
-    public void closeFile(int fileId) throws HyracksDataException {
-        // Do nothing.
-    }
-
-    @Override
-    public void deleteFile(int fileId, boolean flushDirtyPages) throws HyracksDataException {
-        synchronized (fileMapManager) {
-            fileMapManager.unregisterFile(fileId);
-        }
-    }
-
-    @Override
-    public void unpin(ICachedPage page) throws HyracksDataException {
-        // Do Nothing.
-    }
-
-    @Override
-    public void close() {
-        for (int i = 0; i < numPages; ++i) {
-            pages[i] = null;
-        }
-        overflowPages.clear();
-    }
-
-    public class CachedPage implements ICachedPageInternal {
-        private final int cpid;
-        private final ByteBuffer buffer;
-        private final ReadWriteLock latch;
-
-        public CachedPage(int cpid, ByteBuffer buffer) {
-            this.cpid = cpid;
-            this.buffer = buffer;
-            latch = new ReentrantReadWriteLock(true);
-        }
-
-        @Override
-        public ByteBuffer getBuffer() {
-            return buffer;
-        }
-
-        @Override
-        public Object getReplacementStrategyObject() {
-            // Do nothing.
-            return null;
-        }
-
-        @Override
-        public boolean pinIfGoodVictim() {
-            // Do nothing.
-            return false;
-        }
-
-        @Override
-        public int getCachedPageId() {
-            return cpid;
-        }
-
-        @Override
-        public void acquireReadLatch() {
-            latch.readLock().lock();
-        }
-
-        @Override
-        public void acquireWriteLatch() {
-            latch.writeLock().lock();
-        }
-
-        @Override
-        public void releaseReadLatch() {
-            latch.readLock().unlock();
-        }
-
-        @Override
-        public void releaseWriteLatch() {
-            latch.writeLock().unlock();
-        }
-    }
-
-    @Override
-    public void force(int fileId, boolean metadata) throws HyracksDataException {
-    }
-
-    @Override
-    public void flushDirtyPage(ICachedPage page) throws HyracksDataException {
-    }
-
-    public IFileMapProvider getFileMapProvider() {
-        return fileMapManager;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/InMemoryFreePageManager.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/InMemoryFreePageManager.java
deleted file mode 100644
index c601a9b..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/freepage/InMemoryFreePageManager.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.freepage;
-
-import java.util.concurrent.atomic.AtomicInteger;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-
-public class InMemoryFreePageManager implements IInMemoryFreePageManager {
-    protected final int capacity;
-    protected final AtomicInteger currentPageId = new AtomicInteger();
-    protected final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
-
-    public InMemoryFreePageManager(int capacity, ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
-        // We start the currentPageId from 1, because the BTree uses
-        // the first page as metadata page, and the second page as root page.
-        // (when returning free pages we first increment, then get)
-        currentPageId.set(1);
-        this.capacity = capacity;
-        this.metaDataFrameFactory = metaDataFrameFactory;
-    }
-
-    @Override
-    public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-        // The very first call returns page id 2 because the BTree uses
-        // the first page as metadata page, and the second page as root page.
-        return currentPageId.incrementAndGet();
-    }
-
-    @Override
-    public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-        return currentPageId.get();
-    }
-
-    @Override
-    public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException {
-        currentPageId.set(1);
-    }
-
-    @Override
-    public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
-        return metaDataFrameFactory;
-    }
-
-    public int getCapacity() {
-        return capacity - 2;
-    }
-
-    public void reset() {
-        currentPageId.set(1);
-    }
-
-    public boolean isFull() {
-        return currentPageId.get() >= capacity;
-    }
-
-    @Override
-    public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException {
-    }
-
-    @Override
-    public byte getMetaPageLevelIndicator() {
-        return 0;
-    }
-
-    @Override
-    public byte getFreePageLevelIndicator() {
-        return 0;
-    }
-
-    @Override
-    public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
-        return false;
-    }
-
-    @Override
-    public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
-        return false;
-    }
-
-    @Override
-    public int getFirstMetadataPage() {
-        // Method doesn't make sense for this free page manager.
-        return -1;
-    }
-
-    @Override
-    public void open(int fileId) {
-        // Method doesn't make sense for this free page manager.
-    }
-
-    @Override
-    public void close() {
-        // Method doesn't make sense for this free page manager.
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractImmutableLSMComponent.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractImmutableLSMComponent.java
deleted file mode 100644
index b6fc2f7..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractImmutableLSMComponent.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-
-public abstract class AbstractImmutableLSMComponent implements ILSMComponent {
-
-    private ComponentState state;
-    private int readerCount;
-
-    private enum ComponentState {
-        READABLE,
-        READABLE_MERGING,
-        KILLED
-    }
-
-    public AbstractImmutableLSMComponent() {
-        state = ComponentState.READABLE;
-        readerCount = 0;
-    }
-
-    @Override
-    public synchronized boolean threadEnter(LSMOperationType opType) {
-        if (state == ComponentState.KILLED) {
-            return false;
-        }
-
-        switch (opType) {
-            case MODIFICATION:
-            case SEARCH:
-                readerCount++;
-                break;
-            case MERGE:
-                if (state == ComponentState.READABLE_MERGING) {
-                    return false;
-                }
-                state = ComponentState.READABLE_MERGING;
-                readerCount++;
-                break;
-            default:
-                throw new UnsupportedOperationException("Unsupported operation " + opType);
-        }
-        return true;
-    }
-
-    @Override
-    public synchronized void threadExit(LSMOperationType opType, boolean failedOperation) throws HyracksDataException {
-        switch (opType) {
-            case MERGE:
-                if (failedOperation) {
-                    state = ComponentState.READABLE;
-                }
-            case MODIFICATION:
-            case SEARCH:
-                readerCount--;
-
-                if (readerCount == 0 && state == ComponentState.READABLE_MERGING) {
-                    destroy();
-                    state = ComponentState.KILLED;
-                }
-                break;
-            default:
-                throw new UnsupportedOperationException("Unsupported operation " + opType);
-        }
-    }
-
-    protected abstract void destroy() throws HyracksDataException;
-
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
deleted file mode 100644
index 0c6b9ab..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicReference;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public abstract class AbstractLSMIndex implements ILSMIndexInternal {
-    protected final static double MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE = 0.1;
-
-    protected final ILSMHarness lsmHarness;
-
-    protected final ILSMIOOperationScheduler ioScheduler;
-    protected final ILSMIOOperationCallbackProvider ioOpCallbackProvider;
-
-    // In-memory components.   
-    protected final IInMemoryFreePageManager memFreePageManager;
-
-    // On-disk components.    
-    protected final IBufferCache diskBufferCache;
-    protected final ILSMIndexFileManager fileManager;
-    protected final IFileMapProvider diskFileMapProvider;
-    protected final AtomicReference<List<ILSMComponent>> componentsRef;
-
-    protected boolean isActivated;
-
-    private boolean needsFlush = false;
-
-    public AbstractLSMIndex(IInMemoryFreePageManager memFreePageManager, IBufferCache diskBufferCache,
-            ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        this.memFreePageManager = memFreePageManager;
-        this.diskBufferCache = diskBufferCache;
-        this.diskFileMapProvider = diskFileMapProvider;
-        this.fileManager = fileManager;
-        this.ioScheduler = ioScheduler;
-        this.ioOpCallbackProvider = ioOpCallbackProvider;
-        ILSMOperationTracker opTracker = opTrackerFactory.createOperationTracker(this);
-        lsmHarness = new LSMHarness(this, mergePolicy, opTracker);
-        isActivated = false;
-        componentsRef = new AtomicReference<List<ILSMComponent>>();
-        componentsRef.set(new LinkedList<ILSMComponent>());
-    }
-
-    protected void forceFlushDirtyPages(ITreeIndex treeIndex) throws HyracksDataException {
-        int fileId = treeIndex.getFileId();
-        IBufferCache bufferCache = treeIndex.getBufferCache();
-        // Flush all dirty pages of the tree. 
-        // By default, metadata and data are flushed asynchronously in the buffercache.
-        // This means that the flush issues writes to the OS, but the data may still lie in filesystem buffers.
-        ITreeIndexMetaDataFrame metadataFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory().createFrame();
-        int startPage = 0;
-        int maxPage = treeIndex.getFreePageManager().getMaxPage(metadataFrame);
-        forceFlushDirtyPages(bufferCache, fileId, startPage, maxPage);
-    }
-
-    protected void forceFlushDirtyPages(IBufferCache bufferCache, int fileId, int startPageId, int endPageId)
-            throws HyracksDataException {
-        for (int i = startPageId; i <= endPageId; i++) {
-            ICachedPage page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, i));
-            // If tryPin returns null, it means the page is not cached, and therefore cannot be dirty.
-            if (page == null) {
-                continue;
-            }
-            try {
-                bufferCache.flushDirtyPage(page);
-            } finally {
-                bufferCache.unpin(page);
-            }
-        }
-        // Forces all pages of given file to disk. This guarantees the data makes it to disk.
-        bufferCache.force(fileId, true);
-    }
-
-    protected void markAsValidInternal(ITreeIndex treeIndex) throws HyracksDataException {
-        int fileId = treeIndex.getFileId();
-        IBufferCache bufferCache = treeIndex.getBufferCache();
-        ITreeIndexMetaDataFrame metadataFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory().createFrame();
-        // Mark the component as a valid component by flushing the metadata page to disk
-        int metadataPageId = treeIndex.getFreePageManager().getFirstMetadataPage();
-        ICachedPage metadataPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, metadataPageId), false);
-        metadataPage.acquireWriteLatch();
-        try {
-            metadataFrame.setPage(metadataPage);
-            metadataFrame.setValid(true);
-
-            // Flush the single modified page to disk.
-            bufferCache.flushDirtyPage(metadataPage);
-
-            // Force modified metadata page to disk.
-            bufferCache.force(fileId, true);
-        } finally {
-            metadataPage.releaseWriteLatch();
-            bufferCache.unpin(metadataPage);
-        }
-    }
-
-    @Override
-    public void addComponent(ILSMComponent c) {
-        List<ILSMComponent> oldList = componentsRef.get();
-        List<ILSMComponent> newList = new ArrayList<ILSMComponent>();
-        newList.add(c);
-        for (ILSMComponent oc : oldList) {
-            newList.add(oc);
-        }
-        componentsRef.set(newList);
-    }
-
-    @Override
-    public void subsumeMergedComponents(ILSMComponent newComponent, List<ILSMComponent> mergedComponents) {
-        List<ILSMComponent> oldList = componentsRef.get();
-        List<ILSMComponent> newList = new ArrayList<ILSMComponent>();
-        int swapIndex = oldList.indexOf(mergedComponents.get(0));
-        int swapSize = mergedComponents.size();
-        for (int i = 0; i < oldList.size(); i++) {
-            if (i < swapIndex || i >= swapIndex + swapSize) {
-                newList.add(oldList.get(i));
-            } else if (i == swapIndex) {
-                newList.add(newComponent);
-            }
-        }
-        componentsRef.set(newList);
-    }
-
-    @Override
-    public IInMemoryFreePageManager getInMemoryFreePageManager() {
-        return memFreePageManager;
-    }
-
-    @Override
-    public List<ILSMComponent> getImmutableComponents() {
-        return componentsRef.get();
-    }
-
-    @Override
-    public void setFlushStatus(ILSMIndex index, boolean needsFlush) {
-        this.needsFlush = needsFlush;
-    }
-
-    @Override
-    public boolean getFlushStatus(ILSMIndex index) {
-        return needsFlush;
-    }
-
-    @Override
-    public ILSMOperationTracker getOperationTracker() {
-        return lsmHarness.getOperationTracker();
-    }
-
-    @Override
-    public ILSMIOOperationScheduler getIOScheduler() {
-        return ioScheduler;
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return diskBufferCache;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
deleted file mode 100644
index a84f8c9..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.text.Format;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public abstract class AbstractLSMIndexFileManager implements ILSMIndexFileManager {
-
-    protected static final String SPLIT_STRING = "_";
-    protected static final String BLOOM_FILTER_STRING = "f";
-
-    // Use all IODevices registered in ioManager in a round-robin fashion to choose
-    // where to flush and merge
-    protected final IIOManager ioManager;
-    protected final IFileMapProvider fileMapProvider;
-
-    // baseDir should reflect dataset name and partition name.
-    protected String baseDir;
-    protected final Format formatter = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS");
-    protected final Comparator<String> cmp = new FileNameComparator();
-    protected final Comparator<ComparableFileName> recencyCmp = new RecencyComparator();
-
-    protected final TreeIndexFactory<? extends ITreeIndex> treeFactory;
-
-    // The current index for the round-robin file assignment
-    private int ioDeviceIndex = 0;
-
-    public AbstractLSMIndexFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
-            TreeIndexFactory<? extends ITreeIndex> treeFactory, int startIODeviceIndex) {
-        this.baseDir = file.getFile().getPath();
-        if (!baseDir.endsWith(System.getProperty("file.separator"))) {
-            baseDir += System.getProperty("file.separator");
-        }
-        this.fileMapProvider = fileMapProvider;
-        this.ioManager = ioManager;
-        this.treeFactory = treeFactory;
-        ioDeviceIndex = startIODeviceIndex % ioManager.getIODevices().size();
-    }
-
-    private static FilenameFilter fileNameFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".");
-        }
-    };
-
-    protected boolean isValidTreeIndex(ITreeIndex treeIndex) throws HyracksDataException {
-        IBufferCache bufferCache = treeIndex.getBufferCache();
-        treeIndex.activate();
-        try {
-            int metadataPage = treeIndex.getFreePageManager().getFirstMetadataPage();
-            ITreeIndexMetaDataFrame metadataFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory()
-                    .createFrame();
-            ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(treeIndex.getFileId(), metadataPage),
-                    false);
-            page.acquireReadLatch();
-            try {
-                metadataFrame.setPage(page);
-                return metadataFrame.isValid();
-            } finally {
-                page.releaseReadLatch();
-                bufferCache.unpin(page);
-            }
-        } finally {
-            treeIndex.deactivate();
-        }
-    }
-
-    protected void cleanupAndGetValidFilesInternal(IODeviceHandle dev, FilenameFilter filter,
-            TreeIndexFactory<? extends ITreeIndex> treeFactory, ArrayList<ComparableFileName> allFiles)
-            throws HyracksDataException, IndexException {
-        File dir = new File(dev.getPath(), baseDir);
-        String[] files = dir.list(filter);
-        for (String fileName : files) {
-            File file = new File(dir.getPath() + File.separator + fileName);
-            FileReference fileRef = new FileReference(file);
-            if (treeFactory == null || isValidTreeIndex(treeFactory.createIndexInstance(fileRef))) {
-                allFiles.add(new ComparableFileName(fileRef));
-            } else {
-                file.delete();
-            }
-        }
-    }
-
-    @Override
-    public void createDirs() {
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File f = new File(dev.getPath(), baseDir);
-            f.mkdirs();
-        }
-    }
-
-    @Override
-    public void deleteDirs() {
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File f = new File(dev.getPath(), baseDir);
-            delete(f);
-        }
-    }
-
-    private void delete(File f) {
-        if (f.isDirectory()) {
-            for (File c : f.listFiles()) {
-                delete(c);
-            }
-        }
-        f.delete();
-    }
-
-    protected static FilenameFilter bloomFilterFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".") && name.endsWith(BLOOM_FILTER_STRING);
-        }
-    };
-
-    protected FileReference createFlushFile(String relFlushFileName) {
-        // Assigns new files to I/O devices in round-robin fashion.
-        IODeviceHandle dev = ioManager.getIODevices().get(ioDeviceIndex);
-        ioDeviceIndex = (ioDeviceIndex + 1) % ioManager.getIODevices().size();
-        return dev.createFileReference(relFlushFileName);
-    }
-
-    protected FileReference createMergeFile(String relMergeFileName) {
-        return createFlushFile(relMergeFileName);
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelFlushFileReference() {
-        Date date = new Date();
-        String ts = formatter.format(date);
-        // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseDir + ts + SPLIT_STRING + ts), null, null);
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelMergeFileReference(String firstFileName, String lastFileName)
-            throws HyracksDataException {
-        String[] firstTimestampRange = firstFileName.split(SPLIT_STRING);
-        String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
-        // Get the range of timestamps by taking the earliest and the latest timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseDir + firstTimestampRange[0] + SPLIT_STRING
-                + lastTimestampRange[1]), null, null);
-    }
-
-    @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
-        List<LSMComponentFileReferences> validFiles = new ArrayList<LSMComponentFileReferences>();
-        ArrayList<ComparableFileName> allFiles = new ArrayList<ComparableFileName>();
-
-        // Gather files from all IODeviceHandles and delete invalid files
-        // There are two types of invalid files:
-        // (1) The isValid flag is not set
-        // (2) The file's interval is contained by some other file
-        // Here, we only filter out (1).
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            cleanupAndGetValidFilesInternal(dev, fileNameFilter, treeFactory, allFiles);
-        }
-
-        if (allFiles.isEmpty()) {
-            return validFiles;
-        }
-
-        if (allFiles.size() == 1) {
-            validFiles.add(new LSMComponentFileReferences(allFiles.get(0).fileRef, null, null));
-            return validFiles;
-        }
-
-        // Sorts files names from earliest to latest timestamp.
-        Collections.sort(allFiles);
-
-        List<ComparableFileName> validComparableFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName last = allFiles.get(0);
-        validComparableFiles.add(last);
-        for (int i = 1; i < allFiles.size(); i++) {
-            ComparableFileName current = allFiles.get(i);
-            // The current start timestamp is greater than last stop timestamp so current is valid.
-            if (current.interval[0].compareTo(last.interval[1]) > 0) {
-                validComparableFiles.add(current);
-                last = current;
-            } else if (current.interval[0].compareTo(last.interval[0]) >= 0
-                    && current.interval[1].compareTo(last.interval[1]) <= 0) {
-                // The current file is completely contained in the interval of the 
-                // last file. Thus the last file must contain at least as much information 
-                // as the current file, so delete the current file.
-                current.fileRef.delete();
-            } else {
-                // This scenario should not be possible since timestamps are monotonically increasing.
-                throw new HyracksDataException("Found LSM files with overlapping timestamp intervals, "
-                        + "but the intervals were not contained by another file.");
-            }
-        }
-
-        // Sort valid files in reverse lexicographical order, such that newer files come first.
-        Collections.sort(validComparableFiles, recencyCmp);
-        for (ComparableFileName cmpFileName : validComparableFiles) {
-            validFiles.add(new LSMComponentFileReferences(cmpFileName.fileRef, null, null));
-        }
-
-        return validFiles;
-    }
-
-    @Override
-    public Comparator<String> getFileNameComparator() {
-        return cmp;
-    }
-
-    /**
-     * Sorts strings in reverse lexicographical order. The way we construct the
-     * file names above guarantees that:
-     * 1. Flushed files sort lower than merged files
-     * 2. Flushed files are sorted from newest to oldest (based on the timestamp
-     * string)
-     */
-    private class FileNameComparator implements Comparator<String> {
-        @Override
-        public int compare(String a, String b) {
-            // Consciously ignoring locale.
-            return -a.compareTo(b);
-        }
-    }
-
-    @Override
-    public String getBaseDir() {
-        return baseDir;
-    }
-
-    protected class ComparableFileName implements Comparable<ComparableFileName> {
-        public final FileReference fileRef;
-        public final String fullPath;
-        public final String fileName;
-
-        // Timestamp interval.
-        public final String[] interval;
-
-        public ComparableFileName(FileReference fileRef) {
-            this.fileRef = fileRef;
-            this.fullPath = fileRef.getFile().getAbsolutePath();
-            this.fileName = fileRef.getFile().getName();
-            interval = fileName.split(SPLIT_STRING);
-        }
-
-        @Override
-        public int compareTo(ComparableFileName b) {
-            int startCmp = interval[0].compareTo(b.interval[0]);
-            if (startCmp != 0) {
-                return startCmp;
-            }
-            return b.interval[1].compareTo(interval[1]);
-        }
-    }
-
-    private class RecencyComparator implements Comparator<ComparableFileName> {
-        @Override
-        public int compare(ComparableFileName a, ComparableFileName b) {
-            int cmp = -a.interval[0].compareTo(b.interval[0]);
-            if (cmp != 0) {
-                return cmp;
-            }
-            return -a.interval[1].compareTo(b.interval[1]);
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractMutableLSMComponent.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractMutableLSMComponent.java
deleted file mode 100644
index 1a6636a..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractMutableLSMComponent.java
+++ /dev/null
@@ -1,104 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-
-public abstract class AbstractMutableLSMComponent implements ILSMComponent {
-
-    private int readerCount;
-    private int writerCount;
-    private ComponentState state;
-
-    private enum ComponentState {
-        READABLE_WRITABLE,
-        READABLE_UNWRITABLE,
-        READABLE_UNWRITABLE_FLUSHING,
-        UNREADABLE_UNWRITABLE
-    }
-
-    public AbstractMutableLSMComponent() {
-        readerCount = 0;
-        writerCount = 0;
-        state = ComponentState.READABLE_WRITABLE;
-    }
-
-    @Override
-    public synchronized boolean threadEnter(LSMOperationType opType) throws InterruptedException {
-        switch (opType) {
-            case FORCE_MODIFICATION:
-                if (state != ComponentState.READABLE_WRITABLE && state != ComponentState.READABLE_UNWRITABLE) {
-                    return false;
-                }
-                writerCount++;
-                break;
-            case MODIFICATION:
-                if (state != ComponentState.READABLE_WRITABLE) {
-                    return false;
-                }
-                writerCount++;
-                break;
-            case SEARCH:
-                if (state == ComponentState.UNREADABLE_UNWRITABLE) {
-                    return false;
-                }
-                readerCount++;
-                break;
-            case FLUSH:
-                if (state == ComponentState.READABLE_UNWRITABLE_FLUSHING
-                        || state == ComponentState.UNREADABLE_UNWRITABLE) {
-                    return false;
-                }
-
-                state = ComponentState.READABLE_UNWRITABLE_FLUSHING;
-                while (writerCount > 0) {
-                    wait();
-                }
-                readerCount++;
-                break;
-            default:
-                throw new UnsupportedOperationException("Unsupported operation " + opType);
-        }
-        return true;
-    }
-
-    @Override
-    public synchronized void threadExit(LSMOperationType opType, boolean failedOperation) throws HyracksDataException {
-        switch (opType) {
-            case FORCE_MODIFICATION:
-            case MODIFICATION:
-                writerCount--;
-                if (state == ComponentState.READABLE_WRITABLE && isFull()) {
-                    state = ComponentState.READABLE_UNWRITABLE;
-                }
-                break;
-            case SEARCH:
-                readerCount--;
-                if (state == ComponentState.UNREADABLE_UNWRITABLE && readerCount == 0) {
-                    reset();
-                    state = ComponentState.READABLE_WRITABLE;
-                } else if (state == ComponentState.READABLE_WRITABLE && isFull()) {
-                    state = ComponentState.READABLE_UNWRITABLE;
-                }
-                break;
-            case FLUSH:
-                if (failedOperation) {
-                    state = isFull() ? ComponentState.READABLE_UNWRITABLE : ComponentState.READABLE_WRITABLE;
-                }
-                readerCount--;
-                if (readerCount == 0) {
-                    reset();
-                    state = ComponentState.READABLE_WRITABLE;
-                } else if (state == ComponentState.READABLE_UNWRITABLE_FLUSHING) {
-                    state = ComponentState.UNREADABLE_UNWRITABLE;
-                }
-                break;
-            default:
-                throw new UnsupportedOperationException("Unsupported operation " + opType);
-        }
-        notifyAll();
-    }
-
-    protected abstract boolean isFull();
-
-    protected abstract void reset() throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BTreeFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BTreeFactory.java
deleted file mode 100644
index 008c418..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BTreeFactory.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class BTreeFactory extends TreeIndexFactory<BTree> {
-
-    public BTreeFactory(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IFreePageManagerFactory freePageManagerFactory, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount) {
-        super(bufferCache, fileMapProvider, freePageManagerFactory, interiorFrameFactory, leafFrameFactory,
-                cmpFactories, fieldCount);
-    }
-
-    @Override
-    public BTree createIndexInstance(FileReference file) throws IndexException {
-        return new BTree(bufferCache, fileMapProvider, freePageManagerFactory.createFreePageManager(),
-                interiorFrameFactory, leafFrameFactory, cmpFactories, fieldCount, file);
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BlockingIOOperationCallbackWrapper.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BlockingIOOperationCallbackWrapper.java
deleted file mode 100644
index 34e1f0d..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BlockingIOOperationCallbackWrapper.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-
-public class BlockingIOOperationCallbackWrapper implements ILSMIOOperationCallback {
-
-    private boolean notified = false;
-
-    private final ILSMIOOperationCallback wrappedCallback;
-
-    public BlockingIOOperationCallbackWrapper(ILSMIOOperationCallback callback) {
-        this.wrappedCallback = callback;
-    }
-
-    public synchronized void waitForIO() throws InterruptedException {
-        if (!notified) {
-            this.wait();
-        }
-        notified = false;
-    }
-
-    @Override
-    public void beforeOperation() throws HyracksDataException {
-        wrappedCallback.beforeOperation();
-    }
-
-    @Override
-    public void afterOperation(List<ILSMComponent> oldComponents, ILSMComponent newComponent)
-            throws HyracksDataException {
-        wrappedCallback.afterOperation(oldComponents, newComponent);
-    }
-
-    @Override
-    public synchronized void afterFinalize(ILSMComponent newComponent) throws HyracksDataException {
-        wrappedCallback.afterFinalize(newComponent);
-        this.notifyAll();
-        notified = true;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BloomFilterAwareBTreePointSearchCursor.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BloomFilterAwareBTreePointSearchCursor.java
deleted file mode 100644
index af08bdb..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/BloomFilterAwareBTreePointSearchCursor.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-
-public class BloomFilterAwareBTreePointSearchCursor extends BTreeRangeSearchCursor {
-    private final BloomFilter bloomFilter;
-    private long[] hashes = new long[2];
-
-    public BloomFilterAwareBTreePointSearchCursor(IBTreeLeafFrame frame, boolean exclusiveLatchNodes,
-            BloomFilter bloomFilter) {
-        super(frame, exclusiveLatchNodes);
-        this.bloomFilter = bloomFilter;
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException {
-        if (bloomFilter.contains(lowKey, hashes)) {
-            return super.hasNext();
-        }
-        return false;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
deleted file mode 100644
index 1c72abf..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-
-public class ConstantMergePolicy implements ILSMMergePolicy {
-
-    private final int threshold;
-
-    public ConstantMergePolicy(int threshold) {
-        this.threshold = threshold;
-    }
-
-    @Override
-    public void diskComponentAdded(final ILSMIndex index, int totalNumDiskComponents) throws HyracksDataException,
-            IndexException {
-        if (totalNumDiskComponents >= threshold) {
-            ILSMIndexAccessor accessor = (ILSMIndexAccessor) index.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ConstantMergePolicyProvider.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ConstantMergePolicyProvider.java
deleted file mode 100644
index b404c9b..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ConstantMergePolicyProvider.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyProvider;
-
-public class ConstantMergePolicyProvider implements ILSMMergePolicyProvider {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int threshold;
-
-    public ConstantMergePolicyProvider(int threshold) {
-        this.threshold = threshold;
-    }
-
-    @Override
-    public ILSMMergePolicy getMergePolicy(IHyracksTaskContext ctx) {
-        return new ConstantMergePolicy(threshold);
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/IndexFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/IndexFactory.java
deleted file mode 100644
index 3feaecf..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/IndexFactory.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public abstract class IndexFactory<T extends IIndex> {
-
-    protected final IBufferCache bufferCache;
-    protected final IFileMapProvider fileMapProvider;
-    protected final IFreePageManagerFactory freePageManagerFactory;
-
-    public IndexFactory(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IFreePageManagerFactory freePageManagerFactory) {
-        this.bufferCache = bufferCache;
-        this.fileMapProvider = fileMapProvider;
-        this.freePageManagerFactory = freePageManagerFactory;
-    }
-
-    public abstract T createIndexInstance(FileReference file) throws IndexException;
-
-    public IBufferCache getBufferCache() {
-        return bufferCache;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMComponentFileReferences.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMComponentFileReferences.java
deleted file mode 100644
index 019dca4..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMComponentFileReferences.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.io.FileReference;
-
-public final class LSMComponentFileReferences {
-
-    // The FileReference for the index that is used for inserting records of the component. For instance, this will be the FileReference of the RTree in one component of the LSM-RTree.
-    private final FileReference insertIndexFileReference;
-    // This FileReference for the delete index (if any). For example, this will be the the FileReference of the buddy BTree in one component of the LSM-RTree.
-    private final FileReference deleteIndexFileReference;
-
-    // This FileReference for the bloom filter (if any). 
-    private final FileReference bloomFilterFileReference;
-
-    public LSMComponentFileReferences(FileReference insertIndexFileReference, FileReference deleteIndexFileReference,
-            FileReference bloomFilterFileReference) {
-        this.insertIndexFileReference = insertIndexFileReference;
-        this.deleteIndexFileReference = deleteIndexFileReference;
-        this.bloomFilterFileReference = bloomFilterFileReference;
-    }
-
-    public FileReference getInsertIndexFileReference() {
-        return insertIndexFileReference;
-    }
-
-    public FileReference getDeleteIndexFileReference() {
-        return deleteIndexFileReference;
-    }
-
-    public FileReference getBloomFilterFileReference() {
-        return bloomFilterFileReference;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMComponentState.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMComponentState.java
deleted file mode 100644
index e554a6e..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMComponentState.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-public enum LSMComponentState {
-    FLUSHING,
-    MERGING,
-    DONE_FLUSHING,
-    DONE_MERGING
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMHarness.java
deleted file mode 100644
index 4a140b4..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ /dev/null
@@ -1,231 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-
-public class LSMHarness implements ILSMHarness {
-    private final ILSMIndexInternal lsmIndex;
-    private final ILSMMergePolicy mergePolicy;
-    private final ILSMOperationTracker opTracker;
-
-    public LSMHarness(ILSMIndexInternal lsmIndex, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker) {
-        this.lsmIndex = lsmIndex;
-        this.opTracker = opTracker;
-        this.mergePolicy = mergePolicy;
-    }
-
-    private void threadExit(ILSMIndexOperationContext opCtx, LSMOperationType opType) throws HyracksDataException {
-        if (!lsmIndex.getFlushStatus(lsmIndex) && lsmIndex.getInMemoryFreePageManager().isFull()) {
-            lsmIndex.setFlushStatus(lsmIndex, true);
-        }
-        opTracker.afterOperation(opType, opCtx.getSearchOperationCallback(), opCtx.getModificationCallback());
-    }
-
-    private boolean getAndEnterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, boolean tryOperation)
-            throws HyracksDataException {
-        int numEntered = 0;
-        boolean entranceSuccessful = false;
-        List<ILSMComponent> entered = new ArrayList<ILSMComponent>();
-
-        while (!entranceSuccessful) {
-            entered.clear();
-            lsmIndex.getOperationalComponents(ctx);
-            List<ILSMComponent> components = ctx.getComponentHolder();
-            try {
-                for (ILSMComponent c : components) {
-                    if (!c.threadEnter(opType)) {
-                        break;
-                    }
-                    numEntered++;
-                    entered.add(c);
-                }
-                entranceSuccessful = numEntered == components.size();
-            } catch (InterruptedException e) {
-                entranceSuccessful = false;
-                throw new HyracksDataException(e);
-            } finally {
-                if (!entranceSuccessful) {
-                    for (ILSMComponent c : components) {
-                        if (numEntered <= 0) {
-                            break;
-                        }
-                        c.threadExit(opType, true);
-                        numEntered--;
-                    }
-                }
-            }
-            if (tryOperation && !entranceSuccessful) {
-                return false;
-            }
-        }
-
-        opTracker.beforeOperation(opType, ctx.getSearchOperationCallback(), ctx.getModificationCallback());
-        return true;
-    }
-
-    private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, boolean failedOperation)
-            throws HyracksDataException {
-        try {
-            for (ILSMComponent c : ctx.getComponentHolder()) {
-                c.threadExit(opType, failedOperation);
-            }
-        } finally {
-            threadExit(ctx, opType);
-        }
-    }
-
-    @Override
-    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException,
-            IndexException {
-        LSMOperationType opType = LSMOperationType.FORCE_MODIFICATION;
-        modify(ctx, false, tuple, opType);
-    }
-
-    @Override
-    public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
-        LSMOperationType opType = LSMOperationType.MODIFICATION;
-        return modify(ctx, tryOperation, tuple, opType);
-    }
-
-    private boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple,
-            LSMOperationType opType) throws HyracksDataException, IndexException {
-        if (!getAndEnterComponents(ctx, opType, tryOperation)) {
-            return false;
-        }
-        try {
-            lsmIndex.modify(ctx, tuple);
-        } finally {
-            exitComponents(ctx, opType, false);
-        }
-
-        return true;
-    }
-
-    @Override
-    public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
-        LSMOperationType opType = LSMOperationType.SEARCH;
-        getAndEnterComponents(ctx, opType, false);
-        try {
-            lsmIndex.search(ctx, cursor, pred);
-        } catch (HyracksDataException e) {
-            exitComponents(ctx, opType, true);
-            throw e;
-        } catch (IndexException e) {
-            exitComponents(ctx, opType, true);
-            throw e;
-        }
-    }
-
-    @Override
-    public void endSearch(ILSMIndexOperationContext ctx) throws HyracksDataException {
-        if (ctx.getOperation() == IndexOperation.SEARCH) {
-            exitComponents(ctx, LSMOperationType.SEARCH, false);
-        }
-    }
-
-    @Override
-    public void noOp(ILSMIndexOperationContext ctx) throws HyracksDataException {
-        LSMOperationType opType = LSMOperationType.NOOP;
-        opTracker.beforeOperation(opType, ctx.getSearchOperationCallback(), ctx.getModificationCallback());
-        threadExit(ctx, opType);
-    }
-
-    @Override
-    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException {
-        if (!getAndEnterComponents(ctx, LSMOperationType.FLUSH, true)) {
-            return;
-        }
-        lsmIndex.setFlushStatus(lsmIndex, false);
-        lsmIndex.scheduleFlush(ctx, callback);
-    }
-
-    @Override
-    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-            IndexException {
-        operation.getCallback().beforeOperation();
-        ILSMComponent newComponent = lsmIndex.flush(operation);
-        operation.getCallback().afterOperation(null, newComponent);
-        lsmIndex.markAsValid(newComponent);
-        operation.getCallback().afterFinalize(newComponent);
-
-        lsmIndex.addComponent(newComponent);
-        int numComponents = lsmIndex.getImmutableComponents().size();
-
-        mergePolicy.diskComponentAdded(lsmIndex, numComponents);
-        exitComponents(ctx, LSMOperationType.FLUSH, false);
-    }
-
-    @Override
-    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
-        LSMOperationType opType = LSMOperationType.MERGE;
-        if (!getAndEnterComponents(ctx, opType, false)) {
-            return;
-        }
-        if (ctx.getComponentHolder().size() > 1) {
-            lsmIndex.scheduleMerge(ctx, callback);
-        } else {
-            exitComponents(ctx, opType, true);
-        }
-    }
-
-    @Override
-    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-            IndexException {
-        List<ILSMComponent> mergedComponents = new ArrayList<ILSMComponent>();
-        operation.getCallback().beforeOperation();
-        ILSMComponent newComponent = lsmIndex.merge(mergedComponents, operation);
-        ctx.getComponentHolder().addAll(mergedComponents);
-        operation.getCallback().afterOperation(mergedComponents, newComponent);
-        lsmIndex.markAsValid(newComponent);
-        operation.getCallback().afterFinalize(newComponent);
-        lsmIndex.subsumeMergedComponents(newComponent, mergedComponents);
-        exitComponents(ctx, LSMOperationType.MERGE, false);
-    }
-
-    @Override
-    public void addBulkLoadedComponent(ILSMComponent c) throws HyracksDataException, IndexException {
-        lsmIndex.markAsValid(c);
-        lsmIndex.addComponent(c);
-        int numComponents = lsmIndex.getImmutableComponents().size();
-        mergePolicy.diskComponentAdded(lsmIndex, numComponents);
-    }
-
-    @Override
-    public ILSMOperationTracker getOperationTracker() {
-        return opTracker;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
deleted file mode 100644
index 7f08ba4..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import java.util.Comparator;
-import java.util.List;
-import java.util.PriorityQueue;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMTreeTupleReference;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public abstract class LSMIndexSearchCursor implements ITreeIndexCursor {
-    protected PriorityQueueElement outputElement;
-    protected IIndexCursor[] rangeCursors;
-    protected PriorityQueue<PriorityQueueElement> outputPriorityQueue;
-    protected PriorityQueueComparator pqCmp;
-    protected MultiComparator cmp;
-    protected boolean needPush;
-    protected boolean includeMemComponent;
-    protected ILSMHarness lsmHarness;
-    protected final ILSMIndexOperationContext opCtx;
-
-    protected List<ILSMComponent> operationalComponents;
-
-    public LSMIndexSearchCursor(ILSMIndexOperationContext opCtx) {
-        this.opCtx = opCtx;
-        outputElement = null;
-        needPush = false;
-    }
-
-    public void initPriorityQueue() throws HyracksDataException, IndexException {
-        int pqInitSize = (rangeCursors.length > 0) ? rangeCursors.length : 1;
-        outputPriorityQueue = new PriorityQueue<PriorityQueueElement>(pqInitSize, pqCmp);
-        for (int i = 0; i < rangeCursors.length; i++) {
-            pushIntoPriorityQueue(new PriorityQueueElement(i));
-        }
-    }
-
-    public IIndexCursor getCursor(int cursorIndex) {
-        return rangeCursors[cursorIndex];
-    }
-
-    @Override
-    public void reset() throws HyracksDataException, IndexException {
-        outputElement = null;
-        needPush = false;
-
-        if (outputPriorityQueue != null) {
-            outputPriorityQueue.clear();
-        }
-
-        if (rangeCursors != null) {
-            for (int i = 0; i < rangeCursors.length; i++) {
-                rangeCursors[i].reset();
-            }
-        }
-        rangeCursors = null;
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        checkPriorityQueue();
-        return !outputPriorityQueue.isEmpty();
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        outputElement = outputPriorityQueue.poll();
-        needPush = true;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        // do nothing
-        return null;
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (lsmHarness != null) {
-            try {
-                outputPriorityQueue.clear();
-                for (int i = 0; i < rangeCursors.length; i++) {
-                    rangeCursors[i].close();
-                }
-                rangeCursors = null;
-            } finally {
-                lsmHarness.endSearch(opCtx);
-            }
-        }
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        // do nothing
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        // do nothing
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return outputElement.getTuple();
-    }
-
-    protected boolean pushIntoPriorityQueue(PriorityQueueElement e) throws HyracksDataException, IndexException {
-        int cursorIndex = e.getCursorIndex();
-        if (rangeCursors[cursorIndex].hasNext()) {
-            rangeCursors[cursorIndex].next();
-            e.reset(rangeCursors[cursorIndex].getTuple());
-            outputPriorityQueue.offer(e);
-            return true;
-        }
-        rangeCursors[cursorIndex].close();
-        return false;
-    }
-
-    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
-        return ((ILSMTreeTupleReference) checkElement.getTuple()).isAntimatter();
-    }
-
-    abstract protected void checkPriorityQueue() throws HyracksDataException, IndexException;
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return false;
-    }
-
-    public class PriorityQueueElement {
-        private ITupleReference tuple;
-        private final int cursorIndex;
-
-        public PriorityQueueElement(int cursorIndex) {
-            tuple = null;
-            this.cursorIndex = cursorIndex;
-        }
-
-        public ITupleReference getTuple() {
-            return tuple;
-        }
-
-        public int getCursorIndex() {
-            return cursorIndex;
-        }
-
-        public void reset(ITupleReference tuple) {
-            this.tuple = tuple;
-        }
-    }
-
-    public class PriorityQueueComparator implements Comparator<PriorityQueueElement> {
-
-        protected final MultiComparator cmp;
-
-        public PriorityQueueComparator(MultiComparator cmp) {
-            this.cmp = cmp;
-        }
-
-        @Override
-        public int compare(PriorityQueueElement elementA, PriorityQueueElement elementB) {
-            int result = cmp.compare(elementA.getTuple(), elementB.getTuple());
-            if (result != 0) {
-                return result;
-            }
-            if (elementA.getCursorIndex() > elementB.getCursorIndex()) {
-                return 1;
-            } else {
-                return -1;
-            }
-        }
-
-        public MultiComparator getMultiComparator() {
-            return cmp;
-        }
-    }
-
-    protected void setPriorityQueueComparator() {
-        if (pqCmp == null || cmp != pqCmp.getMultiComparator()) {
-            pqCmp = new PriorityQueueComparator(cmp);
-        }
-    }
-
-    protected int compare(MultiComparator cmp, ITupleReference tupleA, ITupleReference tupleB) {
-        return cmp.compare(tupleA, tupleB);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMOperationType.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMOperationType.java
deleted file mode 100644
index 981cefe..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMOperationType.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-public enum LSMOperationType {
-    SEARCH,
-    MODIFICATION,
-    FORCE_MODIFICATION,
-    FLUSH,
-    MERGE,
-    NOOP
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
deleted file mode 100644
index 7cc29a5..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-
-public abstract class LSMTreeIndexAccessor implements ILSMIndexAccessorInternal {
-    protected ILSMHarness lsmHarness;
-    protected ILSMIndexOperationContext ctx;
-
-    public LSMTreeIndexAccessor(ILSMHarness lsmHarness, ILSMIndexOperationContext ctx) {
-        this.lsmHarness = lsmHarness;
-        this.ctx = ctx;
-    }
-
-    @Override
-    public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.INSERT);
-        lsmHarness.modify(ctx, false, tuple);
-    }
-
-    @Override
-    public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
-        // Update is the same as insert.
-        ctx.setOperation(IndexOperation.UPDATE);
-        lsmHarness.modify(ctx, false, tuple);
-    }
-
-    @Override
-    public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.DELETE);
-        lsmHarness.modify(ctx, false, tuple);
-    }
-
-    @Override
-    public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.UPSERT);
-        lsmHarness.modify(ctx, false, tuple);
-    }
-
-    @Override
-    public boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.INSERT);
-        return lsmHarness.modify(ctx, true, tuple);
-    }
-
-    @Override
-    public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.DELETE);
-        return lsmHarness.modify(ctx, true, tuple);
-    }
-
-    @Override
-    public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException {
-        // Update is the same as insert.
-        ctx.setOperation(IndexOperation.UPDATE);
-        return lsmHarness.modify(ctx, true, tuple);
-    }
-
-    @Override
-    public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.UPSERT);
-        return lsmHarness.modify(ctx, true, tuple);
-    }
-
-    @Override
-    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.SEARCH);
-        lsmHarness.search(ctx, cursor, searchPred);
-    }
-
-    @Override
-    public void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        lsmHarness.flush(ctx, operation);
-    }
-
-    @Override
-    public void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.MERGE);
-        lsmHarness.merge(ctx, operation);
-    }
-
-    @Override
-    public void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.PHYSICALDELETE);
-        lsmHarness.modify(ctx, false, tuple);
-    }
-
-    @Override
-    public void scheduleFlush(ILSMIOOperationCallback callback) throws HyracksDataException {
-        ctx.setOperation(IndexOperation.FLUSH);
-        lsmHarness.scheduleFlush(ctx, callback);
-    }
-
-    @Override
-    public void scheduleMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.MERGE);
-        lsmHarness.scheduleMerge(ctx, callback);
-    }
-
-    @Override
-    public void noOp() throws HyracksDataException {
-        lsmHarness.noOp(ctx);
-    }
-
-    @Override
-    public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.PHYSICALDELETE);
-        lsmHarness.forceModify(ctx, tuple);
-    }
-
-    @Override
-    public void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.INSERT);
-        lsmHarness.forceModify(ctx, tuple);
-    }
-
-    @Override
-    public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.DELETE);
-        lsmHarness.forceModify(ctx, tuple);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
deleted file mode 100644
index 5d36c09..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-
-public enum NoMergePolicy implements ILSMMergePolicy {
-    INSTANCE;
-
-    @Override
-    public void diskComponentAdded(ILSMIndex index, int totalNumDiskComponents) {
-        // Do nothing
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoOpIOOperationCallback.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoOpIOOperationCallback.java
deleted file mode 100644
index b123b30..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoOpIOOperationCallback.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndex;
-
-public enum NoOpIOOperationCallback implements ILSMIOOperationCallback, ILSMIOOperationCallbackProvider {
-    INSTANCE;
-
-    @Override
-    public void beforeOperation() throws HyracksDataException {
-        // Do nothing.
-    }
-
-    @Override
-    public void afterOperation(List<ILSMComponent> oldComponents, ILSMComponent newComponent)
-            throws HyracksDataException {
-        // Do nothing.
-    }
-
-    @Override
-    public void afterFinalize(ILSMComponent newComponent) throws HyracksDataException {
-        // Do nothing.
-    }
-
-    @Override
-    public ILSMIOOperationCallback getIOOperationCallback(ILSMIndex index) {
-        return INSTANCE;
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoOpOperationTrackerFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoOpOperationTrackerFactory.java
deleted file mode 100644
index 97ec50e..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/NoOpOperationTrackerFactory.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-
-/**
- * Operation tracker that does nothing.
- * WARNING: This op tracker should only be used for specific testing purposes.
- * It is assumed than an op tracker cooperates with an lsm index to synchronize flushes with
- * regular operations, and this implementation does no such tracking at all.
- */
-public class NoOpOperationTrackerFactory implements ILSMOperationTrackerFactory {
-    private static final long serialVersionUID = 1L;
-
-    public static NoOpOperationTrackerFactory INSTANCE = new NoOpOperationTrackerFactory();
-
-    @Override
-    public ILSMOperationTracker createOperationTracker(ILSMIndex index) {
-        return new ILSMOperationTracker() {
-
-            @Override
-            public void completeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-                    IModificationOperationCallback modificationCallback) throws HyracksDataException {
-                // Do nothing.
-            }
-
-            @Override
-            public void beforeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-                    IModificationOperationCallback modificationCallback) throws HyracksDataException {
-            }
-
-            @Override
-            public void afterOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-                    IModificationOperationCallback modificationCallback) throws HyracksDataException {
-                // Do nothing.                        
-            }
-        };
-    }
-
-    // Enforce singleton.
-    private NoOpOperationTrackerFactory() {
-    }
-
-};
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
deleted file mode 100644
index 9bbd394..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-
-public enum SynchronousScheduler implements ILSMIOOperationScheduler {
-    INSTANCE;
-
-    @Override
-    public void scheduleOperation(ILSMIOOperation operation) throws HyracksDataException {
-        try {
-            operation.perform();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/SynchronousSchedulerProvider.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/SynchronousSchedulerProvider.java
deleted file mode 100644
index 72d9d1d..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/SynchronousSchedulerProvider.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
-
-public enum SynchronousSchedulerProvider implements ILSMIOOperationSchedulerProvider {
-    INSTANCE;
-
-    @Override
-    public ILSMIOOperationScheduler getIOScheduler(IHyracksTaskContext ctx) {
-        return SynchronousScheduler.INSTANCE;
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ThreadCountingOperationTrackerFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ThreadCountingOperationTrackerFactory.java
deleted file mode 100644
index 3b4b00f..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ThreadCountingOperationTrackerFactory.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-
-public class ThreadCountingOperationTrackerFactory implements ILSMOperationTrackerFactory {
-
-    private static final long serialVersionUID = 1L;
-    
-    public static ThreadCountingOperationTrackerFactory INSTANCE = new ThreadCountingOperationTrackerFactory(); 
-    
-    @Override
-    public ILSMOperationTracker createOperationTracker(ILSMIndex index) {
-        return new ThreadCountingTracker(index);
-    }
-
-    // Enforce singleton.
-    private ThreadCountingOperationTrackerFactory() {
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ThreadCountingTracker.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ThreadCountingTracker.java
deleted file mode 100644
index 7fee06e..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/ThreadCountingTracker.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import java.util.concurrent.atomic.AtomicInteger;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-
-public class ThreadCountingTracker implements ILSMOperationTracker {
-    private final AtomicInteger threadRefCount;
-    private final ILSMIndex index;
-
-    public ThreadCountingTracker(ILSMIndex index) {
-        this.index = index;
-        this.threadRefCount = new AtomicInteger();
-    }
-
-    @Override
-    public void beforeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-            IModificationOperationCallback modificationCallback) throws HyracksDataException {
-        if (opType == LSMOperationType.MODIFICATION) {
-            threadRefCount.incrementAndGet();
-        }
-    }
-
-    @Override
-    public void afterOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-            IModificationOperationCallback modificationCallback) throws HyracksDataException {
-        // The operation is considered inactive, immediately after leaving the index.
-        completeOperation(opType, searchCallback, modificationCallback);
-    }
-
-    @Override
-    public void completeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
-            IModificationOperationCallback modificationCallback) throws HyracksDataException {
-        // Flush will only be handled by last exiting thread.
-        if (opType == LSMOperationType.MODIFICATION) {
-            if (threadRefCount.decrementAndGet() == 0 && index.getFlushStatus(index)) {
-                ILSMIndexAccessor accessor = (ILSMIndexAccessor) index.createAccessor(NoOpOperationCallback.INSTANCE,
-                        NoOpOperationCallback.INSTANCE);
-                accessor.scheduleFlush(NoOpIOOperationCallback.INSTANCE);
-            }
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/TreeIndexFactory.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/TreeIndexFactory.java
deleted file mode 100644
index f570058..0000000
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/TreeIndexFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public abstract class TreeIndexFactory<T extends ITreeIndex> extends IndexFactory<T> {
-
-    protected final ITreeIndexFrameFactory interiorFrameFactory;
-    protected final ITreeIndexFrameFactory leafFrameFactory;
-    protected final IBinaryComparatorFactory[] cmpFactories;
-    protected final int fieldCount;
-
-    public TreeIndexFactory(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IFreePageManagerFactory freePageManagerFactory, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount) {
-        super(bufferCache, fileMapProvider, freePageManagerFactory);
-        this.interiorFrameFactory = interiorFrameFactory;
-        this.leafFrameFactory = leafFrameFactory;
-        this.cmpFactories = cmpFactories;
-        this.fieldCount = fieldCount;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/pom.xml b/hyracks-storage-am-lsm-invertedindex/pom.xml
deleted file mode 100644
index 4b04bb6..0000000
--- a/hyracks-storage-am-lsm-invertedindex/pom.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
-
-	<parent>
-		<artifactId>hyracks</artifactId>
-		<groupId>edu.uci.ics.hyracks</groupId>
-		<version>0.2.2-SNAPSHOT</version>
-		<relativePath>..</relativePath>
-	</parent>
-
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<version>2.0.2</version>
-				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-	<dependencies>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-btree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-lsm-common</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-	    </dependency>
-	</dependencies>
-</project>
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
deleted file mode 100644
index 2556a25..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-public interface IInvertedIndex extends IIndex {
-    public IInvertedListCursor createInvertedListCursor();
-    
-    public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException;
-    
-    public ITypeTraits[] getInvListTypeTraits();
-    
-    public IBinaryComparatorFactory[] getInvListCmpFactories();    
-    
-    public ITypeTraits[] getTokenTypeTraits();
-    
-    public IBinaryComparatorFactory[] getTokenCmpFactories();
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
deleted file mode 100644
index 3fe5b57..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-public interface IInvertedIndexAccessor extends IIndexAccessor {
-    public IInvertedListCursor createInvertedListCursor();
-
-    public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-            throws HyracksDataException, IndexException;
-
-    public IIndexCursor createRangeSearchCursor();
-
-    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexFileNameMapper.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexFileNameMapper.java
deleted file mode 100644
index d7ec129..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexFileNameMapper.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-/**
- * Maps from the dictionary BTree file/path to a corresponding inverted-lists file/path.
- */
-public interface IInvertedIndexFileNameMapper {
-    public String getInvListsFilePath(String dictBTreeFilePath);
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexOperatorDescriptor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexOperatorDescriptor.java
deleted file mode 100644
index 60d1b03..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexOperatorDescriptor.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-
-public interface IInvertedIndexOperatorDescriptor extends IIndexOperatorDescriptor {
-    
-    public ITypeTraits[] getInvListsTypeTraits();
-    
-    public IBinaryComparatorFactory[] getInvListsComparatorFactories();
-    
-    public ITypeTraits[] getTokenTypeTraits();
-    
-    public IBinaryComparatorFactory[] getTokenComparatorFactories();
-
-    public IBinaryTokenizerFactory getTokenizerFactory();        
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearchModifier.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearchModifier.java
deleted file mode 100644
index 0d0d936..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearchModifier.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-public interface IInvertedIndexSearchModifier {
-    public int getOccurrenceThreshold(int numQueryTokens);
-
-    public int getNumPrefixLists(int occurrenceThreshold, int numInvLists);
-
-    public short getNumTokensLowerBound(short numQueryTokens);
-
-    public short getNumTokensUpperBound(short numQueryTokens);
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearchModifierFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearchModifierFactory.java
deleted file mode 100644
index 180f491..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearchModifierFactory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import java.io.Serializable;
-
-public interface IInvertedIndexSearchModifierFactory extends Serializable {
-    public IInvertedIndexSearchModifier createSearchModifier();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
deleted file mode 100644
index 0ea9383..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
-
-public interface IInvertedIndexSearcher {
-    public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException;
-
-    public IFrameTupleAccessor createResultFrameTupleAccessor();
-
-    public ITupleReference createResultFrameTupleReference();
-
-    public List<ByteBuffer> getResultBuffers();
-
-    public int getNumValidResultBuffers();
-    
-    public void reset();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListBuilder.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListBuilder.java
deleted file mode 100644
index dcb5fdd..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListBuilder.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public interface IInvertedListBuilder {
-    public boolean startNewList(ITupleReference tuple, int numTokenFields);
-
-    // returns true if successfully appended
-    // returns false if not enough space in targetBuf
-    public boolean appendElement(ITupleReference tuple, int numTokenFields, int numElementFields);
-
-    public void setTargetBuffer(byte[] targetBuf, int startPos);
-
-    public int getListSize();
-
-    public int getPos();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListBuilderFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListBuilderFactory.java
deleted file mode 100644
index 9cde18b..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListBuilderFactory.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-public interface IInvertedListBuilderFactory {
-    public IInvertedListBuilder create();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
deleted file mode 100644
index de703ac..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface IInvertedListCursor extends Comparable<IInvertedListCursor> {
-    public void reset(int startPageId, int endPageId, int startOff, int numElements);
-
-    public void pinPages() throws HyracksDataException, IndexException;
-
-    public void unpinPages() throws HyracksDataException;
-
-    public boolean hasNext() throws HyracksDataException, IndexException;
-
-    public void next() throws HyracksDataException;
-
-    public ITupleReference getTuple();
-
-    // getters
-    public int size();
-
-    public int getStartPageId();
-
-    public int getEndPageId();
-
-    public int getStartOff();
-
-    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException, IndexException;
-    
-    // for debugging
-    @SuppressWarnings("rawtypes")
-    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException, IndexException;
-
-    @SuppressWarnings("rawtypes")
-    public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IObjectFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IObjectFactory.java
deleted file mode 100644
index 9068a2b..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IObjectFactory.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-public interface IObjectFactory<T> {
-    public T create();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
deleted file mode 100644
index 89fd69d..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedListPartitions;
-
-public interface IPartitionedInvertedIndex {
-    public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
-            short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
-            ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException;
-
-    public boolean isEmpty();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/AbstractLSMInvertedIndexOperatorDescriptor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/AbstractLSMInvertedIndexOperatorDescriptor.java
deleted file mode 100644
index e0af36e..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/AbstractLSMInvertedIndexOperatorDescriptor.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-
-public abstract class AbstractLSMInvertedIndexOperatorDescriptor extends AbstractIndexOperatorDescriptor implements
-        IInvertedIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    protected final ITypeTraits[] invListsTypeTraits;
-    protected final IBinaryComparatorFactory[] invListComparatorFactories;
-    protected final ITypeTraits[] tokenTypeTraits;
-    protected final IBinaryComparatorFactory[] tokenComparatorFactories;
-    protected final IBinaryTokenizerFactory tokenizerFactory;
-
-    public AbstractLSMInvertedIndexOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity,
-            int outputArity, RecordDescriptor recDesc, IStorageManagerInterface storageManager,
-            IFileSplitProvider fileSplitProvider, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenComparatorFactories,
-            ITypeTraits[] invListsTypeTraits, IBinaryComparatorFactory[] invListComparatorFactories,
-            IBinaryTokenizerFactory tokenizerFactory, IIndexDataflowHelperFactory dataflowHelperFactory,
-            ITupleFilterFactory tupleFilterFactory, boolean retainInput,
-            ILocalResourceFactoryProvider localResourceFactoryProvider,
-            ISearchOperationCallbackFactory searchOpCallbackFactory,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, inputArity, outputArity, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider,
-                dataflowHelperFactory, tupleFilterFactory, retainInput, localResourceFactoryProvider,
-                searchOpCallbackFactory, modificationOpCallbackFactory);
-        this.invListsTypeTraits = invListsTypeTraits;
-        this.invListComparatorFactories = invListComparatorFactories;
-        this.tokenTypeTraits = tokenTypeTraits;
-        this.tokenComparatorFactories = tokenComparatorFactories;
-        this.tokenizerFactory = tokenizerFactory;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getTokenComparatorFactories() {
-        return tokenComparatorFactories;
-    }
-
-    @Override
-    public ITypeTraits[] getTokenTypeTraits() {
-        return tokenTypeTraits;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getInvListsComparatorFactories() {
-        return invListComparatorFactories;
-    }
-
-    @Override
-    public IBinaryTokenizerFactory getTokenizerFactory() {
-        return tokenizerFactory;
-    }
-
-    @Override
-    public ITypeTraits[] getInvListsTypeTraits() {
-        return invListsTypeTraits;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
deleted file mode 100644
index 84152d5..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-
-public class BinaryTokenizerOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final IBinaryTokenizerFactory tokenizerFactory;
-    // Field that will be tokenized.
-    private final int docField;
-    // operator will append these key fields to each token, e.g., as
-    // payload for an inverted list
-    // WARNING: too many key fields can cause significant data blowup.
-    private final int[] keyFields;
-    // Indicates whether the first key field should be the number of tokens in the tokenized set of the document.
-    // This value is used in partitioned inverted indexes, for example.
-    private final boolean addNumTokensKey;
-
-    public BinaryTokenizerOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
-            IBinaryTokenizerFactory tokenizerFactory, int docField, int[] keyFields, boolean addNumTokensKey) {
-        super(spec, 1, 1);
-        this.tokenizerFactory = tokenizerFactory;
-        this.docField = docField;
-        this.keyFields = keyFields;
-        this.addNumTokensKey = addNumTokensKey;
-        recordDescriptors[0] = recDesc;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        return new BinaryTokenizerOperatorNodePushable(ctx, recordDescProvider.getInputRecordDescriptor(
-                getActivityId(), 0), recordDescriptors[0], tokenizerFactory.createTokenizer(), docField, keyFields,
-                addNumTokensKey);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
deleted file mode 100644
index 6f28f61..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IToken;
-
-public class BinaryTokenizerOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-
-    private final IHyracksTaskContext ctx;
-    private final IBinaryTokenizer tokenizer;
-    private final int docField;
-    private final int[] keyFields;
-    private final boolean addNumTokensKey;
-    private final RecordDescriptor inputRecDesc;
-    private final RecordDescriptor outputRecDesc;
-
-    private FrameTupleAccessor accessor;
-    private ArrayTupleBuilder builder;
-    private GrowableArray builderData;
-    private FrameTupleAppender appender;
-    private ByteBuffer writeBuffer;
-
-    public BinaryTokenizerOperatorNodePushable(IHyracksTaskContext ctx, RecordDescriptor inputRecDesc,
-            RecordDescriptor outputRecDesc, IBinaryTokenizer tokenizer, int docField, int[] keyFields,
-            boolean addNumTokensKey) {
-        this.ctx = ctx;
-        this.tokenizer = tokenizer;
-        this.docField = docField;
-        this.keyFields = keyFields;
-        this.addNumTokensKey = addNumTokensKey;
-        this.inputRecDesc = inputRecDesc;
-        this.outputRecDesc = outputRecDesc;
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRecDesc);
-        writeBuffer = ctx.allocateFrame();
-        builder = new ArrayTupleBuilder(outputRecDesc.getFieldCount());
-        builderData = builder.getFieldData();
-        appender = new FrameTupleAppender(ctx.getFrameSize());
-        appender.reset(writeBuffer, true);
-        writer.open();
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            short numTokens = 0;
-            if (addNumTokensKey) {
-                // Run through the tokens to get the total number of tokens.
-                tokenizer.reset(
-                        accessor.getBuffer().array(),
-                        accessor.getTupleStartOffset(i) + accessor.getFieldSlotsLength()
-                                + accessor.getFieldStartOffset(i, docField), accessor.getFieldLength(i, docField));
-                while (tokenizer.hasNext()) {
-                    tokenizer.next();
-                    numTokens++;
-                }
-            }
-
-            tokenizer.reset(
-                    accessor.getBuffer().array(),
-                    accessor.getTupleStartOffset(i) + accessor.getFieldSlotsLength()
-                            + accessor.getFieldStartOffset(i, docField), accessor.getFieldLength(i, docField));
-            while (tokenizer.hasNext()) {
-                tokenizer.next();
-
-                builder.reset();
-                try {
-                    IToken token = tokenizer.getToken();
-                    token.serializeToken(builderData);
-                    builder.addFieldEndOffset();
-                    // Add number of tokens if requested.
-                    if (addNumTokensKey) {
-                        builder.getDataOutput().writeShort(numTokens);
-                        builder.addFieldEndOffset();
-                    }
-                } catch (IOException e) {
-                    throw new HyracksDataException(e.getMessage());
-                }
-
-                for (int k = 0; k < keyFields.length; k++) {
-                    builder.addField(accessor, i, keyFields[k]);
-                }
-
-                if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
-                    FrameUtils.flushFrame(writeBuffer, writer);
-                    appender.reset(writeBuffer, true);
-                    if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
-                        throw new IllegalStateException();
-                    }
-                }
-            }
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (appender.getTupleCount() > 0) {
-            FrameUtils.flushFrame(writeBuffer, writer);
-        }
-        writer.close();
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        writer.fail();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexBulkLoadOperatorDescriptor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexBulkLoadOperatorDescriptor.java
deleted file mode 100644
index da3cad5..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexBulkLoadOperatorDescriptor.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexBulkLoadOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class LSMInvertedIndexBulkLoadOperatorDescriptor extends AbstractLSMInvertedIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int[] fieldPermutation;
-    private final boolean verifyInput;
-    private final long numElementsHint;
-
-    public LSMInvertedIndexBulkLoadOperatorDescriptor(IOperatorDescriptorRegistry spec, int[] fieldPermutation,
-            boolean verifyInput, long numElementsHint, IStorageManagerInterface storageManager,
-            IFileSplitProvider fileSplitProvider, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenComparatorFactories,
-            ITypeTraits[] invListsTypeTraits, IBinaryComparatorFactory[] invListComparatorFactories,
-            IBinaryTokenizerFactory tokenizerFactory, IIndexDataflowHelperFactory invertedIndexDataflowHelperFactory,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, 1, 0, null, storageManager, fileSplitProvider, lifecycleManagerProvider, tokenTypeTraits,
-                tokenComparatorFactories, invListsTypeTraits, invListComparatorFactories, tokenizerFactory,
-                invertedIndexDataflowHelperFactory, null, false, NoOpLocalResourceFactoryProvider.INSTANCE,
-                NoOpOperationCallbackFactory.INSTANCE, modificationOpCallbackFactory);
-        this.fieldPermutation = fieldPermutation;
-        this.verifyInput = verifyInput;
-        this.numElementsHint = numElementsHint;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new IndexBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, 1.0f, verifyInput,
-                numElementsHint, recordDescProvider);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexCreateOperatorDescriptor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexCreateOperatorDescriptor.java
deleted file mode 100644
index 641878f..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexCreateOperatorDescriptor.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexCreateOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-
-public class LSMInvertedIndexCreateOperatorDescriptor extends AbstractLSMInvertedIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    public LSMInvertedIndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec,
-            IStorageManagerInterface storageManager, IFileSplitProvider fileSplitProvider,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenComparatorFactories, ITypeTraits[] invListsTypeTraits,
-            IBinaryComparatorFactory[] invListComparatorFactories, IBinaryTokenizerFactory tokenizerFactory,
-            IIndexDataflowHelperFactory btreeDataflowHelperFactory,
-            ILocalResourceFactoryProvider localResourceFactoryProvider,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, 0, 0, null, storageManager, fileSplitProvider, lifecycleManagerProvider, tokenTypeTraits,
-                tokenComparatorFactories, invListsTypeTraits, invListComparatorFactories, tokenizerFactory,
-                btreeDataflowHelperFactory, null, false, localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE, modificationOpCallbackFactory);
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new IndexCreateOperatorNodePushable(this, ctx, partition);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
deleted file mode 100644
index 3d8b391..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.InvertedIndexUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public final class LSMInvertedIndexDataflowHelper extends AbstractLSMIndexDataflowHelper {
-
-    public LSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
-                ioScheduler, ioOpCallbackProvider);
-    }
-
-    public LSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider);
-    }
-
-    @Override
-    public IIndex createIndexInstance() throws HyracksDataException {
-        IInvertedIndexOperatorDescriptor invIndexOpDesc = (IInvertedIndexOperatorDescriptor) opDesc;
-        try {
-            ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
-            IInMemoryBufferCache memBufferCache = new DualIndexInMemoryBufferCache(new HeapBufferAllocator(),
-                    memPageSize, memNumPages);
-            IInMemoryFreePageManager memFreePageManager = new DualIndexInMemoryFreePageManager(memNumPages,
-                    metaDataFrameFactory);
-            IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-            IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
-            LSMInvertedIndex invIndex = InvertedIndexUtils.createLSMInvertedIndex(memBufferCache, memFreePageManager,
-                    diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
-                    invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
-                    invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
-                    diskBufferCache, ctx.getIOManager(), file.getFile().getPath(), mergePolicy, opTrackerFactory,
-                    ioScheduler, ioOpCallbackProvider, partition);
-            return invIndex;
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelperFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelperFactory.java
deleted file mode 100644
index 9796ebc..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelperFactory.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelperFactory;
-
-public class LSMInvertedIndexDataflowHelperFactory extends AbstractLSMIndexDataflowHelperFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    public LSMInvertedIndexDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
-            ILSMOperationTrackerFactory opTrackerProvider, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
-        super(mergePolicyProvider, opTrackerProvider, ioSchedulerProvider, ioOpCallbackProvider, memPageSize,
-                memNumPages);
-    }
-
-    @Override
-    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        return new LSMInvertedIndexDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicyProvider.getMergePolicy(ctx),
-                opTrackerFactory, ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider);
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexInsertUpdateDeleteOperator.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexInsertUpdateDeleteOperator.java
deleted file mode 100644
index 963c653..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexInsertUpdateDeleteOperator.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMIndexInsertUpdateDeleteOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class LSMInvertedIndexInsertUpdateDeleteOperator extends AbstractLSMInvertedIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int[] fieldPermutation;
-    private final IndexOperation op;
-
-    public LSMInvertedIndexInsertUpdateDeleteOperator(IOperatorDescriptorRegistry spec,
-            IStorageManagerInterface storageManager, IFileSplitProvider fileSplitProvider,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenComparatorFactories, ITypeTraits[] invListsTypeTraits,
-            IBinaryComparatorFactory[] invListComparatorFactories, IBinaryTokenizerFactory tokenizerFactory,
-            int[] fieldPermutation, IndexOperation op, IIndexDataflowHelperFactory dataflowHelperFactory,
-            IModificationOperationCallbackFactory modificationOpCallbackFactory) {
-        super(spec, 1, 1, null, storageManager, fileSplitProvider, lifecycleManagerProvider, tokenTypeTraits,
-                tokenComparatorFactories, invListsTypeTraits, invListComparatorFactories, tokenizerFactory,
-                dataflowHelperFactory, null, false, NoOpLocalResourceFactoryProvider.INSTANCE,
-                NoOpOperationCallbackFactory.INSTANCE, modificationOpCallbackFactory);
-        this.fieldPermutation = fieldPermutation;
-        this.op = op;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new LSMIndexInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
-                recordDescProvider, op);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexSearchOperatorDescriptor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexSearchOperatorDescriptor.java
deleted file mode 100644
index 4f97060..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexSearchOperatorDescriptor.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class LSMInvertedIndexSearchOperatorDescriptor extends AbstractLSMInvertedIndexOperatorDescriptor {
-    private static final long serialVersionUID = 1L;
-
-    private final int queryField;
-    private final IInvertedIndexSearchModifierFactory searchModifierFactory;
-
-    public LSMInvertedIndexSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, int queryField,
-            IStorageManagerInterface storageManager, IFileSplitProvider fileSplitProvider,
-            IIndexLifecycleManagerProvider lifecycleManagerProvider, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenComparatorFactories, ITypeTraits[] invListsTypeTraits,
-            IBinaryComparatorFactory[] invListComparatorFactories,
-            IIndexDataflowHelperFactory btreeDataflowHelperFactory, IBinaryTokenizerFactory queryTokenizerFactory,
-            IInvertedIndexSearchModifierFactory searchModifierFactory, RecordDescriptor recDesc, boolean retainInput,
-            ISearchOperationCallbackFactory searchOpCallbackProvider) {
-        super(spec, 1, 1, recDesc, storageManager, fileSplitProvider, lifecycleManagerProvider, tokenTypeTraits,
-                tokenComparatorFactories, invListsTypeTraits, invListComparatorFactories, queryTokenizerFactory,
-                btreeDataflowHelperFactory, null, retainInput, NoOpLocalResourceFactoryProvider.INSTANCE,
-                searchOpCallbackProvider, NoOpOperationCallbackFactory.INSTANCE);
-        this.queryField = queryField;
-        this.searchModifierFactory = searchModifierFactory;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        IInvertedIndexSearchModifier searchModifier = searchModifierFactory.createSearchModifier();
-        return new LSMInvertedIndexSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, queryField,
-                searchModifier);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexSearchOperatorNodePushable.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexSearchOperatorNodePushable.java
deleted file mode 100644
index d825c02..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexSearchOperatorNodePushable.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
-
-public class LSMInvertedIndexSearchOperatorNodePushable extends IndexSearchOperatorNodePushable {
-
-    protected final IInvertedIndexSearchModifier searchModifier;
-    protected final int queryFieldIndex;
-    protected final int invListFields;
-
-    public LSMInvertedIndexSearchOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, IRecordDescriptorProvider recordDescProvider, int queryFieldIndex,
-            IInvertedIndexSearchModifier searchModifier) {
-        super(opDesc, ctx, partition, recordDescProvider);
-        this.searchModifier = searchModifier;
-        this.queryFieldIndex = queryFieldIndex;
-        // If retainInput is true, the frameTuple is created in IndexSearchOperatorNodePushable.open().
-        if (!opDesc.getRetainInput()) {
-            this.frameTuple = new FrameTupleReference();
-        }
-        AbstractLSMInvertedIndexOperatorDescriptor invIndexOpDesc = (AbstractLSMInvertedIndexOperatorDescriptor) opDesc;
-        invListFields = invIndexOpDesc.getInvListsTypeTraits().length;
-    }
-
-    @Override
-    protected ISearchPredicate createSearchPredicate() {
-        AbstractLSMInvertedIndexOperatorDescriptor invIndexOpDesc = (AbstractLSMInvertedIndexOperatorDescriptor) opDesc;
-        return new InvertedIndexSearchPredicate(invIndexOpDesc.getTokenizerFactory().createTokenizer(), searchModifier);
-    }
-
-    @Override
-    protected void resetSearchPredicate(int tupleIndex) {
-        frameTuple.reset(accessor, tupleIndex);
-        InvertedIndexSearchPredicate invIndexSearchPred = (InvertedIndexSearchPredicate) searchPred;
-        invIndexSearchPred.setQueryTuple(frameTuple);
-        invIndexSearchPred.setQueryFieldIndex(queryFieldIndex);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
deleted file mode 100644
index c5b4f07..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.PartitionedLSMInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.InvertedIndexUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public final class PartitionedLSMInvertedIndexDataflowHelper extends AbstractLSMIndexDataflowHelper {
-
-    public PartitionedLSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
-                ioScheduler, ioOpCallbackProvider);
-    }
-
-    public PartitionedLSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider);
-    }
-
-    @Override
-    public IIndex createIndexInstance() throws HyracksDataException {
-        IInvertedIndexOperatorDescriptor invIndexOpDesc = (IInvertedIndexOperatorDescriptor) opDesc;
-        try {
-            ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
-            IInMemoryBufferCache memBufferCache = new DualIndexInMemoryBufferCache(new HeapBufferAllocator(),
-                    memPageSize, memNumPages);
-            IInMemoryFreePageManager memFreePageManager = new DualIndexInMemoryFreePageManager(memNumPages,
-                    metaDataFrameFactory);
-            IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-            IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
-            PartitionedLSMInvertedIndex invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(memBufferCache,
-                    memFreePageManager, diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
-                    invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
-                    invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
-                    diskBufferCache, ctx.getIOManager(), file.getFile().getPath(), mergePolicy, opTrackerFactory,
-                    ioScheduler, ioOpCallbackProvider, partition);
-            return invIndex;
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelperFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelperFactory.java
deleted file mode 100644
index 8a8aad2..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelperFactory.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelperFactory;
-
-public class PartitionedLSMInvertedIndexDataflowHelperFactory extends AbstractLSMIndexDataflowHelperFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    public PartitionedLSMInvertedIndexDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
-            ILSMOperationTrackerFactory opTrackerProvider, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
-        super(mergePolicyProvider, opTrackerProvider, ioSchedulerProvider, ioOpCallbackProvider, memNumPages,
-                memNumPages);
-    }
-
-    @Override
-    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        return new PartitionedLSMInvertedIndexDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages,
-                mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory, ioSchedulerProvider.getIOScheduler(ctx),
-                ioOpCallbackProvider);
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java
deleted file mode 100644
index cc7ff87..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-
-public class InvertedIndexException extends IndexException {
-    private static final long serialVersionUID = 1L;
-
-    public InvertedIndexException(Exception e) {        
-        super(e);
-    }
-    
-    public InvertedIndexException(String msg) {
-        super(msg);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java
deleted file mode 100644
index bedaa60..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions;
-
-
-
-public class OccurrenceThresholdPanicException extends InvertedIndexException {
-    private static final long serialVersionUID = 1L;
-
-    public OccurrenceThresholdPanicException(String msg) {
-        super(msg);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
deleted file mode 100644
index c69a8df..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
+++ /dev/null
@@ -1,732 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomCalculations;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterSpecification;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponentFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BlockingIOOperationCallbackWrapper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory.InMemoryInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory.InMemoryInvertedIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.InvertedIndexUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMInvertedIndex extends AbstractLSMIndex implements IInvertedIndex {
-
-    // In-memory components.
-    protected final LSMInvertedIndexMutableComponent mutableComponent;
-    protected final IInMemoryFreePageManager memFreePageManager;
-    protected final IBinaryTokenizerFactory tokenizerFactory;
-
-    // On-disk components.
-    // For creating inverted indexes in flush and merge.
-    protected final ILSMComponentFactory componentFactory;
-
-    // Type traits and comparators for tokens and inverted-list elements.
-    protected final ITypeTraits[] invListTypeTraits;
-    protected final IBinaryComparatorFactory[] invListCmpFactories;
-    protected final ITypeTraits[] tokenTypeTraits;
-    protected final IBinaryComparatorFactory[] tokenCmpFactories;
-
-    public LSMInvertedIndex(IInMemoryBufferCache memBufferCache, IInMemoryFreePageManager memFreePageManager,
-            OnDiskInvertedIndexFactory diskInvIndexFactory, BTreeFactory deletedKeysBTreeFactory,
-            BloomFilterFactory bloomFilterFactory, ILSMIndexFileManager fileManager,
-            IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
-            throws IndexException {
-        super(memFreePageManager, diskInvIndexFactory.getBufferCache(), fileManager, diskFileMapProvider, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        this.memFreePageManager = memFreePageManager;
-        this.tokenizerFactory = tokenizerFactory;
-        this.invListTypeTraits = invListTypeTraits;
-        this.invListCmpFactories = invListCmpFactories;
-        this.tokenTypeTraits = tokenTypeTraits;
-        this.tokenCmpFactories = tokenCmpFactories;
-        // Create in-memory component.
-        InMemoryInvertedIndex memInvIndex = createInMemoryInvertedIndex(memBufferCache);
-        BTree deleteKeysBTree = BTreeUtils.createBTree(memBufferCache, memFreePageManager,
-                ((InMemoryBufferCache) memBufferCache).getFileMapProvider(), invListTypeTraits, invListCmpFactories,
-                BTreeLeafFrameType.REGULAR_NSM, new FileReference(new File("membtree")));
-        mutableComponent = new LSMInvertedIndexMutableComponent(memInvIndex, deleteKeysBTree, memFreePageManager);
-        componentFactory = new LSMInvertedIndexComponentFactory(diskInvIndexFactory, deletedKeysBTreeFactory,
-                bloomFilterFactory);
-    }
-
-    @Override
-    public synchronized void create() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to create the index since it is activated.");
-        }
-
-        fileManager.deleteDirs();
-        fileManager.createDirs();
-        componentsRef.get().clear();
-    }
-
-    @Override
-    public synchronized void activate() throws HyracksDataException {
-        if (isActivated) {
-            return;
-        }
-        try {
-            List<ILSMComponent> immutableComponents = componentsRef.get();
-            ((InMemoryBufferCache) mutableComponent.getInvIndex().getBufferCache()).open();
-            mutableComponent.getInvIndex().create();
-            mutableComponent.getInvIndex().activate();
-            mutableComponent.getDeletedKeysBTree().create();
-            mutableComponent.getDeletedKeysBTree().activate();
-            immutableComponents.clear();
-            List<LSMComponentFileReferences> validFileReferences = fileManager.cleanupAndGetValidFiles();
-            for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
-                LSMInvertedIndexImmutableComponent component;
-                try {
-                    component = createDiskInvIndexComponent(componentFactory,
-                            lsmComonentFileReference.getInsertIndexFileReference(),
-                            lsmComonentFileReference.getDeleteIndexFileReference(),
-                            lsmComonentFileReference.getBloomFilterFileReference(), false);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
-                immutableComponents.add(component);
-            }
-            isActivated = true;
-            // TODO: Maybe we can make activate throw an index exception?
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void clear() throws HyracksDataException {
-        if (!isActivated) {
-            throw new HyracksDataException("Failed to clear the index since it is not activated.");
-        }
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        mutableComponent.getInvIndex().clear();
-        mutableComponent.getDeletedKeysBTree().clear();
-        for (ILSMComponent c : immutableComponents) {
-            LSMInvertedIndexImmutableComponent component = (LSMInvertedIndexImmutableComponent) c;
-            component.getBloomFilter().deactivate();
-            component.getInvIndex().deactivate();
-            component.getDeletedKeysBTree().deactivate();
-            component.getBloomFilter().destroy();
-            component.getInvIndex().destroy();
-            component.getDeletedKeysBTree().destroy();
-        }
-        immutableComponents.clear();
-    }
-
-    @Override
-    public synchronized void deactivate(boolean flushOnExit) throws HyracksDataException {
-        if (!isActivated) {
-            return;
-        }
-
-        isActivated = false;
-
-        if (flushOnExit) {
-            BlockingIOOperationCallbackWrapper blockingCallBack = new BlockingIOOperationCallbackWrapper(
-                    ioOpCallbackProvider.getIOOperationCallback(this));
-            ILSMIndexAccessor accessor = (ILSMIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            accessor.scheduleFlush(blockingCallBack);
-            try {
-                blockingCallBack.waitForIO();
-            } catch (InterruptedException e) {
-                throw new HyracksDataException(e);
-            }
-        }
-
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMInvertedIndexImmutableComponent component = (LSMInvertedIndexImmutableComponent) c;
-            component.getBloomFilter().deactivate();
-            component.getInvIndex().deactivate();
-            component.getDeletedKeysBTree().deactivate();
-        }
-        mutableComponent.getInvIndex().deactivate();
-        mutableComponent.getDeletedKeysBTree().deactivate();
-        mutableComponent.getInvIndex().destroy();
-        mutableComponent.getDeletedKeysBTree().destroy();
-        ((InMemoryBufferCache) mutableComponent.getInvIndex().getBufferCache()).close();
-    }
-
-    @Override
-    public synchronized void deactivate() throws HyracksDataException {
-        deactivate(true);
-    }
-
-    @Override
-    public synchronized void destroy() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to destroy the index since it is activated.");
-        }
-
-        mutableComponent.getInvIndex().destroy();
-        mutableComponent.getDeletedKeysBTree().destroy();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMInvertedIndexImmutableComponent component = (LSMInvertedIndexImmutableComponent) c;
-            component.getInvIndex().destroy();
-            component.getDeletedKeysBTree().destroy();
-            component.getBloomFilter().destroy();
-        }
-        fileManager.deleteDirs();
-    }
-
-    @Override
-    public void getOperationalComponents(ILSMIndexOperationContext ctx) {
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
-        operationalComponents.clear();
-        switch (ctx.getOperation()) {
-            case FLUSH:
-            case DELETE:
-            case INSERT:
-                operationalComponents.add(mutableComponent);
-                break;
-            case SEARCH:
-                operationalComponents.add(mutableComponent);
-                operationalComponents.addAll(immutableComponents);
-                break;
-            case MERGE:
-                operationalComponents.addAll(immutableComponents);
-                break;
-            default:
-                throw new UnsupportedOperationException("Operation " + ctx.getOperation() + " not supported.");
-        }
-    }
-
-    /**
-     * The keys in the in-memory deleted-keys BTree only refer to on-disk components.
-     * We delete documents from the in-memory inverted index by deleting its entries directly,
-     * while still adding the deleted key to the deleted-keys BTree.
-     * Otherwise, inserts would have to remove keys from the in-memory deleted-keys BTree which
-     * may cause incorrect behavior (lost deletes) in the following pathological case:
-     * Insert doc 1, flush, delete doc 1, insert doc 1
-     * After the sequence above doc 1 will now appear twice because the delete of the on-disk doc 1 has been lost.
-     * Insert:
-     * - Insert document into in-memory inverted index.
-     * Delete:
-     * - Delete document from in-memory inverted index (ignore if it does not exist).
-     * - Insert key into deleted-keys BTree.
-     */
-    @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException, IndexException {
-        LSMInvertedIndexOpContext ctx = (LSMInvertedIndexOpContext) ictx;
-        // TODO: This is a hack to support logging properly in ASTERIX.
-        // The proper undo operations are only dependent on the after image so 
-        // it is correct to say we found nothing (null) as the before image (at least 
-        // in the perspective of ASTERIX). The semantics for the operation callbacks 
-        // are violated here (and they are somewhat unclear in the first place as to 
-        // what they should be for an inverted index).
-        ctx.modificationCallback.before(tuple);
-        ctx.modificationCallback.found(null, tuple);
-        switch (ctx.getOperation()) {
-            case INSERT: {
-                // Insert into the in-memory inverted index.                
-                ctx.memInvIndexAccessor.insert(tuple);
-                break;
-            }
-            case DELETE: {
-                // First remove all entries in the in-memory inverted index (if any).
-                ctx.memInvIndexAccessor.delete(tuple);
-                // Insert key into the deleted-keys BTree.
-                ctx.keysOnlyTuple.reset(tuple);
-                try {
-                    ctx.deletedKeysBTreeAccessor.insert(ctx.keysOnlyTuple);
-                } catch (BTreeDuplicateKeyException e) {
-                    // Key has already been deleted.
-                }
-                break;
-            }
-            default: {
-                throw new UnsupportedOperationException("Operation " + ctx.getOperation() + " not supported.");
-            }
-        }
-    }
-
-    @Override
-    public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
-        List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
-        int numComponents = operationalComponents.size();
-        assert numComponents > 0;
-        boolean includeMutableComponent = operationalComponents.get(0) == mutableComponent;
-        ArrayList<IIndexAccessor> indexAccessors = new ArrayList<IIndexAccessor>(numComponents);
-        ArrayList<IIndexAccessor> deletedKeysBTreeAccessors = new ArrayList<IIndexAccessor>(numComponents);
-        if (includeMutableComponent) {
-            IIndexAccessor invIndexAccessor = mutableComponent.getInvIndex().createAccessor(
-                    NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            indexAccessors.add(invIndexAccessor);
-            IIndexAccessor deletedKeysAccessor = mutableComponent.getDeletedKeysBTree().createAccessor(
-                    NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            deletedKeysBTreeAccessors.add(deletedKeysAccessor);
-        }
-
-        for (int i = includeMutableComponent ? 1 : 0; i < operationalComponents.size(); i++) {
-            LSMInvertedIndexImmutableComponent component = (LSMInvertedIndexImmutableComponent) operationalComponents
-                    .get(i);
-            IIndexAccessor invIndexAccessor = component.getInvIndex().createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            indexAccessors.add(invIndexAccessor);
-            IIndexAccessor deletedKeysAccessor = component.getDeletedKeysBTree().createAccessor(
-                    NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            deletedKeysBTreeAccessors.add(deletedKeysAccessor);
-        }
-
-        ICursorInitialState initState = createCursorInitialState(pred, ictx, includeMutableComponent, indexAccessors,
-                deletedKeysBTreeAccessors);
-        cursor.open(initState, pred);
-    }
-
-    private ICursorInitialState createCursorInitialState(ISearchPredicate pred, IIndexOperationContext ictx,
-            boolean includeMutableComponent, ArrayList<IIndexAccessor> indexAccessors,
-            ArrayList<IIndexAccessor> deletedKeysBTreeAccessors) {
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        ICursorInitialState initState = null;
-        PermutingTupleReference keysOnlyTuple = createKeysOnlyTupleReference();
-        MultiComparator keyCmp = MultiComparator.createIgnoreFieldLength(invListCmpFactories);
-        List<ILSMComponent> operationalComponents = new ArrayList<ILSMComponent>();
-        if (includeMutableComponent) {
-            operationalComponents.add(mutableComponent);
-        }
-        operationalComponents.addAll(immutableComponents);
-
-        // TODO: This check is not pretty, but it does the job. Come up with something more OO in the future.
-        // Distinguish between regular searches and range searches (mostly used in merges).
-        if (pred instanceof InvertedIndexSearchPredicate) {
-            initState = new LSMInvertedIndexSearchCursorInitialState(keyCmp, keysOnlyTuple, indexAccessors,
-                    deletedKeysBTreeAccessors, mutableComponent.getDeletedKeysBTree().getLeafFrameFactory(), ictx,
-                    includeMutableComponent, lsmHarness, operationalComponents);
-        } else {
-            InMemoryInvertedIndex memInvIndex = (InMemoryInvertedIndex) mutableComponent.getInvIndex();
-            MultiComparator tokensAndKeysCmp = MultiComparator.create(memInvIndex.getBTree().getComparatorFactories());
-            initState = new LSMInvertedIndexRangeSearchCursorInitialState(tokensAndKeysCmp, keyCmp, keysOnlyTuple,
-                    mutableComponent.getDeletedKeysBTree().getLeafFrameFactory(), includeMutableComponent, lsmHarness,
-                    indexAccessors, deletedKeysBTreeAccessors, pred, operationalComponents);
-        }
-        return initState;
-    }
-
-    /**
-     * Returns a permuting tuple reference that projects away the document field(s) of a tuple, only leaving the key fields.
-     */
-    private PermutingTupleReference createKeysOnlyTupleReference() {
-        // Project away token fields.
-        int[] keyFieldPermutation = new int[invListTypeTraits.length];
-        int numTokenFields = tokenTypeTraits.length;
-        for (int i = 0; i < invListTypeTraits.length; i++) {
-            keyFieldPermutation[i] = numTokenFields + i;
-        }
-        return new PermutingTupleReference(keyFieldPermutation);
-    }
-
-    @Override
-    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException {
-        LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
-        LSMInvertedIndexOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-        ILSMComponent flushingComponent = ctx.getComponentHolder().get(0);
-        opCtx.setOperation(IndexOperation.FLUSH);
-        opCtx.getComponentHolder().add(flushingComponent);
-        ioScheduler.scheduleOperation(new LSMInvertedIndexFlushOperation(new LSMInvertedIndexAccessor(this, lsmHarness,
-                fileManager, opCtx), mutableComponent, componentFileRefs.getInsertIndexFileReference(),
-                componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
-                callback));
-    }
-
-    @Override
-    public ILSMComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        LSMInvertedIndexFlushOperation flushOp = (LSMInvertedIndexFlushOperation) operation;
-
-        // Create an inverted index instance to be bulk loaded.
-        LSMInvertedIndexImmutableComponent component = createDiskInvIndexComponent(componentFactory,
-                flushOp.getDictBTreeFlushTarget(), flushOp.getDeletedKeysBTreeFlushTarget(),
-                flushOp.getBloomFilterFlushTarget(), true);
-        IInvertedIndex diskInvertedIndex = component.getInvIndex();
-
-        // Create a scan cursor on the BTree underlying the in-memory inverted index.
-        LSMInvertedIndexMutableComponent flushingComponent = flushOp.getFlushingComponent();
-        InMemoryInvertedIndexAccessor memInvIndexAccessor = (InMemoryInvertedIndexAccessor) flushingComponent
-                .getInvIndex().createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        BTreeAccessor memBTreeAccessor = memInvIndexAccessor.getBTreeAccessor();
-        RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
-        IIndexCursor scanCursor = memBTreeAccessor.createSearchCursor();
-        memBTreeAccessor.search(scanCursor, nullPred);
-
-        // Bulk load the disk inverted index from the in-memory inverted index.
-        IIndexBulkLoader invIndexBulkLoader = diskInvertedIndex.createBulkLoader(1.0f, false, 0L);
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                invIndexBulkLoader.add(scanCursor.getTuple());
-            }
-        } finally {
-            scanCursor.close();
-        }
-        invIndexBulkLoader.end();
-
-        IIndexAccessor deletedKeysBTreeAccessor = flushingComponent.getDeletedKeysBTree().createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        IIndexCursor btreeCountingCursor = ((BTreeAccessor) deletedKeysBTreeAccessor).createCountingSearchCursor();
-        deletedKeysBTreeAccessor.search(btreeCountingCursor, nullPred);
-        long numBTreeTuples = 0L;
-        try {
-            while (btreeCountingCursor.hasNext()) {
-                btreeCountingCursor.next();
-                ITupleReference countTuple = btreeCountingCursor.getTuple();
-                numBTreeTuples = IntegerSerializerDeserializer.getInt(countTuple.getFieldData(0),
-                        countTuple.getFieldStart(0));
-            }
-        } finally {
-            btreeCountingCursor.close();
-        }
-
-        if (numBTreeTuples > 0) {
-            int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numBTreeTuples);
-            BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
-                    MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
-
-            // Create an BTree instance for the deleted keys.
-            BTree diskDeletedKeysBTree = component.getDeletedKeysBTree();
-
-            // Create a scan cursor on the deleted keys BTree underlying the in-memory inverted index.
-            IIndexCursor deletedKeysScanCursor = deletedKeysBTreeAccessor.createSearchCursor();
-            deletedKeysBTreeAccessor.search(deletedKeysScanCursor, nullPred);
-
-            // Bulk load the deleted-keys BTree.
-            IIndexBulkLoader deletedKeysBTreeBulkLoader = diskDeletedKeysBTree.createBulkLoader(1.0f, false, 0L);
-            IIndexBulkLoader builder = component.getBloomFilter().createBuilder(numBTreeTuples,
-                    bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements());
-
-            try {
-                while (deletedKeysScanCursor.hasNext()) {
-                    deletedKeysScanCursor.next();
-                    deletedKeysBTreeBulkLoader.add(deletedKeysScanCursor.getTuple());
-                    builder.add(deletedKeysScanCursor.getTuple());
-                }
-            } finally {
-                deletedKeysScanCursor.close();
-                builder.end();
-            }
-            deletedKeysBTreeBulkLoader.end();
-        }
-
-        return component;
-    }
-
-    @Override
-    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
-        LSMInvertedIndexOpContext ictx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
-        ictx.getComponentHolder().addAll(mergingComponents);
-        IIndexCursor cursor = new LSMInvertedIndexRangeSearchCursor(ictx);
-        RangePredicate mergePred = new RangePredicate(null, null, true, true, null, null);
-
-        // Scan diskInvertedIndexes ignoring the memoryInvertedIndex.
-        search(ictx, cursor, mergePred);
-
-        ictx.setOperation(IndexOperation.MERGE);
-        LSMInvertedIndexImmutableComponent firstComponent = (LSMInvertedIndexImmutableComponent) mergingComponents
-                .get(0);
-        OnDiskInvertedIndex firstInvIndex = (OnDiskInvertedIndex) firstComponent.getInvIndex();
-        String firstFileName = firstInvIndex.getBTree().getFileReference().getFile().getName();
-
-        LSMInvertedIndexImmutableComponent lastComponent = (LSMInvertedIndexImmutableComponent) mergingComponents
-                .get(mergingComponents.size() - 1);
-        OnDiskInvertedIndex lastInvIndex = (OnDiskInvertedIndex) lastComponent.getInvIndex();
-        String lastFileName = lastInvIndex.getBTree().getFileReference().getFile().getName();
-
-        LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(firstFileName, lastFileName);
-        ILSMIndexAccessorInternal accessor = new LSMInvertedIndexAccessor(this, lsmHarness, fileManager, ictx);
-        ioScheduler.scheduleOperation(new LSMInvertedIndexMergeOperation(accessor, mergingComponents, cursor,
-                relMergeFileRefs.getInsertIndexFileReference(), relMergeFileRefs.getDeleteIndexFileReference(),
-                relMergeFileRefs.getBloomFilterFileReference(), callback));
-    }
-
-    @Override
-    public ILSMComponent merge(List<ILSMComponent> mergedComponents, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
-        LSMInvertedIndexMergeOperation mergeOp = (LSMInvertedIndexMergeOperation) operation;
-
-        // Create an inverted index instance.
-        LSMInvertedIndexImmutableComponent component = createDiskInvIndexComponent(componentFactory,
-                mergeOp.getDictBTreeMergeTarget(), mergeOp.getDeletedKeysBTreeMergeTarget(),
-                mergeOp.getBloomFilterMergeTarget(), true);
-
-        IInvertedIndex mergedDiskInvertedIndex = component.getInvIndex();
-        IIndexCursor cursor = mergeOp.getCursor();
-        IIndexBulkLoader invIndexBulkLoader = mergedDiskInvertedIndex.createBulkLoader(1.0f, true, 0L);
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-                ITupleReference tuple = cursor.getTuple();
-                invIndexBulkLoader.add(tuple);
-            }
-        } finally {
-            cursor.close();
-        }
-        invIndexBulkLoader.end();
-
-        // Add the merged components for cleanup.
-        mergedComponents.addAll(mergeOp.getMergingComponents());
-
-        return component;
-    }
-
-    private ILSMComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
-        LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
-        return createDiskInvIndexComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
-                componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true);
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-            throws IndexException {
-        return new LSMInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint);
-    }
-
-    public class LSMInvertedIndexBulkLoader implements IIndexBulkLoader {
-        private final ILSMComponent component;
-        private final IIndexBulkLoader invIndexBulkLoader;
-
-        public LSMInvertedIndexBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-                throws IndexException {
-            // Note that by using a flush target file name, we state that the
-            // new bulk loaded tree is "newer" than any other merged tree.
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException e) {
-                throw new TreeIndexException(e);
-            } catch (IndexException e) {
-                throw new TreeIndexException(e);
-            }
-            invIndexBulkLoader = ((LSMInvertedIndexImmutableComponent) component).getInvIndex().createBulkLoader(
-                    fillFactor, verifyInput, numElementsHint);
-        }
-
-        @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
-            try {
-                invIndexBulkLoader.add(tuple);
-            } catch (IndexException e) {
-                handleException();
-                throw e;
-            } catch (HyracksDataException e) {
-                handleException();
-                throw e;
-            } catch (RuntimeException e) {
-                handleException();
-                throw e;
-            }
-        }
-
-        protected void handleException() throws HyracksDataException {
-            ((LSMInvertedIndexImmutableComponent) component).getInvIndex().deactivate();
-            ((LSMInvertedIndexImmutableComponent) component).getInvIndex().destroy();
-            ((LSMInvertedIndexImmutableComponent) component).getDeletedKeysBTree().deactivate();
-            ((LSMInvertedIndexImmutableComponent) component).getDeletedKeysBTree().destroy();
-            ((LSMInvertedIndexImmutableComponent) component).getBloomFilter().deactivate();
-            ((LSMInvertedIndexImmutableComponent) component).getBloomFilter().destroy();
-        }
-
-        @Override
-        public void end() throws IndexException, HyracksDataException {
-            invIndexBulkLoader.end();
-            lsmHarness.addBulkLoadedComponent(component);
-        }
-    }
-
-    protected InMemoryInvertedIndex createInMemoryInvertedIndex(IInMemoryBufferCache memBufferCache)
-            throws IndexException {
-        return InvertedIndexUtils.createInMemoryBTreeInvertedindex(memBufferCache, memFreePageManager,
-                invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory);
-    }
-
-    protected LSMInvertedIndexImmutableComponent createDiskInvIndexComponent(ILSMComponentFactory factory,
-            FileReference dictBTreeFileRef, FileReference btreeFileRef, FileReference bloomFilterFileRef, boolean create)
-            throws HyracksDataException, IndexException {
-        LSMInvertedIndexImmutableComponent component = (LSMInvertedIndexImmutableComponent) factory
-                .createLSMComponentInstance(new LSMComponentFileReferences(dictBTreeFileRef, btreeFileRef,
-                        bloomFilterFileRef));
-        if (create) {
-            component.getInvIndex().create();
-            component.getDeletedKeysBTree().create();
-            component.getBloomFilter().create();
-        }
-        // Will be closed during cleanup of merge().
-        component.getInvIndex().activate();
-        component.getDeletedKeysBTree().activate();
-        component.getBloomFilter().activate();
-        return component;
-    }
-
-    @Override
-    public ILSMIndexAccessorInternal createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new LSMInvertedIndexAccessor(this, lsmHarness, fileManager, createOpContext(modificationCallback,
-                searchCallback));
-    }
-
-    private LSMInvertedIndexOpContext createOpContext(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new LSMInvertedIndexOpContext(mutableComponent.getInvIndex(), mutableComponent.getDeletedKeysBTree(),
-                modificationCallback, searchCallback);
-    }
-
-    @Override
-    public IInvertedListCursor createInvertedListCursor() {
-        throw new UnsupportedOperationException("Cannot create inverted list cursor on lsm inverted index.");
-    }
-
-    @Override
-    public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Cannot open inverted list cursor on lsm inverted index.");
-    }
-
-    @Override
-    public ITypeTraits[] getInvListTypeTraits() {
-        return invListTypeTraits;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getInvListCmpFactories() {
-        return invListCmpFactories;
-    }
-
-    @Override
-    public long getMemoryAllocationSize() {
-        InMemoryBufferCache memBufferCache = (InMemoryBufferCache) mutableComponent.getInvIndex().getBufferCache();
-        return memBufferCache.getNumPages() * memBufferCache.getPageSize();
-    }
-
-    @Override
-    public ITypeTraits[] getTokenTypeTraits() {
-        return tokenTypeTraits;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getTokenCmpFactories() {
-        return tokenCmpFactories;
-    }
-
-    public IBinaryTokenizerFactory getTokenizerFactory() {
-        return tokenizerFactory;
-    }
-
-    protected void forceFlushInvListsFileDirtyPages(OnDiskInvertedIndex invIndex) throws HyracksDataException {
-        int fileId = invIndex.getInvListsFileId();
-        IBufferCache bufferCache = invIndex.getBufferCache();
-        int startPageId = 0;
-        int maxPageId = invIndex.getInvListsMaxPageId();
-        forceFlushDirtyPages(bufferCache, fileId, startPageId, maxPageId);
-    }
-
-    @Override
-    public void markAsValid(ILSMComponent lsmComponent) throws HyracksDataException {
-        LSMInvertedIndexImmutableComponent invIndexComponent = (LSMInvertedIndexImmutableComponent) lsmComponent;
-        OnDiskInvertedIndex invIndex = (OnDiskInvertedIndex) invIndexComponent.getInvIndex();
-        // Flush the bloom filter first.
-        int fileId = invIndexComponent.getBloomFilter().getFileId();
-        IBufferCache bufferCache = invIndex.getBufferCache();
-        int startPage = 0;
-        int maxPage = invIndexComponent.getBloomFilter().getNumPages();
-        forceFlushDirtyPages(bufferCache, fileId, startPage, maxPage);
-
-        ITreeIndex treeIndex = invIndex.getBTree();
-        // Flush inverted index second.
-        forceFlushDirtyPages(treeIndex);
-        forceFlushInvListsFileDirtyPages(invIndex);
-        // Flush deleted keys BTree.
-        forceFlushDirtyPages(invIndexComponent.getDeletedKeysBTree());
-        // We use the dictionary BTree for marking the inverted index as valid.
-        markAsValidInternal(treeIndex);
-    }
-
-    @Override
-    public void validate() throws HyracksDataException {
-        mutableComponent.getInvIndex().validate();
-        mutableComponent.getDeletedKeysBTree().validate();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMInvertedIndexImmutableComponent component = (LSMInvertedIndexImmutableComponent) c;
-            component.getInvIndex().validate();
-            component.getDeletedKeysBTree().validate();
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
deleted file mode 100644
index 7f7d3cd..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-
-public class LSMInvertedIndexAccessor implements ILSMIndexAccessorInternal, IInvertedIndexAccessor {
-
-    protected final ILSMHarness lsmHarness;
-    protected final ILSMIndexFileManager fileManager;
-    protected final ILSMIndexOperationContext ctx;
-    protected final LSMInvertedIndex invIndex;
-
-    public LSMInvertedIndexAccessor(LSMInvertedIndex invIndex, ILSMHarness lsmHarness,
-            ILSMIndexFileManager fileManager, ILSMIndexOperationContext ctx) {
-        this.lsmHarness = lsmHarness;
-        this.fileManager = fileManager;
-        this.ctx = ctx;
-        this.invIndex = invIndex;
-    }
-
-    @Override
-    public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.INSERT);
-        lsmHarness.modify(ctx, false, tuple);
-    }
-
-    @Override
-    public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.DELETE);
-        lsmHarness.modify(ctx, false, tuple);
-    }
-
-    @Override
-    public boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.INSERT);
-        return lsmHarness.modify(ctx, true, tuple);
-    }
-
-    @Override
-    public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.DELETE);
-        return lsmHarness.modify(ctx, true, tuple);
-    }
-
-    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.SEARCH);
-        lsmHarness.search(ctx, cursor, searchPred);
-    }
-
-    public IIndexCursor createSearchCursor() {
-        return new LSMInvertedIndexSearchCursor();
-    }
-
-    @Override
-    public void scheduleFlush(ILSMIOOperationCallback callback) throws HyracksDataException {
-        ctx.setOperation(IndexOperation.FLUSH);
-        lsmHarness.scheduleFlush(ctx, callback);
-    }
-
-    @Override
-    public void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        lsmHarness.flush(ctx, operation);
-    }
-
-    @Override
-    public void scheduleMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.MERGE);
-        lsmHarness.scheduleMerge(ctx, callback);
-    }
-
-    @Override
-    public void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        lsmHarness.merge(ctx, operation);
-    }
-
-    @Override
-    public IIndexCursor createRangeSearchCursor() {
-        return new LSMInvertedIndexRangeSearchCursor(ctx);
-    }
-
-    @Override
-    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
-        search(cursor, searchPred);
-    }
-
-    @Override
-    public void noOp() throws HyracksDataException {
-        lsmHarness.noOp(ctx);
-    }
-
-    @Override
-    public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Physical delete not supported by lsm inverted index.");
-    }
-
-    @Override
-    public void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.INSERT);
-        lsmHarness.forceModify(ctx, tuple);
-    }
-
-    @Override
-    public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        ctx.setOperation(IndexOperation.DELETE);
-        lsmHarness.forceModify(ctx, tuple);
-    }
-
-    @Override
-    public void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Physical delete not supported by lsm inverted index.");
-    }
-
-    @Override
-    public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Update not supported by lsm inverted index.");
-    }
-
-    @Override
-    public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
-    }
-
-    @Override
-    public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Update not supported by lsm inverted index.");
-    }
-
-    @Override
-    public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
-    }
-
-    @Override
-    public IInvertedListCursor createInvertedListCursor() {
-        throw new UnsupportedOperationException("Cannot create inverted list cursor on lsm inverted index.");
-    }
-
-    @Override
-    public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-            throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Cannot open inverted list cursor on lsm inverted index.");
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexComponentFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexComponentFactory.java
deleted file mode 100644
index 1f4db63..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexComponentFactory.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponentFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class LSMInvertedIndexComponentFactory implements ILSMComponentFactory {
-    private final OnDiskInvertedIndexFactory diskInvIndexFactory;
-    private final TreeIndexFactory<BTree> btreeFactory;
-    private final BloomFilterFactory bloomFilterFactory;
-
-    public LSMInvertedIndexComponentFactory(OnDiskInvertedIndexFactory diskInvIndexFactory,
-            TreeIndexFactory<BTree> btreeFactory, BloomFilterFactory bloomFilterFactory) {
-        this.diskInvIndexFactory = diskInvIndexFactory;
-        this.btreeFactory = btreeFactory;
-        this.bloomFilterFactory = bloomFilterFactory;
-    }
-
-    @Override
-    public ILSMComponent createLSMComponentInstance(LSMComponentFileReferences cfr) throws IndexException,
-            HyracksDataException {
-        return new LSMInvertedIndexImmutableComponent(diskInvIndexFactory.createIndexInstance(cfr
-                .getInsertIndexFileReference()), btreeFactory.createIndexInstance(cfr.getDeleteIndexFileReference()),
-                bloomFilterFactory.createBloomFiltertInstance(cfr.getBloomFilterFileReference()));
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return diskInvIndexFactory.getBufferCache();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
deleted file mode 100644
index 15a1633..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexFileNameMapper;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-// TODO: Refactor for better code sharing with other file managers.
-public class LSMInvertedIndexFileManager extends AbstractLSMIndexFileManager implements IInvertedIndexFileNameMapper {
-    private static final String DICT_BTREE_SUFFIX = "b";
-    private static final String INVLISTS_SUFFIX = "i";
-    private static final String DELETED_KEYS_BTREE_SUFFIX = "d";
-
-    // We only need a BTree factory because the inverted indexes consistency is validated against its dictionary BTree.
-    private final BTreeFactory btreeFactory;
-
-    private static FilenameFilter dictBTreeFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".") && name.endsWith(DICT_BTREE_SUFFIX);
-        }
-    };
-
-    private static FilenameFilter deletedKeysBTreeFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".") && name.endsWith(DELETED_KEYS_BTREE_SUFFIX);
-        }
-    };
-
-    public LSMInvertedIndexFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
-            BTreeFactory btreeFactory, int startIODeviceIndex) {
-        super(ioManager, fileMapProvider, file, null, startIODeviceIndex);
-        this.btreeFactory = btreeFactory;
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelFlushFileReference() {
-        Date date = new Date();
-        String ts = formatter.format(date);
-        String baseName = baseDir + ts + SPLIT_STRING + ts;
-        // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
-                createFlushFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createFlushFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING));
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelMergeFileReference(String firstFileName, String lastFileName)
-            throws HyracksDataException {
-        String[] firstTimestampRange = firstFileName.split(SPLIT_STRING);
-        String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
-
-        String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
-        // Get the range of timestamps by taking the earliest and the latest timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
-                createMergeFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createMergeFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING));
-    }
-
-    @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
-        List<LSMComponentFileReferences> validFiles = new ArrayList<LSMComponentFileReferences>();
-        ArrayList<ComparableFileName> allDictBTreeFiles = new ArrayList<ComparableFileName>();
-        ArrayList<ComparableFileName> allDeletedKeysBTreeFiles = new ArrayList<ComparableFileName>();
-        ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<ComparableFileName>();
-
-        // Gather files from all IODeviceHandles.
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            cleanupAndGetValidFilesInternal(dev, bloomFilterFilter, null, allBloomFilterFiles);
-            HashSet<String> bloomFilterFilesSet = new HashSet<String>();
-            for (ComparableFileName cmpFileName : allBloomFilterFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                bloomFilterFilesSet.add(cmpFileName.fileName.substring(0, index));
-            }
-            // List of valid BTree files that may or may not have a bloom filter buddy. Will check for buddies below.
-            ArrayList<ComparableFileName> tmpAllDeletedBTreeFiles = new ArrayList<ComparableFileName>();
-            cleanupAndGetValidFilesInternal(dev, deletedKeysBTreeFilter, btreeFactory, tmpAllDeletedBTreeFiles);
-
-            // Look for buddy bloom filters for all valid BTrees. 
-            // If no buddy is found, delete the file, otherwise add the BTree to allBTreeFiles. 
-            HashSet<String> deletedKeysBTreeFilesSet = new HashSet<String>();
-            for (ComparableFileName cmpFileName : tmpAllDeletedBTreeFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                String file = cmpFileName.fileName.substring(0, index);
-                if (bloomFilterFilesSet.contains(file)) {
-                    allDeletedKeysBTreeFiles.add(cmpFileName);
-                    deletedKeysBTreeFilesSet.add(cmpFileName.fileName.substring(0, index));
-                } else {
-                    // Couldn't find the corresponding BTree file; thus, delete
-                    // the deleted-keys BTree file.
-                    // There is no need to delete the inverted-lists file corresponding to the non-existent
-                    // dictionary BTree, because we flush the dictionary BTree first. So if a dictionary BTree 
-                    // file does not exists, then neither can its inverted-list file.
-                    File invalidDeletedKeysBTreeFile = new File(cmpFileName.fullPath);
-                    invalidDeletedKeysBTreeFile.delete();
-                }
-            }
-
-            // We use the dictionary BTree of the inverted index for validation.
-            // List of valid dictionary BTree files that may or may not have a deleted-keys BTree buddy. Will check for buddies below.
-            ArrayList<ComparableFileName> tmpAllBTreeFiles = new ArrayList<ComparableFileName>();
-            cleanupAndGetValidFilesInternal(dev, dictBTreeFilter, btreeFactory, tmpAllBTreeFiles);
-            // Look for buddy deleted-keys BTrees for all valid dictionary BTrees. 
-            // If no buddy is found, delete the file, otherwise add the dictionary BTree to allBTreeFiles. 
-            for (ComparableFileName cmpFileName : tmpAllBTreeFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                String file = cmpFileName.fileName.substring(0, index);
-                if (deletedKeysBTreeFilesSet.contains(file)) {
-                    allDictBTreeFiles.add(cmpFileName);
-                } else {
-                    // Couldn't find the corresponding BTree file; thus, delete
-                    // the deleted-keys BTree file.
-                    // There is no need to delete the inverted-lists file corresponding to the non-existent
-                    // dictionary BTree, because we flush the dictionary BTree first. So if a dictionary BTree 
-                    // file does not exists, then neither can its inverted-list file.
-                    File invalidDeletedKeysBTreeFile = new File(cmpFileName.fullPath);
-                    invalidDeletedKeysBTreeFile.delete();
-                }
-            }
-        }
-        // Sanity check.
-        if (allDictBTreeFiles.size() != allDeletedKeysBTreeFiles.size()
-                || allDictBTreeFiles.size() != allBloomFilterFiles.size()) {
-            throw new HyracksDataException(
-                    "Unequal number of valid Dictionary BTree, Deleted BTree, and Bloom Filter files found. Aborting cleanup.");
-        }
-
-        // Trivial cases.
-        if (allDictBTreeFiles.isEmpty() || allDeletedKeysBTreeFiles.isEmpty() || allBloomFilterFiles.isEmpty()) {
-            return validFiles;
-        }
-
-        if (allDictBTreeFiles.size() == 1 && allDeletedKeysBTreeFiles.size() == 1 && allBloomFilterFiles.size() == 1) {
-            validFiles.add(new LSMComponentFileReferences(allDictBTreeFiles.get(0).fileRef, allDeletedKeysBTreeFiles
-                    .get(0).fileRef, allBloomFilterFiles.get(0).fileRef));
-            return validFiles;
-        }
-
-        // Sorts files names from earliest to latest timestamp.
-        Collections.sort(allDeletedKeysBTreeFiles);
-        Collections.sort(allDictBTreeFiles);
-        Collections.sort(allBloomFilterFiles);
-
-        List<ComparableFileName> validComparableDictBTreeFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastDictBTree = allDictBTreeFiles.get(0);
-        validComparableDictBTreeFiles.add(lastDictBTree);
-
-        List<ComparableFileName> validComparableDeletedKeysBTreeFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastDeletedKeysBTree = allDeletedKeysBTreeFiles.get(0);
-        validComparableDeletedKeysBTreeFiles.add(lastDeletedKeysBTree);
-
-        List<ComparableFileName> validComparableBloomFilterFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastBloomFilter = allBloomFilterFiles.get(0);
-        validComparableBloomFilterFiles.add(lastBloomFilter);
-
-        for (int i = 1; i < allDictBTreeFiles.size(); i++) {
-            ComparableFileName currentRTree = allDictBTreeFiles.get(i);
-            ComparableFileName currentBTree = allDictBTreeFiles.get(i);
-            ComparableFileName currentBloomFilter = allBloomFilterFiles.get(i);
-            // Current start timestamp is greater than last stop timestamp.
-            if (currentRTree.interval[0].compareTo(lastDeletedKeysBTree.interval[1]) > 0
-                    && currentBTree.interval[0].compareTo(lastDeletedKeysBTree.interval[1]) > 0
-                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[1]) > 0) {
-                validComparableDictBTreeFiles.add(currentRTree);
-                validComparableDeletedKeysBTreeFiles.add(currentBTree);
-                validComparableBloomFilterFiles.add(currentBloomFilter);
-                lastDictBTree = currentRTree;
-                lastDeletedKeysBTree = currentBTree;
-                lastBloomFilter = currentBloomFilter;
-            } else if (currentRTree.interval[0].compareTo(lastDictBTree.interval[0]) >= 0
-                    && currentRTree.interval[1].compareTo(lastDictBTree.interval[1]) <= 0
-                    && currentBTree.interval[0].compareTo(lastDeletedKeysBTree.interval[0]) >= 0
-                    && currentBTree.interval[1].compareTo(lastDeletedKeysBTree.interval[1]) <= 0
-                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[0]) >= 0
-                    && currentBloomFilter.interval[1].compareTo(lastBloomFilter.interval[1]) <= 0) {
-                // Invalid files are completely contained in last interval.
-                File invalidRTreeFile = new File(currentRTree.fullPath);
-                invalidRTreeFile.delete();
-                File invalidBTreeFile = new File(currentBTree.fullPath);
-                invalidBTreeFile.delete();
-                File invalidBloomFilterFile = new File(currentBloomFilter.fullPath);
-                invalidBloomFilterFile.delete();
-            } else {
-                // This scenario should not be possible.
-                throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
-            }
-        }
-
-        // Sort valid files in reverse lexicographical order, such that newer
-        // files come first.
-        Collections.sort(validComparableDictBTreeFiles, recencyCmp);
-        Collections.sort(validComparableDeletedKeysBTreeFiles, recencyCmp);
-        Collections.sort(validComparableBloomFilterFiles, recencyCmp);
-
-        Iterator<ComparableFileName> dictBTreeFileIter = validComparableDictBTreeFiles.iterator();
-        Iterator<ComparableFileName> deletedKeysBTreeIter = validComparableDeletedKeysBTreeFiles.iterator();
-        Iterator<ComparableFileName> bloomFilterFileIter = validComparableBloomFilterFiles.iterator();
-        while (dictBTreeFileIter.hasNext() && deletedKeysBTreeIter.hasNext()) {
-            ComparableFileName cmpDictBTreeFile = dictBTreeFileIter.next();
-            ComparableFileName cmpDeletedKeysBTreeFile = deletedKeysBTreeIter.next();
-            ComparableFileName cmpBloomFilterFileName = bloomFilterFileIter.next();
-            validFiles.add(new LSMComponentFileReferences(cmpDictBTreeFile.fileRef, cmpDeletedKeysBTreeFile.fileRef,
-                    cmpBloomFilterFileName.fileRef));
-        }
-
-        return validFiles;
-    }
-
-    @Override
-    public String getInvListsFilePath(String dictBTreeFilePath) {
-        int index = dictBTreeFilePath.lastIndexOf(SPLIT_STRING);
-        String file = dictBTreeFilePath.substring(0, index);
-        return file + SPLIT_STRING + INVLISTS_SUFFIX;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
deleted file mode 100644
index eedf0da..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-
-public class LSMInvertedIndexFlushOperation implements ILSMIOOperation {
-    private final ILSMIndexAccessorInternal accessor;
-    private final LSMInvertedIndexMutableComponent flushingComponent;
-    private final FileReference dictBTreeFlushTarget;
-    private final FileReference deletedKeysBTreeFlushTarget;
-    private final FileReference bloomFilterFlushTarget;
-    private final ILSMIOOperationCallback callback;
-
-    public LSMInvertedIndexFlushOperation(ILSMIndexAccessorInternal accessor,
-            LSMInvertedIndexMutableComponent flushingComponent, FileReference dictBTreeFlushTarget,
-            FileReference deletedKeysBTreeFlushTarget, FileReference bloomFilterFlushTarget,
-            ILSMIOOperationCallback callback) {
-        this.accessor = accessor;
-        this.flushingComponent = flushingComponent;
-        this.dictBTreeFlushTarget = dictBTreeFlushTarget;
-        this.deletedKeysBTreeFlushTarget = deletedKeysBTreeFlushTarget;
-        this.bloomFilterFlushTarget = bloomFilterFlushTarget;
-        this.callback = callback;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getReadDevices() {
-        return Collections.emptySet();
-    }
-
-    @Override
-    public Set<IODeviceHandle> getWriteDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        devs.add(dictBTreeFlushTarget.getDeviceHandle());
-        devs.add(deletedKeysBTreeFlushTarget.getDeviceHandle());
-        devs.add(bloomFilterFlushTarget.getDeviceHandle());
-        return devs;
-
-    }
-
-    @Override
-    public void perform() throws HyracksDataException, IndexException {
-        accessor.flush(this);
-    }
-
-    @Override
-    public ILSMIOOperationCallback getCallback() {
-        return callback;
-    }
-
-    public FileReference getDictBTreeFlushTarget() {
-        return dictBTreeFlushTarget;
-    }
-
-    public FileReference getDeletedKeysBTreeFlushTarget() {
-        return deletedKeysBTreeFlushTarget;
-    }
-
-    public FileReference getBloomFilterFlushTarget() {
-        return bloomFilterFlushTarget;
-    }
-
-    public LSMInvertedIndexMutableComponent getFlushingComponent() {
-        return flushingComponent;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexImmutableComponent.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexImmutableComponent.java
deleted file mode 100644
index 4c9b5e8..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexImmutableComponent.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractImmutableLSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-
-public class LSMInvertedIndexImmutableComponent extends AbstractImmutableLSMComponent {
-
-    private final IInvertedIndex invIndex;
-    private final BTree deletedKeysBTree;
-    private final BloomFilter bloomFilter;
-
-    public LSMInvertedIndexImmutableComponent(IInvertedIndex invIndex, BTree deletedKeysBTree, BloomFilter bloomFilter) {
-        this.invIndex = invIndex;
-        this.deletedKeysBTree = deletedKeysBTree;
-        this.bloomFilter = bloomFilter;
-    }
-
-    @Override
-    public void destroy() throws HyracksDataException {
-        invIndex.deactivate();
-        invIndex.destroy();
-        deletedKeysBTree.deactivate();
-        deletedKeysBTree.destroy();
-        bloomFilter.deactivate();
-        bloomFilter.destroy();
-    }
-
-    public IInvertedIndex getInvIndex() {
-        return invIndex;
-    }
-
-    public BTree getDeletedKeysBTree() {
-        return deletedKeysBTree;
-    }
-
-    public BloomFilter getBloomFilter() {
-        return bloomFilter;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
deleted file mode 100644
index dea628c..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndex;
-
-public class LSMInvertedIndexMergeOperation implements ILSMIOOperation {
-    private final ILSMIndexAccessorInternal accessor;
-    private final List<ILSMComponent> mergingComponents;
-    private final IIndexCursor cursor;
-    private final FileReference dictBTreeMergeTarget;
-    private final FileReference deletedKeysBTreeMergeTarget;
-    private final FileReference bloomFilterMergeTarget;
-    private final ILSMIOOperationCallback callback;
-
-    public LSMInvertedIndexMergeOperation(ILSMIndexAccessorInternal accessor, List<ILSMComponent> mergingComponents,
-            IIndexCursor cursor, FileReference dictBTreeMergeTarget, FileReference deletedKeysBTreeMergeTarget,
-            FileReference bloomFilterMergeTarget, ILSMIOOperationCallback callback) {
-        this.accessor = accessor;
-        this.mergingComponents = mergingComponents;
-        this.cursor = cursor;
-        this.dictBTreeMergeTarget = dictBTreeMergeTarget;
-        this.deletedKeysBTreeMergeTarget = deletedKeysBTreeMergeTarget;
-        this.bloomFilterMergeTarget = bloomFilterMergeTarget;
-        this.callback = callback;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getReadDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        for (Object o : mergingComponents) {
-            LSMInvertedIndexImmutableComponent component = (LSMInvertedIndexImmutableComponent) o;
-            OnDiskInvertedIndex invIndex = (OnDiskInvertedIndex) component.getInvIndex();
-            devs.add(invIndex.getBTree().getFileReference().getDeviceHandle());
-            devs.add(component.getDeletedKeysBTree().getFileReference().getDeviceHandle());
-            devs.add(component.getBloomFilter().getFileReference().getDeviceHandle());
-        }
-        return devs;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getWriteDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        devs.add(dictBTreeMergeTarget.getDeviceHandle());
-        devs.add(deletedKeysBTreeMergeTarget.getDeviceHandle());
-        devs.add(bloomFilterMergeTarget.getDeviceHandle());
-        return devs;
-    }
-
-    @Override
-    public void perform() throws HyracksDataException, IndexException {
-        accessor.merge(this);
-    }
-
-    @Override
-    public ILSMIOOperationCallback getCallback() {
-        return callback;
-    }
-
-    public FileReference getDictBTreeMergeTarget() {
-        return dictBTreeMergeTarget;
-    }
-
-    public FileReference getDeletedKeysBTreeMergeTarget() {
-        return deletedKeysBTreeMergeTarget;
-    }
-
-    public FileReference getBloomFilterMergeTarget() {
-        return bloomFilterMergeTarget;
-    }
-
-    public IIndexCursor getCursor() {
-        return cursor;
-    }
-
-    public List<ILSMComponent> getMergingComponents() {
-        return mergingComponents;
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMutableComponent.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMutableComponent.java
deleted file mode 100644
index c36319d..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMutableComponent.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractMutableLSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-
-public class LSMInvertedIndexMutableComponent extends AbstractMutableLSMComponent {
-
-    private final IInvertedIndex invIndex;
-    private final BTree deletedKeysBTree;
-    private final IInMemoryFreePageManager mfpm;
-
-    public LSMInvertedIndexMutableComponent(IInvertedIndex invIndex, BTree deletedKeysBTree,
-            IInMemoryFreePageManager mfpm) {
-        this.invIndex = invIndex;
-        this.deletedKeysBTree = deletedKeysBTree;
-        this.mfpm = mfpm;
-    }
-
-    public IInvertedIndex getInvIndex() {
-        return invIndex;
-    }
-
-    public BTree getDeletedKeysBTree() {
-        return deletedKeysBTree;
-    }
-
-    @Override
-    protected boolean isFull() {
-        return mfpm.isFull();
-    }
-
-    @Override
-    protected void reset() throws HyracksDataException {
-        invIndex.clear();
-        deletedKeysBTree.clear();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
deleted file mode 100644
index b961b7a..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
-
-public class LSMInvertedIndexOpContext implements ILSMIndexOperationContext {
-
-    private static final int NUM_DOCUMENT_FIELDS = 1;
-
-    private IndexOperation op;
-    private final IInvertedIndex memInvIndex;
-    private final IIndex memDeletedKeysBTree;
-    private final List<ILSMComponent> componentHolder;
-
-    public final IModificationOperationCallback modificationCallback;
-    public final ISearchOperationCallback searchCallback;
-
-    // Tuple that only has the inverted-index elements (aka keys), projecting away the document fields.
-    public PermutingTupleReference keysOnlyTuple;
-
-    // Accessor to the in-memory inverted index.
-    public IInvertedIndexAccessor memInvIndexAccessor;
-    // Accessor to the deleted-keys BTree.
-    public IIndexAccessor deletedKeysBTreeAccessor;
-
-    public LSMInvertedIndexOpContext(IInvertedIndex memInvIndex, IIndex memDeletedKeysBTree,
-            IModificationOperationCallback modificationCallback, ISearchOperationCallback searchCallback) {
-        this.memInvIndex = memInvIndex;
-        this.memDeletedKeysBTree = memDeletedKeysBTree;
-        this.componentHolder = new LinkedList<ILSMComponent>();
-        this.modificationCallback = modificationCallback;
-        this.searchCallback = searchCallback;
-    }
-
-    @Override
-    public void reset() {
-        componentHolder.clear();
-    }
-
-    @Override
-    // TODO: Ignore opcallback for now.
-    public void setOperation(IndexOperation newOp) {
-        reset();
-        switch (newOp) {
-            case INSERT:
-            case DELETE:
-            case PHYSICALDELETE: {
-                if (deletedKeysBTreeAccessor == null) {
-                    memInvIndexAccessor = (IInvertedIndexAccessor) memInvIndex.createAccessor(
-                            NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-                    deletedKeysBTreeAccessor = memDeletedKeysBTree.createAccessor(NoOpOperationCallback.INSTANCE,
-                            NoOpOperationCallback.INSTANCE);
-                    // Project away the document fields, leaving only the key fields.
-                    int numKeyFields = memInvIndex.getInvListTypeTraits().length;
-                    int[] keyFieldPermutation = new int[numKeyFields];
-                    for (int i = 0; i < numKeyFields; i++) {
-                        keyFieldPermutation[i] = NUM_DOCUMENT_FIELDS + i;
-                    }
-                    keysOnlyTuple = new PermutingTupleReference(keyFieldPermutation);
-                }
-                break;
-            }
-        }
-        op = newOp;
-    }
-
-    @Override
-    public IndexOperation getOperation() {
-        return op;
-    }
-
-    @Override
-    public List<ILSMComponent> getComponentHolder() {
-        return componentHolder;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return searchCallback;
-    }
-
-    @Override
-    public IModificationOperationCallback getModificationCallback() {
-        return modificationCallback;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
deleted file mode 100644
index 1b5949a..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BloomFilterAwareBTreePointSearchCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMIndexSearchCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
-
-public class LSMInvertedIndexRangeSearchCursor extends LSMIndexSearchCursor {
-
-    // Assuming the cursor for all deleted-keys indexes are of the same type.
-    private IIndexCursor[] deletedKeysBTreeCursors;
-    protected ArrayList<IIndexAccessor> deletedKeysBTreeAccessors;
-    protected PermutingTupleReference keysOnlyTuple;
-    protected RangePredicate keySearchPred;
-
-    public LSMInvertedIndexRangeSearchCursor(ILSMIndexOperationContext opCtx) {
-        super(opCtx);
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        super.next();
-    }
-
-    @Override
-    public void open(ICursorInitialState initState, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
-        LSMInvertedIndexRangeSearchCursorInitialState lsmInitState = (LSMInvertedIndexRangeSearchCursorInitialState) initState;
-        cmp = lsmInitState.getOriginalKeyComparator();
-        int numComponents = lsmInitState.getNumComponents();
-        rangeCursors = new IIndexCursor[numComponents];
-        for (int i = 0; i < numComponents; i++) {
-            IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) lsmInitState.getIndexAccessors().get(i);
-            rangeCursors[i] = invIndexAccessor.createRangeSearchCursor();
-            invIndexAccessor.rangeSearch(rangeCursors[i], lsmInitState.getSearchPredicate());
-        }
-        lsmHarness = lsmInitState.getLSMHarness();
-        operationalComponents = lsmInitState.getOperationalComponents();
-        includeMemComponent = lsmInitState.getIncludeMemComponent();
-
-        // For searching the deleted-keys BTrees.
-        this.keysOnlyTuple = lsmInitState.getKeysOnlyTuple();
-        deletedKeysBTreeAccessors = lsmInitState.getDeletedKeysBTreeAccessors();
-
-        if (!deletedKeysBTreeAccessors.isEmpty()) {
-            deletedKeysBTreeCursors = new IIndexCursor[deletedKeysBTreeAccessors.size()];
-            int i = 0;
-            if (includeMemComponent) {
-                // No need for a bloom filter for the in-memory BTree.
-                deletedKeysBTreeCursors[i] = deletedKeysBTreeAccessors.get(i).createSearchCursor();
-                ++i;
-            }
-            for (; i < deletedKeysBTreeCursors.length; i++) {
-                deletedKeysBTreeCursors[i] = new BloomFilterAwareBTreePointSearchCursor((IBTreeLeafFrame) lsmInitState
-                        .getgetDeletedKeysBTreeLeafFrameFactory().createFrame(), false,
-                        ((LSMInvertedIndexImmutableComponent) operationalComponents.get(i)).getBloomFilter());
-            }
-
-        }
-        MultiComparator keyCmp = lsmInitState.getKeyComparator();
-        keySearchPred = new RangePredicate(keysOnlyTuple, keysOnlyTuple, true, true, keyCmp, keyCmp);
-
-        setPriorityQueueComparator();
-        initPriorityQueue();
-    }
-
-    /**
-     * Check deleted-keys BTrees whether they contain the key in the checkElement's tuple.
-     */
-    @Override
-    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
-        keysOnlyTuple.reset(checkElement.getTuple());
-        int end = checkElement.getCursorIndex();
-        for (int i = 0; i < end; i++) {
-            deletedKeysBTreeCursors[i].reset();
-            try {
-                deletedKeysBTreeAccessors.get(i).search(deletedKeysBTreeCursors[i], keySearchPred);
-                if (deletedKeysBTreeCursors[i].hasNext()) {
-                    return true;
-                }
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            } finally {
-                deletedKeysBTreeCursors[i].close();
-            }
-        }
-        return false;
-    }
-    
-    @Override
-    protected void checkPriorityQueue() throws HyracksDataException, IndexException {
-        while (!outputPriorityQueue.isEmpty() || needPush == true) {
-            if (!outputPriorityQueue.isEmpty()) {
-                PriorityQueueElement checkElement = outputPriorityQueue.peek();
-                // If there is no previous tuple or the previous tuple can be ignored
-                if (outputElement == null) {
-                    if (isDeleted(checkElement)) {
-                        // If the key has been deleted then pop it and set needPush to true.
-                        // We cannot push immediately because the tuple may be
-                        // modified if hasNext() is called
-                        outputElement = outputPriorityQueue.poll();
-                        needPush = true;
-                    } else {
-                        break;
-                    }
-                } else {
-                    // Compare the previous tuple and the head tuple in the PQ
-                    if (compare(cmp, outputElement.getTuple(), checkElement.getTuple()) == 0) {
-                        // If the previous tuple and the head tuple are
-                        // identical
-                        // then pop the head tuple and push the next tuple from
-                        // the tree of head tuple
-
-                        // the head element of PQ is useless now
-                        PriorityQueueElement e = outputPriorityQueue.poll();
-                        pushIntoPriorityQueue(e);
-                    } else {
-                        // If the previous tuple and the head tuple are different
-                        // the info of previous tuple is useless
-                        if (needPush == true) {
-                            pushIntoPriorityQueue(outputElement);
-                            needPush = false;
-                        }
-                        outputElement = null;
-                    }
-                }
-            } else {
-                // the priority queue is empty and needPush
-                pushIntoPriorityQueue(outputElement);
-                needPush = false;
-                outputElement = null;
-            }
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursorInitialState.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursorInitialState.java
deleted file mode 100644
index 0cec92e..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursorInitialState.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class LSMInvertedIndexRangeSearchCursorInitialState implements ICursorInitialState {
-
-    private final MultiComparator tokensAndKeysCmp;
-    private final MultiComparator keyCmp;
-    private final ILSMHarness lsmHarness;
-
-    private final ArrayList<IIndexAccessor> indexAccessors;
-    private final ArrayList<IIndexAccessor> deletedKeysBTreeAccessors;
-    private final ISearchPredicate predicate;
-    private final PermutingTupleReference keysOnlyTuple;
-    private final ITreeIndexFrameFactory deletedKeysBtreeLeafFrameFactory;
-
-    private final boolean includeMemComponent;
-    private final List<ILSMComponent> operationalComponents;
-
-    public LSMInvertedIndexRangeSearchCursorInitialState(MultiComparator tokensAndKeysCmp, MultiComparator keyCmp,
-            PermutingTupleReference keysOnlyTuple, ITreeIndexFrameFactory deletedKeysBtreeLeafFrameFactory,
-            boolean includeMemComponent, ILSMHarness lsmHarness, ArrayList<IIndexAccessor> indexAccessors,
-            ArrayList<IIndexAccessor> deletedKeysBTreeAccessors, ISearchPredicate predicate,
-            List<ILSMComponent> operationalComponents) {
-        this.tokensAndKeysCmp = tokensAndKeysCmp;
-        this.keyCmp = keyCmp;
-        this.keysOnlyTuple = keysOnlyTuple;
-        this.deletedKeysBtreeLeafFrameFactory = deletedKeysBtreeLeafFrameFactory;
-        this.lsmHarness = lsmHarness;
-        this.indexAccessors = indexAccessors;
-        this.deletedKeysBTreeAccessors = deletedKeysBTreeAccessors;
-        this.predicate = predicate;
-        this.includeMemComponent = includeMemComponent;
-        this.operationalComponents = operationalComponents;
-    }
-
-    public int getNumComponents() {
-        return indexAccessors.size();
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return null;
-    }
-
-    @Override
-    public void setPage(ICachedPage page) {
-    }
-
-    public List<ILSMComponent> getOperationalComponents() {
-        return operationalComponents;
-    }
-
-    public ILSMHarness getLSMHarness() {
-        return lsmHarness;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return null;
-    }
-
-    @Override
-    public void setSearchOperationCallback(ISearchOperationCallback searchCallback) {
-        // Do nothing.
-    }
-
-    public ArrayList<IIndexAccessor> getIndexAccessors() {
-        return indexAccessors;
-    }
-
-    public ArrayList<IIndexAccessor> getDeletedKeysBTreeAccessors() {
-        return deletedKeysBTreeAccessors;
-    }
-
-    public ISearchPredicate getSearchPredicate() {
-        return predicate;
-    }
-
-    @Override
-    public void setOriginialKeyComparator(MultiComparator originalCmp) {
-        // Do nothing.
-    }
-
-    @Override
-    public MultiComparator getOriginalKeyComparator() {
-        return tokensAndKeysCmp;
-    }
-
-    public MultiComparator getKeyComparator() {
-        return keyCmp;
-    }
-
-    public ITreeIndexFrameFactory getgetDeletedKeysBTreeLeafFrameFactory() {
-        return deletedKeysBtreeLeafFrameFactory;
-    }
-
-    public boolean getIncludeMemComponent() {
-        return includeMemComponent;
-    }
-
-    public PermutingTupleReference getKeysOnlyTuple() {
-        return keysOnlyTuple;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
deleted file mode 100644
index 36ad51b..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BloomFilterAwareBTreePointSearchCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
-
-/**
- * Searches the components one-by-one, completely consuming a cursor before moving on to the next one.
- * Therefore, the are no guarantees about sort order of the results.
- */
-public class LSMInvertedIndexSearchCursor implements IIndexCursor {
-
-    private IIndexAccessor currentAccessor;
-    private IIndexCursor currentCursor;
-    private int accessorIndex = -1;
-    private boolean tupleConsumed = true;
-    private ILSMHarness harness;
-    private boolean includeMemComponent;
-    private List<IIndexAccessor> indexAccessors;
-    private ISearchPredicate searchPred;
-    private ISearchOperationCallback searchCallback;
-
-    // Assuming the cursor for all deleted-keys indexes are of the same type.
-    private IIndexCursor[] deletedKeysBTreeCursors;
-    private List<IIndexAccessor> deletedKeysBTreeAccessors;
-    private RangePredicate keySearchPred;
-    private ILSMIndexOperationContext opCtx;
-
-    private List<ILSMComponent> operationalComponents;
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        LSMInvertedIndexSearchCursorInitialState lsmInitState = (LSMInvertedIndexSearchCursorInitialState) initialState;
-        harness = lsmInitState.getLSMHarness();
-        includeMemComponent = lsmInitState.getIncludeMemComponent();
-        operationalComponents = lsmInitState.getOperationalComponents();
-        indexAccessors = lsmInitState.getIndexAccessors();
-        opCtx = lsmInitState.getOpContext();
-        accessorIndex = 0;
-        this.searchPred = searchPred;
-        this.searchCallback = lsmInitState.getSearchOperationCallback();
-
-        // For searching the deleted-keys BTrees.
-        deletedKeysBTreeAccessors = lsmInitState.getDeletedKeysBTreeAccessors();
-        deletedKeysBTreeCursors = new IIndexCursor[deletedKeysBTreeAccessors.size()];
-        int i = 0;
-        if (includeMemComponent) {
-            // No need for a bloom filter for the in-memory BTree.
-            deletedKeysBTreeCursors[i] = deletedKeysBTreeAccessors.get(i).createSearchCursor();
-            ++i;
-        }
-        for (; i < deletedKeysBTreeCursors.length; i++) {
-            deletedKeysBTreeCursors[i] = new BloomFilterAwareBTreePointSearchCursor((IBTreeLeafFrame) lsmInitState
-                    .getgetDeletedKeysBTreeLeafFrameFactory().createFrame(), false,
-                    ((LSMInvertedIndexImmutableComponent) operationalComponents.get(i)).getBloomFilter());
-        }
-
-        MultiComparator keyCmp = lsmInitState.getKeyComparator();
-        keySearchPred = new RangePredicate(null, null, true, true, keyCmp, keyCmp);
-    }
-
-    protected boolean isDeleted(ITupleReference key) throws HyracksDataException, IndexException {
-        keySearchPred.setLowKey(key, true);
-        keySearchPred.setHighKey(key, true);
-        for (int i = 0; i < accessorIndex; i++) {
-            deletedKeysBTreeCursors[i].reset();
-            try {
-                deletedKeysBTreeAccessors.get(i).search(deletedKeysBTreeCursors[i], keySearchPred);
-                if (deletedKeysBTreeCursors[i].hasNext()) {
-                    return true;
-                }
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            } finally {
-                deletedKeysBTreeCursors[i].close();
-            }
-        }
-        return false;
-    }
-
-    // Move to the next tuple that has not been deleted.
-    private boolean nextValidTuple() throws HyracksDataException, IndexException {
-        while (currentCursor.hasNext()) {
-            currentCursor.next();
-            if (!isDeleted(currentCursor.getTuple())) {
-                tupleConsumed = false;
-                return true;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        if (!tupleConsumed) {
-            return true;
-        }
-        if (currentCursor != null) {
-            if (nextValidTuple()) {
-                return true;
-            }
-            currentCursor.close();
-            accessorIndex++;
-        }
-        while (accessorIndex < indexAccessors.size()) {
-            // Current cursor has been exhausted, switch to next accessor/cursor.
-            currentAccessor = indexAccessors.get(accessorIndex);
-            currentCursor = currentAccessor.createSearchCursor();
-            try {
-                currentAccessor.search(currentCursor, searchPred);
-            } catch (OccurrenceThresholdPanicException e) {
-                throw e;
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-            if (nextValidTuple()) {
-                return true;
-            }
-            // Close as we go to release resources.
-            currentCursor.close();
-            accessorIndex++;
-        }
-        return false;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        // Mark the tuple as consumed, so hasNext() can move on.
-        tupleConsumed = true;
-        // We assume that the underlying cursors materialize their results such that
-        // there is no need to reposition the result cursor after reconciliation.
-        if (!searchCallback.proceed(currentCursor.getTuple())) {
-            searchCallback.reconcile(currentCursor.getTuple());
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        reset();
-        accessorIndex = -1;
-        harness.endSearch(opCtx);
-    }
-
-    @Override
-    public void reset() throws HyracksDataException {
-        if (currentCursor != null) {
-            currentCursor.close();
-            currentCursor = null;
-        }
-        accessorIndex = 0;
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return currentCursor.getTuple();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursorInitialState.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursorInitialState.java
deleted file mode 100644
index eb6f338..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursorInitialState.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class LSMInvertedIndexSearchCursorInitialState implements ICursorInitialState {
-
-    private final boolean includeMemComponent;
-    private final ILSMHarness lsmHarness;
-    private final List<IIndexAccessor> indexAccessors;
-    private final List<IIndexAccessor> deletedKeysBTreeAccessors;
-    private final LSMInvertedIndexOpContext ctx;
-    private ISearchOperationCallback searchCallback;
-    private MultiComparator originalCmp;
-    private final MultiComparator keyCmp;
-    private final PermutingTupleReference keysOnlyTuple;
-    private final ITreeIndexFrameFactory deletedKeysBtreeLeafFrameFactory;
-
-    private final List<ILSMComponent> operationalComponents;
-
-    public LSMInvertedIndexSearchCursorInitialState(final MultiComparator keyCmp,
-            PermutingTupleReference keysOnlyTuple, List<IIndexAccessor> indexAccessors,
-            List<IIndexAccessor> deletedKeysBTreeAccessors, ITreeIndexFrameFactory deletedKeysBtreeLeafFrameFactory,
-            IIndexOperationContext ctx, boolean includeMemComponent, ILSMHarness lsmHarness,
-            List<ILSMComponent> operationalComponents) {
-        this.keyCmp = keyCmp;
-        this.keysOnlyTuple = keysOnlyTuple;
-        this.indexAccessors = indexAccessors;
-        this.deletedKeysBTreeAccessors = deletedKeysBTreeAccessors;
-        this.deletedKeysBtreeLeafFrameFactory = deletedKeysBtreeLeafFrameFactory;
-        this.includeMemComponent = includeMemComponent;
-        this.operationalComponents = operationalComponents;
-        this.lsmHarness = lsmHarness;
-        this.ctx = (LSMInvertedIndexOpContext) ctx;
-        this.searchCallback = this.ctx.searchCallback;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return null;
-    }
-
-    @Override
-    public void setPage(ICachedPage page) {
-    }
-
-    public List<ILSMComponent> getOperationalComponents() {
-        return operationalComponents;
-    }
-
-    public List<IIndexAccessor> getIndexAccessors() {
-        return indexAccessors;
-    }
-
-    public boolean getIncludeMemComponent() {
-        return includeMemComponent;
-    }
-
-    public ILSMHarness getLSMHarness() {
-        return lsmHarness;
-    }
-
-    public ILSMIndexOperationContext getOpContext() {
-        return ctx;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return searchCallback;
-    }
-
-    @Override
-    public void setSearchOperationCallback(ISearchOperationCallback searchCallback) {
-        this.searchCallback = searchCallback;
-    }
-
-    @Override
-    public MultiComparator getOriginalKeyComparator() {
-        return originalCmp;
-    }
-
-    @Override
-    public void setOriginialKeyComparator(MultiComparator originalCmp) {
-        this.originalCmp = originalCmp;
-    }
-
-    public MultiComparator getKeyComparator() {
-        return keyCmp;
-    }
-
-    public List<IIndexAccessor> getDeletedKeysBTreeAccessors() {
-        return deletedKeysBTreeAccessors;
-    }
-    
-    public ITreeIndexFrameFactory getgetDeletedKeysBTreeLeafFrameFactory() {
-        return deletedKeysBtreeLeafFrameFactory;
-    }
-
-    public PermutingTupleReference getKeysOnlyTuple() {
-        return keysOnlyTuple;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
deleted file mode 100644
index 1b293eb..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory.InMemoryInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.InvertedIndexUtils;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class PartitionedLSMInvertedIndex extends LSMInvertedIndex {
-
-    public PartitionedLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, OnDiskInvertedIndexFactory diskInvIndexFactory,
-            BTreeFactory deletedKeysBTreeFactory, BloomFilterFactory bloomFilterFactory,
-            ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
-            throws IndexException {
-        super(memBufferCache, memFreePageManager, diskInvIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory,
-                fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                tokenCmpFactories, tokenizerFactory, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-    }
-
-    protected InMemoryInvertedIndex createInMemoryInvertedIndex(IInMemoryBufferCache memBufferCache)
-            throws IndexException {
-        return InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(memBufferCache, memFreePageManager,
-                invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory);
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
deleted file mode 100644
index d5a074e..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import java.io.File;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class InMemoryInvertedIndex implements IInvertedIndex {
-
-    protected final BTree btree;
-    protected final FileReference memBTreeFile = new FileReference(new File("memBTree"));
-    protected final ITypeTraits[] tokenTypeTraits;
-    protected final IBinaryComparatorFactory[] tokenCmpFactories;
-    protected final ITypeTraits[] invListTypeTraits;
-    protected final IBinaryComparatorFactory[] invListCmpFactories;
-    protected final IBinaryTokenizerFactory tokenizerFactory;
-
-    protected final ITypeTraits[] btreeTypeTraits;
-    protected final IBinaryComparatorFactory[] btreeCmpFactories;
-
-    public InMemoryInvertedIndex(IBufferCache memBufferCache, IFreePageManager memFreePageManager,
-            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory) throws BTreeException {
-        this.tokenTypeTraits = tokenTypeTraits;
-        this.tokenCmpFactories = tokenCmpFactories;
-        this.invListTypeTraits = invListTypeTraits;
-        this.invListCmpFactories = invListCmpFactories;
-        this.tokenizerFactory = tokenizerFactory;
-        // BTree tuples: <tokens, inverted-list elements>.
-        int numBTreeFields = tokenTypeTraits.length + invListTypeTraits.length;
-        btreeTypeTraits = new ITypeTraits[numBTreeFields];
-        btreeCmpFactories = new IBinaryComparatorFactory[numBTreeFields];
-        for (int i = 0; i < tokenTypeTraits.length; i++) {
-            btreeTypeTraits[i] = tokenTypeTraits[i];
-            btreeCmpFactories[i] = tokenCmpFactories[i];
-        }
-        for (int i = 0; i < invListTypeTraits.length; i++) {
-            btreeTypeTraits[tokenTypeTraits.length + i] = invListTypeTraits[i];
-            btreeCmpFactories[tokenTypeTraits.length + i] = invListCmpFactories[i];
-        }
-        this.btree = BTreeUtils.createBTree(memBufferCache, memFreePageManager,
-                ((InMemoryBufferCache) memBufferCache).getFileMapProvider(), btreeTypeTraits, btreeCmpFactories,
-                BTreeLeafFrameType.REGULAR_NSM, memBTreeFile);
-    }
-
-    @Override
-    public void create() throws HyracksDataException {
-        btree.create();
-    }
-
-    @Override
-    public void activate() throws HyracksDataException {
-        btree.activate();
-    }
-
-    @Override
-    public void clear() throws HyracksDataException {
-        btree.clear();
-    }
-
-    @Override
-    public void deactivate() throws HyracksDataException {
-        btree.deactivate();
-    }
-
-    @Override
-    public void destroy() throws HyracksDataException {
-        btree.destroy();
-    }
-
-    @Override
-    public void validate() throws HyracksDataException {
-        btree.validate();
-    }
-
-    public void insert(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException {
-        InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
-        ctx.tupleIter.reset(tuple);
-        while (ctx.tupleIter.hasNext()) {
-            ctx.tupleIter.next();
-            ITupleReference insertTuple = ctx.tupleIter.getTuple();
-            try {
-                btreeAccessor.insert(insertTuple);
-            } catch (BTreeDuplicateKeyException e) {
-                // This exception may be caused by duplicate tokens in the same insert "document".
-                // We ignore such duplicate tokens in all inverted-index implementations, hence
-                // we can safely ignore this exception.
-            }
-        }
-    }
-
-    public void delete(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException {
-        InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
-        ctx.tupleIter.reset(tuple);
-        while (ctx.tupleIter.hasNext()) {
-            ctx.tupleIter.next();
-            ITupleReference deleteTuple = ctx.tupleIter.getTuple();
-            try {
-                btreeAccessor.delete(deleteTuple);
-            } catch (BTreeNonExistentKeyException e) {
-                // Ignore this exception, since a document may have duplicate tokens.
-            }
-        }
-    }
-
-    @Override
-    public long getMemoryAllocationSize() {
-        InMemoryBufferCache memBufferCache = (InMemoryBufferCache) btree.getBufferCache();
-        return memBufferCache.getNumPages() * memBufferCache.getPageSize();
-    }
-
-    @Override
-    public IInvertedListCursor createInvertedListCursor() {
-        return new InMemoryInvertedListCursor(invListTypeTraits.length, tokenTypeTraits.length);
-    }
-
-    @Override
-    public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
-        InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
-        ctx.setOperation(IndexOperation.SEARCH);
-        InMemoryInvertedListCursor inMemListCursor = (InMemoryInvertedListCursor) listCursor;
-        inMemListCursor.prepare(ctx.btreeAccessor, ctx.btreePred, ctx.tokenFieldsCmp, ctx.btreeCmp);
-        inMemListCursor.reset(searchKey);
-    }
-
-    @Override
-    public IIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new InMemoryInvertedIndexAccessor(this, new InMemoryInvertedIndexOpContext(btree, tokenCmpFactories,
-                tokenizerFactory));
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return btree.getBufferCache();
-    }
-
-    public BTree getBTree() {
-        return btree;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getInvListCmpFactories() {
-        return invListCmpFactories;
-    }
-
-    @Override
-    public ITypeTraits[] getInvListTypeTraits() {
-        return invListTypeTraits;
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-            throws IndexException {
-        throw new UnsupportedOperationException("Bulk load not supported by in-memory inverted index.");
-    }
-
-    @Override
-    public ITypeTraits[] getTokenTypeTraits() {
-        return tokenTypeTraits;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getTokenCmpFactories() {
-        return tokenCmpFactories;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
deleted file mode 100644
index a62aaf1..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndex.DefaultHyracksCommonContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.TOccurrenceSearcher;
-
-public class InMemoryInvertedIndexAccessor implements IInvertedIndexAccessor {
-    // TODO: This ctx needs to go away.
-    protected final IHyracksCommonContext hyracksCtx = new DefaultHyracksCommonContext();
-    protected final IInvertedIndexSearcher searcher;
-    protected IIndexOperationContext opCtx;
-    protected InMemoryInvertedIndex index;
-    protected BTreeAccessor btreeAccessor;
-
-    public InMemoryInvertedIndexAccessor(InMemoryInvertedIndex index, IIndexOperationContext opCtx) {
-        this.opCtx = opCtx;
-        this.index = index;
-        this.searcher = createSearcher();
-        this.btreeAccessor = (BTreeAccessor) index.getBTree().createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-    }
-
-    @Override
-    public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        opCtx.setOperation(IndexOperation.INSERT);
-        index.insert(tuple, btreeAccessor, opCtx);
-    }
-
-    @Override
-    public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
-        opCtx.setOperation(IndexOperation.DELETE);
-        index.delete(tuple, btreeAccessor, opCtx);
-    }
-
-    @Override
-    public IIndexCursor createSearchCursor() {
-        return new OnDiskInvertedIndexSearchCursor(searcher, index.getInvListTypeTraits().length);
-    }
-
-    @Override
-    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
-        searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred, opCtx);
-    }
-
-    @Override
-    public IInvertedListCursor createInvertedListCursor() {
-        return index.createInvertedListCursor();
-    }
-
-    @Override
-    public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-            throws HyracksDataException, IndexException {
-        index.openInvertedListCursor(listCursor, searchKey, opCtx);
-    }
-
-    @Override
-    public IIndexCursor createRangeSearchCursor() {
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) index.getBTree().getLeafFrameFactory().createFrame();
-        return new BTreeRangeSearchCursor(leafFrame, false);
-    }
-
-    @Override
-    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
-        btreeAccessor.search(cursor, searchPred);
-    }
-
-    public BTreeAccessor getBTreeAccessor() {
-        return btreeAccessor;
-    }
-
-    @Override
-    public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Update not supported by in-memory inverted index.");
-    }
-
-    @Override
-    public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("Upsert not supported by in-memory inverted index.");
-    }
-
-    protected IInvertedIndexSearcher createSearcher() {
-        return new TOccurrenceSearcher(hyracksCtx, index);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexOpContext.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexOpContext.java
deleted file mode 100644
index 9c8865e..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexOpContext.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.InvertedIndexTokenizingTupleIterator;
-
-public class InMemoryInvertedIndexOpContext implements IIndexOperationContext {
-    public IndexOperation op;
-    public final BTree btree;
-
-    // Needed for search operations,    
-    public RangePredicate btreePred;
-    public BTreeAccessor btreeAccessor;
-    public MultiComparator btreeCmp;
-    public IBinaryComparatorFactory[] tokenCmpFactories;
-    public MultiComparator tokenFieldsCmp;
-
-    // To generate in-memory BTree tuples for insertions.
-    protected final IBinaryTokenizerFactory tokenizerFactory;
-    public InvertedIndexTokenizingTupleIterator tupleIter;
-
-    public InMemoryInvertedIndexOpContext(BTree btree, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory) {
-        this.btree = btree;
-        this.tokenCmpFactories = tokenCmpFactories;
-        this.tokenizerFactory = tokenizerFactory;
-    }
-
-    @Override
-    public void setOperation(IndexOperation newOp) {
-        switch (newOp) {
-            case INSERT:
-            case DELETE: {
-                if (tupleIter == null) {
-                    setTokenizingTupleIterator();
-                }
-                break;
-            }
-            case SEARCH: {
-                if (btreePred == null) {
-                    btreePred = new RangePredicate(null, null, true, true, null, null);
-                    btreeAccessor = (BTreeAccessor) btree.createAccessor(NoOpOperationCallback.INSTANCE,
-                            NoOpOperationCallback.INSTANCE);
-                    btreeCmp = MultiComparator.createIgnoreFieldLength(btree.getComparatorFactories());
-                    tokenFieldsCmp = MultiComparator.createIgnoreFieldLength(tokenCmpFactories);
-                }
-                break;
-            }
-            default: {
-                throw new UnsupportedOperationException("Unsupported operation " + newOp);
-            }
-        }
-        op = newOp;
-    }
-
-    @Override
-    public void reset() {
-        op = null;
-    }
-
-    @Override
-    public IndexOperation getOperation() {
-        return op;
-    }
-
-    protected void setTokenizingTupleIterator() {
-        IBinaryTokenizer tokenizer = tokenizerFactory.createTokenizer();
-        tupleIter = new InvertedIndexTokenizingTupleIterator(tokenCmpFactories.length, btree.getFieldCount()
-                - tokenCmpFactories.length, tokenizer);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
deleted file mode 100644
index 6af3bd2..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-
-public class InMemoryInvertedListCursor implements IInvertedListCursor {
-    private RangePredicate btreePred;
-    private BTreeAccessor btreeAccessor;
-    private IIndexCursor btreeCursor;
-    private boolean cursorNeedsClose = false;
-    private IIndexCursor countingCursor;
-    private MultiComparator tokenFieldsCmp;
-    private MultiComparator btreeCmp;
-    private final PermutingTupleReference resultTuple;
-    private final ConcatenatingTupleReference btreeSearchTuple;
-
-    private final ArrayTupleBuilder tokenTupleBuilder;
-    private final ArrayTupleReference tokenTuple = new ArrayTupleReference();
-
-    private int numElements = -1;
-
-    public InMemoryInvertedListCursor(int invListFieldCount, int tokenFieldCount) {
-        int[] fieldPermutation = new int[invListFieldCount];
-        for (int i = 0; i < invListFieldCount; i++) {
-            fieldPermutation[i] = tokenFieldCount + i;
-        }
-        resultTuple = new PermutingTupleReference(fieldPermutation);
-        // Concatenating the tuple with tokens, and the tuple with inverted-list elements.
-        btreeSearchTuple = new ConcatenatingTupleReference(2);
-        tokenTupleBuilder = new ArrayTupleBuilder(tokenFieldCount);
-    }
-
-    public void prepare(BTreeAccessor btreeAccessor, RangePredicate btreePred, MultiComparator tokenFieldsCmp,
-            MultiComparator btreeCmp) throws HyracksDataException, IndexException {
-        // Avoid object creation if this.btreeAccessor == btreeAccessor.
-        if (this.btreeAccessor != btreeAccessor) {
-            this.btreeAccessor = btreeAccessor;
-            this.btreeCursor = btreeAccessor.createSearchCursor();
-            this.countingCursor = btreeAccessor.createCountingSearchCursor();
-            this.btreePred = btreePred;
-            this.btreePred.setLowKeyComparator(tokenFieldsCmp);
-            this.btreePred.setHighKeyComparator(tokenFieldsCmp);
-            this.tokenFieldsCmp = tokenFieldsCmp;
-            this.btreeCmp = btreeCmp;
-        }
-    }
-
-    @Override
-    public int compareTo(IInvertedListCursor cursor) {
-        return size() - cursor.size();
-    }
-
-    public void reset(ITupleReference tuple) throws HyracksDataException, IndexException {
-        numElements = -1;
-        // Copy the tokens tuple for later use in btree probes.
-        TupleUtils.copyTuple(tokenTupleBuilder, tuple, tuple.getFieldCount());
-        tokenTuple.reset(tokenTupleBuilder.getFieldEndOffsets(), tokenTupleBuilder.getByteArray());
-        btreeSearchTuple.reset();
-        btreeSearchTuple.addTuple(tokenTuple);
-        btreeCursor.reset();
-        countingCursor.reset();
-    }
-
-    @Override
-    public void reset(int startPageId, int endPageId, int startOff, int numElements) {
-        // Do nothing
-    }
-
-    @Override
-    public void pinPages() throws HyracksDataException, IndexException {
-        btreePred.setLowKeyComparator(tokenFieldsCmp);
-        btreePred.setHighKeyComparator(tokenFieldsCmp);
-        btreePred.setLowKey(tokenTuple, true);
-        btreePred.setHighKey(tokenTuple, true);
-        btreeAccessor.search(btreeCursor, btreePred);
-        cursorNeedsClose = true;
-    }
-
-    @Override
-    public void unpinPages() throws HyracksDataException {
-        if (cursorNeedsClose) {
-            btreeCursor.close();
-            cursorNeedsClose = false;
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        return btreeCursor.hasNext();
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        btreeCursor.next();
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        resultTuple.reset(btreeCursor.getTuple());
-        return resultTuple;
-    }
-
-    @Override
-    public int size() {
-        if (numElements < 0) {
-            btreePred.setLowKeyComparator(tokenFieldsCmp);
-            btreePred.setHighKeyComparator(tokenFieldsCmp);
-            btreePred.setLowKey(tokenTuple, true);
-            btreePred.setHighKey(tokenTuple, true);
-
-            // Perform the count.
-            try {
-                btreeAccessor.search(countingCursor, btreePred);
-                while (countingCursor.hasNext()) {
-                    countingCursor.next();
-                    ITupleReference countTuple = countingCursor.getTuple();
-                    numElements = IntegerSerializerDeserializer.getInt(countTuple.getFieldData(0),
-                            countTuple.getFieldStart(0));
-                }
-            } catch (HyracksDataException e) {
-                e.printStackTrace();
-            } catch (IndexException e) {
-                e.printStackTrace();
-            } finally {
-                try {
-                    countingCursor.close();
-                } catch (HyracksDataException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-        return numElements;
-    }
-
-    @Override
-    public int getStartPageId() {
-        return 0;
-    }
-
-    @Override
-    public int getEndPageId() {
-        return 0;
-    }
-
-    @Override
-    public int getStartOff() {
-        return 0;
-    }
-
-    @Override
-    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException,
-            IndexException {
-        // Close cursor if necessary.
-        unpinPages();
-        btreeSearchTuple.addTuple(searchTuple);
-        btreePred.setLowKeyComparator(btreeCmp);
-        btreePred.setHighKeyComparator(btreeCmp);
-        btreePred.setLowKey(btreeSearchTuple, true);
-        btreePred.setHighKey(btreeSearchTuple, true);
-        try {
-            btreeAccessor.search(btreeCursor, btreePred);
-        } catch (TreeIndexException e) {
-            btreeSearchTuple.removeLastTuple();
-            throw new HyracksDataException(e);
-        }
-        boolean containsKey = false;
-        try {
-            containsKey = btreeCursor.hasNext();
-        } finally {
-            btreeCursor.close();
-            btreeCursor.reset();
-            btreeSearchTuple.removeLastTuple();
-        }
-        return containsKey;
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException, IndexException {
-        StringBuilder strBuilder = new StringBuilder();
-        try {
-            while (btreeCursor.hasNext()) {
-                btreeCursor.next();
-                ITupleReference tuple = btreeCursor.getTuple();
-                ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(1), tuple.getFieldStart(1),
-                        tuple.getFieldLength(1));
-                DataInput dataIn = new DataInputStream(inStream);
-                Object o = serdes[0].deserialize(dataIn);
-                strBuilder.append(o.toString() + " ");
-            }
-        } finally {
-            btreeCursor.close();
-            btreeCursor.reset();
-        }
-        try {
-            btreeAccessor.search(btreeCursor, btreePred);
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
-        return strBuilder.toString();
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException {
-        return null;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
deleted file mode 100644
index 7c3f4e4..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import java.util.ArrayList;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IPartitionedInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedListPartitions;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.PartitionedTOccurrenceSearcher;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.PartitionedInvertedIndexTokenizingTupleIterator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class PartitionedInMemoryInvertedIndex extends InMemoryInvertedIndex implements IPartitionedInvertedIndex {
-
-    protected final ReentrantReadWriteLock partitionIndexLock = new ReentrantReadWriteLock(true);
-    protected short minPartitionIndex = Short.MAX_VALUE;
-    protected short maxPartitionIndex = Short.MIN_VALUE;
-
-    public PartitionedInMemoryInvertedIndex(IBufferCache memBufferCache, IFreePageManager memFreePageManager,
-            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory) throws BTreeException {
-        super(memBufferCache, memFreePageManager, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                tokenCmpFactories, tokenizerFactory);
-    }
-
-    @Override
-    public void insert(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException {
-        super.insert(tuple, btreeAccessor, ictx);
-        PartitionedInMemoryInvertedIndexOpContext ctx = (PartitionedInMemoryInvertedIndexOpContext) ictx;
-        PartitionedInvertedIndexTokenizingTupleIterator tupleIter = (PartitionedInvertedIndexTokenizingTupleIterator) ctx.tupleIter;
-        updatePartitionIndexes(tupleIter.getNumTokens());
-    }
-
-    @Override
-    public void clear() throws HyracksDataException {
-        super.clear();
-        minPartitionIndex = Short.MAX_VALUE;
-        maxPartitionIndex = Short.MIN_VALUE;
-    }
-
-    public void updatePartitionIndexes(short numTokens) {
-        partitionIndexLock.writeLock().lock();
-        if (numTokens < minPartitionIndex) {
-            minPartitionIndex = numTokens;
-        }
-        if (numTokens > maxPartitionIndex) {
-            maxPartitionIndex = numTokens;
-        }
-        partitionIndexLock.writeLock().unlock();
-    }
-
-    @Override
-    public IIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new PartitionedInMemoryInvertedIndexAccessor(this, new PartitionedInMemoryInvertedIndexOpContext(btree,
-                tokenCmpFactories, tokenizerFactory));
-    }
-
-    @Override
-    public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
-            short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
-            ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException {
-        short minPartitionIndex;
-        short maxPartitionIndex;
-        partitionIndexLock.readLock().lock();
-        minPartitionIndex = this.minPartitionIndex;
-        maxPartitionIndex = this.maxPartitionIndex;
-        partitionIndexLock.readLock().unlock();
-
-        if (minPartitionIndex == Short.MAX_VALUE && maxPartitionIndex == Short.MIN_VALUE) {
-            // Index must be empty.
-            return false;
-        }
-        short partitionStartIndex = minPartitionIndex;
-        short partitionEndIndex = maxPartitionIndex;
-        if (numTokensLowerBound >= 0) {
-            partitionStartIndex = (short) Math.max(minPartitionIndex, numTokensLowerBound);
-        }
-        if (numTokensUpperBound >= 0) {
-            partitionEndIndex = (short) Math.min(maxPartitionIndex, numTokensUpperBound);
-        }
-
-        PartitionedTOccurrenceSearcher partSearcher = (PartitionedTOccurrenceSearcher) searcher;
-        PartitionedInMemoryInvertedIndexOpContext ctx = (PartitionedInMemoryInvertedIndexOpContext) ictx;
-        ctx.setOperation(IndexOperation.SEARCH);
-        // We can pick either of the full low or high search key, since they should be identical here.
-        ITupleReference searchKey = partSearcher.getFullLowSearchKey();
-        ctx.btreePred.setLowKey(searchKey, true);
-        ctx.btreePred.setHighKey(searchKey, true);
-        // Go through all possibly partitions and see if the token matches.
-        // TODO: This procedure could be made more efficient by determining the next partition to search
-        // using the last existing partition and re-searching the BTree with an open interval as low key.
-        for (short i = partitionStartIndex; i <= partitionEndIndex; i++) {
-            partSearcher.setNumTokensBoundsInSearchKeys(i, i);
-            InMemoryInvertedListCursor inMemListCursor = (InMemoryInvertedListCursor) partSearcher
-                    .getCachedInvertedListCursor();
-            inMemListCursor.prepare(ctx.btreeAccessor, ctx.btreePred, ctx.tokenFieldsCmp, ctx.btreeCmp);
-            inMemListCursor.reset(searchKey);
-            invListPartitions.addInvertedListCursor(inMemListCursor, i);
-        }
-        return true;
-    }
-
-    @Override
-    public boolean isEmpty() {
-        partitionIndexLock.readLock().lock();
-        if (minPartitionIndex == Short.MAX_VALUE && maxPartitionIndex == Short.MIN_VALUE) {
-            // Index must be empty.
-            partitionIndexLock.readLock().unlock();
-            return true;
-        }
-        partitionIndexLock.readLock().unlock();
-        return false;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexAccessor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexAccessor.java
deleted file mode 100644
index 813961c..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexAccessor.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.PartitionedTOccurrenceSearcher;
-
-public class PartitionedInMemoryInvertedIndexAccessor extends InMemoryInvertedIndexAccessor {
-
-    public PartitionedInMemoryInvertedIndexAccessor(InMemoryInvertedIndex index, IIndexOperationContext opCtx) {
-        super(index, opCtx);
-    }
-
-    protected IInvertedIndexSearcher createSearcher() {
-        return new PartitionedTOccurrenceSearcher(hyracksCtx, index);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
deleted file mode 100644
index f0e5046..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.PartitionedInvertedIndexTokenizingTupleIterator;
-
-public class PartitionedInMemoryInvertedIndexOpContext extends InMemoryInvertedIndexOpContext {
-
-    public PartitionedInMemoryInvertedIndexOpContext(BTree btree, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory) {
-        super(btree, tokenCmpFactories, tokenizerFactory);
-    }
-
-    protected void setTokenizingTupleIterator() {
-        IBinaryTokenizer tokenizer = tokenizerFactory.createTokenizer();
-        tupleIter = new PartitionedInvertedIndexTokenizingTupleIterator(tokenCmpFactories.length, btree.getFieldCount()
-                - tokenCmpFactories.length, tokenizer);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListBuilder.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListBuilder.java
deleted file mode 100644
index fd12792..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListBuilder.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-
-public class FixedSizeElementInvertedListBuilder implements IInvertedListBuilder {
-    private final int listElementSize;
-    private int listSize = 0;
-
-    private byte[] targetBuf;
-    private int pos;
-
-    public FixedSizeElementInvertedListBuilder(ITypeTraits[] invListFields) {
-        int tmp = 0;
-        for (int i = 0; i < invListFields.length; i++) {
-            tmp += invListFields[i].getFixedLength();
-        }
-        listElementSize = tmp;
-    }
-
-    @Override
-    public boolean startNewList(ITupleReference tuple, int tokenField) {
-        if (pos + listElementSize > targetBuf.length) {
-            return false;
-        } else {
-            listSize = 0;
-            return true;
-        }
-    }
-
-    @Override
-    public boolean appendElement(ITupleReference tuple, int numTokenFields, int numElementFields) {
-        if (pos + listElementSize > targetBuf.length) {
-            return false;
-        }
-
-        for (int i = 0; i < numElementFields; i++) {
-            int field = numTokenFields + i;
-            System.arraycopy(tuple.getFieldData(field), tuple.getFieldStart(field), targetBuf, pos,
-                    tuple.getFieldLength(field));
-        }
-
-        listSize++;
-        pos += listElementSize;
-
-        return true;
-    }
-
-    @Override
-    public void setTargetBuffer(byte[] targetBuf, int startPos) {
-        this.targetBuf = targetBuf;
-        this.pos = startPos;
-    }
-
-    @Override
-    public int getListSize() {
-        return listSize;
-    }
-
-    @Override
-    public int getPos() {
-        return pos;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListBuilderFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListBuilderFactory.java
deleted file mode 100644
index ca2d3b6..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListBuilderFactory.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilderFactory;
-
-public class FixedSizeElementInvertedListBuilderFactory implements IInvertedListBuilderFactory {
-
-    private final ITypeTraits[] invListFields;
-
-    public FixedSizeElementInvertedListBuilderFactory(ITypeTraits[] invListFields) {
-        this.invListFields = invListFields;
-    }
-
-    @Override
-    public IInvertedListBuilder create() {
-        return new FixedSizeElementInvertedListBuilder(invListFields);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
deleted file mode 100644
index f55a700..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-
-public class FixedSizeElementInvertedListCursor implements IInvertedListCursor {
-
-    private final IBufferCache bufferCache;
-    private final int fileId;
-    private final int elementSize;
-    private int currentElementIx;
-    private int currentOff;
-    private int currentPageIx;
-
-    private int startPageId;
-    private int endPageId;
-    private int startOff;
-    private int numElements;
-
-    private final FixedSizeTupleReference tuple;
-    private ICachedPage[] pages = new ICachedPage[10];
-    private int[] elementIndexes = new int[10];
-    
-    private boolean pinned = false;
-
-    public FixedSizeElementInvertedListCursor(IBufferCache bufferCache, int fileId, ITypeTraits[] invListFields) {
-        this.bufferCache = bufferCache;
-        this.fileId = fileId;
-        this.currentElementIx = 0;
-        this.currentPageIx = 0;
-
-        int tmp = 0;
-        for (int i = 0; i < invListFields.length; i++) {
-            tmp += invListFields[i].getFixedLength();
-        }
-        elementSize = tmp;
-        this.currentOff = -elementSize;
-        this.tuple = new FixedSizeTupleReference(invListFields);
-    }
-
-    @Override
-    public boolean hasNext() {
-        if (currentElementIx < numElements)
-            return true;
-        else
-            return false;
-    }
-
-    @Override
-    public void next() {
-        if (currentOff + 2 * elementSize > bufferCache.getPageSize()) {
-            currentPageIx++;
-            currentOff = 0;
-        } else {
-            currentOff += elementSize;
-        }
-        currentElementIx++;
-        tuple.reset(pages[currentPageIx].getBuffer().array(), currentOff);
-    }
-
-    @Override
-    public void pinPages() throws HyracksDataException {
-        if (pinned) {
-            return;
-        }
-        int pix = 0;
-        for (int i = startPageId; i <= endPageId; i++) {
-            pages[pix] = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, i), false);
-            pages[pix].acquireReadLatch();
-            pix++;
-        }
-        pinned = true;
-    }
-
-    @Override
-    public void unpinPages() throws HyracksDataException {
-        int numPages = endPageId - startPageId + 1;
-        for (int i = 0; i < numPages; i++) {
-            pages[i].releaseReadLatch();
-            bufferCache.unpin(pages[i]);
-        }
-        pinned = false;
-    }
-
-    private void positionCursor(int elementIx) {
-        int numPages = endPageId - startPageId + 1;
-
-        currentPageIx = binarySearch(elementIndexes, 0, numPages, elementIx);
-        if (currentPageIx < 0) {
-            throw new IndexOutOfBoundsException("Requested index: " + elementIx + " from array with numElements: "
-                    + numElements);
-        }
-
-        if (currentPageIx == 0) {
-            currentOff = startOff + elementIx * elementSize;
-        } else {
-            int relativeElementIx = elementIx - elementIndexes[currentPageIx - 1] - 1;
-            currentOff = relativeElementIx * elementSize;
-        }
-
-        currentElementIx = elementIx;
-        tuple.reset(pages[currentPageIx].getBuffer().array(), currentOff);
-    }
-
-    @Override
-    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) {
-        int mid;
-        int begin = 0;
-        int end = numElements - 1;
-
-        while (begin <= end) {
-            mid = (begin + end) / 2;
-            positionCursor(mid);
-            int cmp = invListCmp.compare(searchTuple, tuple);
-            if (cmp < 0) {
-                end = mid - 1;
-            } else if (cmp > 0) {
-                begin = mid + 1;
-            } else {
-                return true;
-            }
-        }
-
-        return false;
-    }
-
-    @Override
-    public void reset(int startPageId, int endPageId, int startOff, int numElements) {
-        this.startPageId = startPageId;
-        this.endPageId = endPageId;
-        this.startOff = startOff;
-        this.numElements = numElements;
-        this.currentElementIx = 0;
-        this.currentPageIx = 0;
-        this.currentOff = startOff - elementSize;
-
-        int numPages = endPageId - startPageId + 1;
-        if (numPages > pages.length) {
-            pages = new ICachedPage[endPageId - startPageId + 1];
-            elementIndexes = new int[endPageId - startPageId + 1];
-        }
-
-        // fill elementIndexes
-        // first page
-        int cumulElements = (bufferCache.getPageSize() - startOff) / elementSize;
-        elementIndexes[0] = cumulElements - 1;
-
-        // middle, full pages
-        for (int i = 1; i < numPages - 1; i++) {
-            elementIndexes[i] = elementIndexes[i - 1] + (bufferCache.getPageSize() / elementSize);
-        }
-
-        // last page
-        elementIndexes[numPages - 1] = numElements - 1;
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException {
-        int oldCurrentOff = currentOff;
-        int oldCurrentPageId = currentPageIx;
-        int oldCurrentElementIx = currentElementIx;
-
-        currentOff = startOff - elementSize;
-        currentPageIx = 0;
-        currentElementIx = 0;
-
-        StringBuilder strBuilder = new StringBuilder();
-
-        while (hasNext()) {
-            next();
-            for (int i = 0; i < tuple.getFieldCount(); i++) {
-                ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                        tuple.getFieldLength(i));
-                DataInput dataIn = new DataInputStream(inStream);
-                Object o = serdes[i].deserialize(dataIn);
-                strBuilder.append(o.toString());
-                if (i + 1 < tuple.getFieldCount())
-                    strBuilder.append(",");
-            }
-            strBuilder.append(" ");
-        }
-
-        // reset previous state
-        currentOff = oldCurrentOff;
-        currentPageIx = oldCurrentPageId;
-        currentElementIx = oldCurrentElementIx;
-
-        return strBuilder.toString();
-    }
-
-    @SuppressWarnings("rawtypes")
-    public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
-            DataInput dataIn = new DataInputStream(inStream);
-            Object o = serdes[i].deserialize(dataIn);
-            strBuilder.append(o.toString());
-            if (i + 1 < tuple.getFieldCount())
-                strBuilder.append(",");
-        }
-        return strBuilder.toString();
-    }
-
-    private int binarySearch(int[] arr, int arrStart, int arrLength, int key) {
-        int mid;
-        int begin = arrStart;
-        int end = arrStart + arrLength - 1;
-
-        while (begin <= end) {
-            mid = (begin + end) / 2;
-            int cmp = (key - arr[mid]);
-            if (cmp < 0) {
-                end = mid - 1;
-            } else if (cmp > 0) {
-                begin = mid + 1;
-            } else {
-                return mid;
-            }
-        }
-
-        if (begin > arr.length - 1)
-            return -1;
-        if (key < arr[begin])
-            return begin;
-        else
-            return -1;
-    }
-
-    @Override
-    public int compareTo(IInvertedListCursor invListCursor) {
-        return numElements - invListCursor.size();
-    }
-
-    @Override
-    public int getEndPageId() {
-        return endPageId;
-    }
-
-    @Override
-    public int size() {
-        return numElements;
-    }
-
-    @Override
-    public int getStartOff() {
-        return startOff;
-    }
-
-    @Override
-    public int getStartPageId() {
-        return startPageId;
-    }
-
-    public int getOffset() {
-        return currentOff;
-    }
-
-    public ICachedPage getPage() {
-        return pages[currentPageIx];
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return tuple;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleAccessor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleAccessor.java
deleted file mode 100644
index d00c6d3..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleAccessor.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.comm.FrameHelper;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-
-public class FixedSizeFrameTupleAccessor implements IFrameTupleAccessor {
-
-    private final int frameSize;
-    private ByteBuffer buffer;
-
-    private final ITypeTraits[] fields;
-    private final int[] fieldStartOffsets;
-    private final int tupleSize;
-
-    public FixedSizeFrameTupleAccessor(int frameSize, ITypeTraits[] fields) {
-        this.frameSize = frameSize;
-        this.fields = fields;
-        this.fieldStartOffsets = new int[fields.length];
-        this.fieldStartOffsets[0] = 0;
-        for (int i = 1; i < fields.length; i++) {
-            fieldStartOffsets[i] = fieldStartOffsets[i - 1] + fields[i - 1].getFixedLength();
-        }
-
-        int tmp = 0;
-        for (int i = 0; i < fields.length; i++) {
-            tmp += fields[i].getFixedLength();
-        }
-        tupleSize = tmp;
-    }
-
-    @Override
-    public ByteBuffer getBuffer() {
-        return buffer;
-    }
-
-    @Override
-    public int getFieldCount() {
-        return fields.length;
-    }
-
-    @Override
-    public int getFieldEndOffset(int tupleIndex, int fIdx) {
-        return getTupleStartOffset(tupleIndex) + fieldStartOffsets[fIdx] + fields[fIdx].getFixedLength();
-    }
-
-    @Override
-    public int getFieldLength(int tupleIndex, int fIdx) {
-        return fields[fIdx].getFixedLength();
-    }
-
-    @Override
-    public int getFieldSlotsLength() {
-        return 0;
-    }
-
-    @Override
-    public int getFieldStartOffset(int tupleIndex, int fIdx) {
-        return tupleIndex * tupleSize + fieldStartOffsets[fIdx];
-    }
-
-    @Override
-    public int getTupleCount() {
-        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize));
-    }
-
-    @Override
-    public int getTupleEndOffset(int tupleIndex) {
-        return getFieldEndOffset(tupleIndex, fields.length - 1);
-    }
-
-    @Override
-    public int getTupleStartOffset(int tupleIndex) {
-        return tupleIndex * tupleSize;
-    }
-
-    @Override
-    public void reset(ByteBuffer buffer) {
-        this.buffer = buffer;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleAppender.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleAppender.java
deleted file mode 100644
index 2a8bf8d..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleAppender.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.comm.FrameHelper;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-
-public class FixedSizeFrameTupleAppender {
-
-    private static final int TUPLE_COUNT_SIZE = 4;
-    private final int frameSize;
-    private final int tupleSize;
-    private ByteBuffer buffer;
-    private int tupleCount;
-    private int tupleDataEndOffset;
-
-    public FixedSizeFrameTupleAppender(int frameSize, ITypeTraits[] fields) {
-        this.frameSize = frameSize;
-        int tmp = 0;
-        for (int i = 0; i < fields.length; i++) {
-            tmp += fields[i].getFixedLength();
-        }
-        tupleSize = tmp;
-    }
-
-    public void reset(ByteBuffer buffer, boolean clear) {
-        this.buffer = buffer;
-        if (clear) {
-            buffer.putInt(FrameHelper.getTupleCountOffset(frameSize), 0);
-            tupleCount = 0;
-            tupleDataEndOffset = 0;
-        }
-    }
-
-    public boolean append(byte[] bytes, int offset) {
-        if (tupleDataEndOffset + tupleSize + TUPLE_COUNT_SIZE <= frameSize) {
-            System.arraycopy(bytes, offset, buffer.array(), tupleDataEndOffset, tupleSize);
-            tupleDataEndOffset += tupleSize;
-            tupleCount++;
-            return true;
-        }
-        return false;
-    }
-
-    public boolean append(byte[] bytes, int offset, int length) {
-        if (tupleDataEndOffset + length + TUPLE_COUNT_SIZE <= frameSize) {
-            System.arraycopy(bytes, offset, buffer.array(), tupleDataEndOffset, length);
-            tupleDataEndOffset += length;
-            return true;
-        }
-        return false;
-    }
-
-    public boolean append(int fieldValue) {
-        if (tupleDataEndOffset + 4 + TUPLE_COUNT_SIZE <= frameSize) {
-            buffer.putInt(tupleDataEndOffset, fieldValue);
-            tupleDataEndOffset += 4;
-            tupleCount++;
-            return true;
-        }
-        return false;
-    }
-
-    public boolean append(long fieldValue) {
-        if (tupleDataEndOffset + 8 + TUPLE_COUNT_SIZE <= frameSize) {
-            buffer.putLong(tupleDataEndOffset, fieldValue);
-            tupleDataEndOffset += 8;
-            tupleCount++;
-            return true;
-        }
-        return false;
-    }
-
-    public boolean append(char fieldValue) {
-        if (tupleDataEndOffset + 2 + TUPLE_COUNT_SIZE <= frameSize) {
-            buffer.putLong(tupleDataEndOffset, fieldValue);
-            tupleDataEndOffset += 2;
-            tupleCount++;
-            return true;
-        }
-        return false;
-    }
-
-    public boolean append(byte fieldValue) {
-        if (tupleDataEndOffset + 1 + TUPLE_COUNT_SIZE <= frameSize) {
-            buffer.put(tupleDataEndOffset, fieldValue);
-            tupleDataEndOffset += 1;
-            tupleCount++;
-            return true;
-        }
-        return false;
-    }
-
-    // returns true if an entire tuple fits
-    // returns false otherwise
-    public boolean hasSpace() {
-        return tupleDataEndOffset + tupleSize + TUPLE_COUNT_SIZE <= frameSize;
-    }
-
-    public void incrementTupleCount(int count) {
-        buffer.putInt(FrameHelper.getTupleCountOffset(frameSize),
-                buffer.getInt(FrameHelper.getTupleCountOffset(frameSize)) + count);
-    }
-
-    public int getTupleCount() {
-        return tupleCount;
-    }
-
-    public ByteBuffer getBuffer() {
-        return buffer;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeTupleReference.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeTupleReference.java
deleted file mode 100644
index 97f6937..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeTupleReference.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public class FixedSizeTupleReference implements ITupleReference {
-
-    private final ITypeTraits[] typeTraits;
-    private final int[] fieldStartOffsets;
-    private byte[] data;
-    private int startOff;
-
-    public FixedSizeTupleReference(ITypeTraits[] typeTraits) {
-        this.typeTraits = typeTraits;
-        this.fieldStartOffsets = new int[typeTraits.length];
-        this.fieldStartOffsets[0] = 0;
-        for (int i = 1; i < typeTraits.length; i++) {
-            fieldStartOffsets[i] = fieldStartOffsets[i - 1] + typeTraits[i - 1].getFixedLength();
-        }
-    }
-
-    public void reset(byte[] data, int startOff) {
-        this.data = data;
-        this.startOff = startOff;
-    }
-
-    @Override
-    public int getFieldCount() {
-        return typeTraits.length;
-    }
-
-    @Override
-    public byte[] getFieldData(int fIdx) {
-        return data;
-    }
-
-    @Override
-    public int getFieldLength(int fIdx) {
-        return typeTraits[fIdx].getFixedLength();
-    }
-
-    @Override
-    public int getFieldStart(int fIdx) {
-        return startOff + fieldStartOffsets[fIdx];
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
deleted file mode 100644
index afeaf90..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
+++ /dev/null
@@ -1,663 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.UnsortedInputException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.InvertedIndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.TOccurrenceSearcher;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-/**
- * An inverted index consists of two files: 1. a file storing (paginated)
- * inverted lists 2. a BTree-file mapping from tokens to inverted lists.
- * Implemented features: bulk loading and searching (based on T-Occurrence) Not
- * implemented features: updates (insert/update/delete) Limitations: a query
- * cannot exceed the size of a Hyracks frame.
- */
-public class OnDiskInvertedIndex implements IInvertedIndex {
-    protected final IHyracksCommonContext ctx = new DefaultHyracksCommonContext();
-
-    // Schema of BTree tuples, set in constructor.    
-    protected final int invListStartPageIdField;
-    protected final int invListEndPageIdField;
-    protected final int invListStartOffField;
-    protected final int invListNumElementsField;
-
-    // Type traits to be appended to the token type trait which finally form the BTree field type traits.
-    protected static final ITypeTraits[] btreeValueTypeTraits = new ITypeTraits[4];
-    static {
-        // startPageId
-        btreeValueTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        // endPageId
-        btreeValueTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        // startOff
-        btreeValueTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        // numElements
-        btreeValueTypeTraits[3] = IntegerPointable.TYPE_TRAITS;
-    }
-
-    protected BTree btree;
-    protected int rootPageId = 0;
-    protected IBufferCache bufferCache;
-    protected IFileMapProvider fileMapProvider;
-    protected int fileId = -1;
-    protected final ITypeTraits[] invListTypeTraits;
-    protected final IBinaryComparatorFactory[] invListCmpFactories;
-    protected final ITypeTraits[] tokenTypeTraits;
-    protected final IBinaryComparatorFactory[] tokenCmpFactories;
-    protected final IInvertedListBuilder invListBuilder;
-    protected final int numTokenFields;
-    protected final int numInvListKeys;
-    protected final FileReference invListsFile;
-    // Last page id of inverted-lists file (inclusive). Set during bulk load.
-    protected int invListsMaxPageId = -1;
-    protected boolean isOpen = false;
-
-    public OnDiskInvertedIndex(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IInvertedListBuilder invListBuilder, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, FileReference btreeFile, FileReference invListsFile)
-            throws IndexException {
-        this.bufferCache = bufferCache;
-        this.fileMapProvider = fileMapProvider;
-        this.invListBuilder = invListBuilder;
-        this.invListTypeTraits = invListTypeTraits;
-        this.invListCmpFactories = invListCmpFactories;
-        this.tokenTypeTraits = tokenTypeTraits;
-        this.tokenCmpFactories = tokenCmpFactories;
-        this.btree = BTreeUtils.createBTree(bufferCache, fileMapProvider, getBTreeTypeTraits(tokenTypeTraits),
-                tokenCmpFactories, BTreeLeafFrameType.REGULAR_NSM, btreeFile);
-        this.numTokenFields = btree.getComparatorFactories().length;
-        this.numInvListKeys = invListCmpFactories.length;
-        this.invListsFile = invListsFile;
-        this.invListStartPageIdField = numTokenFields;
-        this.invListEndPageIdField = numTokenFields + 1;
-        this.invListStartOffField = numTokenFields + 2;
-        this.invListNumElementsField = numTokenFields + 3;
-    }
-
-    @Override
-    public synchronized void create() throws HyracksDataException {
-        if (isOpen) {
-            throw new HyracksDataException("Failed to create since index is already open.");
-        }
-        btree.create();
-
-        boolean fileIsMapped = false;
-        synchronized (fileMapProvider) {
-            fileIsMapped = fileMapProvider.isMapped(invListsFile);
-            if (!fileIsMapped) {
-                bufferCache.createFile(invListsFile);
-            }
-            fileId = fileMapProvider.lookupFileId(invListsFile);
-            try {
-                // Also creates the file if it doesn't exist yet.
-                bufferCache.openFile(fileId);
-            } catch (HyracksDataException e) {
-                // Revert state of buffer cache since file failed to open.
-                if (!fileIsMapped) {
-                    bufferCache.deleteFile(fileId, false);
-                }
-                throw e;
-            }
-        }
-        bufferCache.closeFile(fileId);
-    }
-
-    @Override
-    public synchronized void activate() throws HyracksDataException {
-        if (isOpen) {
-            return;
-        }
-
-        btree.activate();
-        boolean fileIsMapped = false;
-        synchronized (fileMapProvider) {
-            fileIsMapped = fileMapProvider.isMapped(invListsFile);
-            if (!fileIsMapped) {
-                bufferCache.createFile(invListsFile);
-            }
-            fileId = fileMapProvider.lookupFileId(invListsFile);
-            try {
-                // Also creates the file if it doesn't exist yet.
-                bufferCache.openFile(fileId);
-            } catch (HyracksDataException e) {
-                // Revert state of buffer cache since file failed to open.
-                if (!fileIsMapped) {
-                    bufferCache.deleteFile(fileId, false);
-                }
-                throw e;
-            }
-        }
-
-        isOpen = true;
-    }
-
-    @Override
-    public synchronized void deactivate() throws HyracksDataException {
-        if (!isOpen) {
-            return;
-        }
-
-        btree.deactivate();
-        bufferCache.closeFile(fileId);
-
-        isOpen = false;
-    }
-
-    @Override
-    public synchronized void destroy() throws HyracksDataException {
-        if (isOpen) {
-            throw new HyracksDataException("Failed to destroy since index is already open.");
-        }
-
-        btree.destroy();
-        invListsFile.delete();
-        if (fileId == -1) {
-            return;
-        }
-
-        bufferCache.deleteFile(fileId, false);
-        fileId = -1;
-    }
-
-    @Override
-    public synchronized void clear() throws HyracksDataException {
-        if (!isOpen) {
-            throw new HyracksDataException("Failed to clear since index is not open.");
-        }
-        btree.clear();
-        bufferCache.closeFile(fileId);
-        bufferCache.deleteFile(fileId, false);
-        invListsFile.getFile().delete();
-
-        boolean fileIsMapped = false;
-        synchronized (fileMapProvider) {
-            fileIsMapped = fileMapProvider.isMapped(invListsFile);
-            if (!fileIsMapped) {
-                bufferCache.createFile(invListsFile);
-            }
-            fileId = fileMapProvider.lookupFileId(invListsFile);
-            try {
-                // Also creates the file if it doesn't exist yet.
-                bufferCache.openFile(fileId);
-            } catch (HyracksDataException e) {
-                // Revert state of buffer cache since file failed to open.
-                if (!fileIsMapped) {
-                    bufferCache.deleteFile(fileId, false);
-                }
-                throw e;
-            }
-        }
-    }
-
-    @Override
-    public IInvertedListCursor createInvertedListCursor() {
-        return new FixedSizeElementInvertedListCursor(bufferCache, fileId, invListTypeTraits);
-    }
-
-    @Override
-    public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
-        OnDiskInvertedIndexOpContext ctx = (OnDiskInvertedIndexOpContext) ictx;
-        ctx.btreePred.setLowKeyComparator(ctx.searchCmp);
-        ctx.btreePred.setHighKeyComparator(ctx.searchCmp);
-        ctx.btreePred.setLowKey(searchKey, true);
-        ctx.btreePred.setHighKey(searchKey, true);
-        ctx.btreeAccessor.search(ctx.btreeCursor, ctx.btreePred);
-        try {
-            if (ctx.btreeCursor.hasNext()) {
-                ctx.btreeCursor.next();
-                resetInvertedListCursor(ctx.btreeCursor.getTuple(), listCursor);
-            } else {
-                listCursor.reset(0, 0, 0, 0);
-            }
-        } finally {
-            ctx.btreeCursor.close();
-            ctx.btreeCursor.reset();
-        }
-    }
-
-    public void resetInvertedListCursor(ITupleReference btreeTuple, IInvertedListCursor listCursor) {
-        int startPageId = IntegerSerializerDeserializer.getInt(btreeTuple.getFieldData(invListStartPageIdField),
-                btreeTuple.getFieldStart(invListStartPageIdField));
-        int endPageId = IntegerSerializerDeserializer.getInt(btreeTuple.getFieldData(invListEndPageIdField),
-                btreeTuple.getFieldStart(invListEndPageIdField));
-        int startOff = IntegerSerializerDeserializer.getInt(btreeTuple.getFieldData(invListStartOffField),
-                btreeTuple.getFieldStart(invListStartOffField));
-        int numElements = IntegerSerializerDeserializer.getInt(btreeTuple.getFieldData(invListNumElementsField),
-                btreeTuple.getFieldStart(invListNumElementsField));
-        listCursor.reset(startPageId, endPageId, startOff, numElements);
-    }
-
-    public final class OnDiskInvertedIndexBulkLoader implements IIndexBulkLoader {
-        private final ArrayTupleBuilder btreeTupleBuilder;
-        private final ArrayTupleReference btreeTupleReference;
-        private final IIndexBulkLoader btreeBulkloader;
-
-        private int currentInvListStartPageId;
-        private int currentInvListStartOffset;
-        private final ArrayTupleBuilder lastTupleBuilder;
-        private final ArrayTupleReference lastTuple;
-
-        private int currentPageId;
-        private ICachedPage currentPage;
-        private final MultiComparator tokenCmp;
-        private final MultiComparator invListCmp;
-
-        private final boolean verifyInput;
-        private final MultiComparator allCmp;
-
-        public OnDiskInvertedIndexBulkLoader(float btreeFillFactor, boolean verifyInput, long numElementsHint,
-                int startPageId, int fileId) throws IndexException, HyracksDataException {
-            this.verifyInput = verifyInput;
-            this.tokenCmp = MultiComparator.create(btree.getComparatorFactories());
-            this.invListCmp = MultiComparator.create(invListCmpFactories);
-            if (verifyInput) {
-                allCmp = MultiComparator.create(btree.getComparatorFactories(), invListCmpFactories);
-            } else {
-                allCmp = null;
-            }
-            this.btreeTupleBuilder = new ArrayTupleBuilder(btree.getFieldCount());
-            this.btreeTupleReference = new ArrayTupleReference();
-            this.lastTupleBuilder = new ArrayTupleBuilder(numTokenFields + numInvListKeys);
-            this.lastTuple = new ArrayTupleReference();
-            this.btreeBulkloader = btree.createBulkLoader(btreeFillFactor, verifyInput, numElementsHint);
-            currentPageId = startPageId;
-            currentPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
-            currentPage.acquireWriteLatch();
-            invListBuilder.setTargetBuffer(currentPage.getBuffer().array(), 0);
-        }
-
-        public void pinNextPage() throws HyracksDataException {
-            currentPage.releaseWriteLatch();
-            bufferCache.unpin(currentPage);
-            currentPageId++;
-            currentPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
-            currentPage.acquireWriteLatch();
-        }
-
-        private void createAndInsertBTreeTuple() throws IndexException, HyracksDataException {
-            // Build tuple.        
-            btreeTupleBuilder.reset();
-            DataOutput output = btreeTupleBuilder.getDataOutput();
-            // Add key fields.
-            for (int i = 0; i < numTokenFields; i++) {
-                btreeTupleBuilder.addField(lastTuple.getFieldData(i), lastTuple.getFieldStart(i),
-                        lastTuple.getFieldLength(i));
-            }
-            // Add inverted-list 'pointer' value fields.
-            try {
-                output.writeInt(currentInvListStartPageId);
-                btreeTupleBuilder.addFieldEndOffset();
-                output.writeInt(currentPageId);
-                btreeTupleBuilder.addFieldEndOffset();
-                output.writeInt(currentInvListStartOffset);
-                btreeTupleBuilder.addFieldEndOffset();
-                output.writeInt(invListBuilder.getListSize());
-                btreeTupleBuilder.addFieldEndOffset();
-            } catch (IOException e) {
-                throw new HyracksDataException(e);
-            }
-            // Reset tuple reference and add it into the BTree load.
-            btreeTupleReference.reset(btreeTupleBuilder.getFieldEndOffsets(), btreeTupleBuilder.getByteArray());
-            btreeBulkloader.add(btreeTupleReference);
-        }
-
-        /**
-         * Assumptions:
-         * The first btree.getMultiComparator().getKeyFieldCount() fields in tuple
-         * are btree keys (e.g., a string token).
-         * The next invListCmp.getKeyFieldCount() fields in tuple are keys of the
-         * inverted list (e.g., primary key).
-         * Key fields of inverted list are fixed size.
-         */
-        @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
-            boolean firstElement = lastTupleBuilder.getSize() == 0;
-            boolean startNewList = firstElement;
-            if (!firstElement) {
-                // If the current and the last token don't match, we start a new list.
-                lastTuple.reset(lastTupleBuilder.getFieldEndOffsets(), lastTupleBuilder.getByteArray());
-                startNewList = tokenCmp.compare(tuple, lastTuple) != 0;
-            }
-            if (startNewList) {
-                if (!firstElement) {
-                    // Create entry in btree for last inverted list.
-                    createAndInsertBTreeTuple();
-                }
-                if (!invListBuilder.startNewList(tuple, numTokenFields)) {
-                    pinNextPage();
-                    invListBuilder.setTargetBuffer(currentPage.getBuffer().array(), 0);
-                    if (!invListBuilder.startNewList(tuple, numTokenFields)) {
-                        throw new IllegalStateException("Failed to create first inverted list.");
-                    }
-                }
-                currentInvListStartPageId = currentPageId;
-                currentInvListStartOffset = invListBuilder.getPos();
-            } else {
-                if (invListCmp.compare(tuple, lastTuple, numTokenFields) == 0) {
-                    // Duplicate inverted-list element.
-                    return;
-                }
-            }
-
-            // Append to current inverted list.
-            if (!invListBuilder.appendElement(tuple, numTokenFields, numInvListKeys)) {
-                pinNextPage();
-                invListBuilder.setTargetBuffer(currentPage.getBuffer().array(), 0);
-                if (!invListBuilder.appendElement(tuple, numTokenFields, numInvListKeys)) {
-                    throw new IllegalStateException(
-                            "Failed to append element to inverted list after switching to a new page.");
-                }
-            }
-
-            if (verifyInput && lastTupleBuilder.getSize() != 0) {
-                if (allCmp.compare(tuple, lastTuple) <= 0) {
-                    throw new UnsortedInputException(
-                            "Input stream given to OnDiskInvertedIndex bulk load is not sorted.");
-                }
-            }
-
-            // Remember last tuple by creating a copy.
-            // TODO: This portion can be optimized by only copying the token when it changes, and using the last appended inverted-list element as a reference.
-            lastTupleBuilder.reset();
-            for (int i = 0; i < tuple.getFieldCount(); i++) {
-                lastTupleBuilder.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-            }
-        }
-
-        @Override
-        public void end() throws IndexException, HyracksDataException {
-            // The last tuple builder is empty if add() was never called.
-            if (lastTupleBuilder.getSize() != 0) {
-                createAndInsertBTreeTuple();
-            }
-            btreeBulkloader.end();
-
-            if (currentPage != null) {
-                currentPage.releaseWriteLatch();
-                bufferCache.unpin(currentPage);
-            }
-            invListsMaxPageId = currentPageId;
-        }
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return bufferCache;
-    }
-
-    public int getInvListsFileId() {
-        return fileId;
-    }
-
-    public int getInvListsMaxPageId() {
-        return invListsMaxPageId;
-    }
-
-    public IBinaryComparatorFactory[] getInvListCmpFactories() {
-        return invListCmpFactories;
-    }
-
-    public ITypeTraits[] getInvListTypeTraits() {
-        return invListTypeTraits;
-    }
-
-    public BTree getBTree() {
-        return btree;
-    }
-
-    public class OnDiskInvertedIndexAccessor implements IInvertedIndexAccessor {
-        private final OnDiskInvertedIndex index;
-        private final IInvertedIndexSearcher searcher;
-        private final IIndexOperationContext opCtx = new OnDiskInvertedIndexOpContext(btree);
-
-        public OnDiskInvertedIndexAccessor(OnDiskInvertedIndex index) {
-            this.index = index;
-            this.searcher = new TOccurrenceSearcher(ctx, index);
-        }
-
-        // Let subclasses initialize.
-        protected OnDiskInvertedIndexAccessor(OnDiskInvertedIndex index, IInvertedIndexSearcher searcher) {
-            this.index = index;
-            this.searcher = searcher;
-        }
-
-        @Override
-        public IIndexCursor createSearchCursor() {
-            return new OnDiskInvertedIndexSearchCursor(searcher, index.getInvListTypeTraits().length);
-        }
-
-        @Override
-        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
-                IndexException {
-            searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred, opCtx);
-        }
-
-        @Override
-        public IInvertedListCursor createInvertedListCursor() {
-            return index.createInvertedListCursor();
-        }
-
-        @Override
-        public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-                throws HyracksDataException, IndexException {
-            index.openInvertedListCursor(listCursor, searchKey, opCtx);
-        }
-
-        @Override
-        public IIndexCursor createRangeSearchCursor() {
-            return new OnDiskInvertedIndexRangeSearchCursor(index, opCtx);
-        }
-
-        @Override
-        public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
-                IndexException {
-            OnDiskInvertedIndexRangeSearchCursor rangeSearchCursor = (OnDiskInvertedIndexRangeSearchCursor) cursor;
-            rangeSearchCursor.open(null, searchPred);
-        }
-
-        @Override
-        public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
-            throw new UnsupportedOperationException("Insert not supported by inverted index.");
-        }
-
-        @Override
-        public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
-            throw new UnsupportedOperationException("Update not supported by inverted index.");
-        }
-
-        @Override
-        public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
-            throw new UnsupportedOperationException("Delete not supported by inverted index.");
-        }
-
-        @Override
-        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            throw new UnsupportedOperationException("Upsert not supported by inverted index.");
-        }
-    }
-
-    @Override
-    public IIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new OnDiskInvertedIndexAccessor(this);
-    }
-
-    // This is just a dummy hyracks context for allocating frames for temporary
-    // results during inverted index searches.
-    // TODO: In the future we should use the real HyracksTaskContext to track
-    // frame usage.
-    public static class DefaultHyracksCommonContext implements IHyracksCommonContext {
-        private final int FRAME_SIZE = 32768;
-
-        @Override
-        public int getFrameSize() {
-            return FRAME_SIZE;
-        }
-
-        @Override
-        public IIOManager getIOManager() {
-            return null;
-        }
-
-        @Override
-        public ByteBuffer allocateFrame() {
-            return ByteBuffer.allocate(FRAME_SIZE);
-        }
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-            throws IndexException {
-        try {
-            return new OnDiskInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, rootPageId, fileId);
-        } catch (HyracksDataException e) {
-            throw new InvertedIndexException(e);
-        }
-    }
-
-    @Override
-    public void validate() throws HyracksDataException {
-        btree.validate();
-        // Scan the btree and validate the order of elements in each inverted-list.
-        IIndexAccessor btreeAccessor = btree.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-        IIndexCursor btreeCursor = btreeAccessor.createSearchCursor();
-        MultiComparator btreeCmp = MultiComparator.createIgnoreFieldLength(btree.getComparatorFactories());
-        RangePredicate rangePred = new RangePredicate(null, null, true, true, btreeCmp, btreeCmp);
-        int[] fieldPermutation = new int[tokenTypeTraits.length];
-        for (int i = 0; i < tokenTypeTraits.length; i++) {
-            fieldPermutation[i] = i;
-        }
-        PermutingTupleReference tokenTuple = new PermutingTupleReference(fieldPermutation);
-
-        IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        IInvertedListCursor invListCursor = invIndexAccessor.createInvertedListCursor();
-        MultiComparator invListCmp = MultiComparator.createIgnoreFieldLength(invListCmpFactories);
-
-        try {
-            // Search key for finding an inverted-list in the actual index.
-            ArrayTupleBuilder prevBuilder = new ArrayTupleBuilder(invListTypeTraits.length);
-            ArrayTupleReference prevTuple = new ArrayTupleReference();
-            btreeAccessor.search(btreeCursor, rangePred);
-            while (btreeCursor.hasNext()) {
-                btreeCursor.next();
-                tokenTuple.reset(btreeCursor.getTuple());
-                // Validate inverted list by checking that the elements are totally ordered.
-                invIndexAccessor.openInvertedListCursor(invListCursor, tokenTuple);
-                invListCursor.pinPages();
-                try {
-                    if (invListCursor.hasNext()) {
-                        invListCursor.next();
-                        ITupleReference invListElement = invListCursor.getTuple();
-                        // Initialize prev tuple.
-                        TupleUtils.copyTuple(prevBuilder, invListElement, invListElement.getFieldCount());
-                        prevTuple.reset(prevBuilder.getFieldEndOffsets(), prevBuilder.getByteArray());
-                    }
-                    while (invListCursor.hasNext()) {
-                        invListCursor.next();
-                        ITupleReference invListElement = invListCursor.getTuple();
-                        // Compare with previous element.
-                        if (invListCmp.compare(invListElement, prevTuple) <= 0) {
-                            throw new HyracksDataException("Index validation failed.");
-                        }
-                        // Set new prevTuple.
-                        TupleUtils.copyTuple(prevBuilder, invListElement, invListElement.getFieldCount());
-                        prevTuple.reset(prevBuilder.getFieldEndOffsets(), prevBuilder.getByteArray());
-                    }
-                } finally {
-                    invListCursor.unpinPages();
-                }
-            }
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        } finally {
-            btreeCursor.close();
-        }
-    }
-
-    @Override
-    public long getMemoryAllocationSize() {
-        return 0;
-    }
-
-    protected static ITypeTraits[] getBTreeTypeTraits(ITypeTraits[] tokenTypeTraits) {
-        ITypeTraits[] btreeTypeTraits = new ITypeTraits[tokenTypeTraits.length + btreeValueTypeTraits.length];
-        // Set key type traits.
-        for (int i = 0; i < tokenTypeTraits.length; i++) {
-            btreeTypeTraits[i] = tokenTypeTraits[i];
-        }
-        // Set value type traits.
-        for (int i = 0; i < btreeValueTypeTraits.length; i++) {
-            btreeTypeTraits[i + tokenTypeTraits.length] = btreeValueTypeTraits[i];
-        }
-        return btreeTypeTraits;
-    }
-
-    @Override
-    public ITypeTraits[] getTokenTypeTraits() {
-        return tokenTypeTraits;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getTokenCmpFactories() {
-        return tokenCmpFactories;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexFactory.java
deleted file mode 100644
index 5401519..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexFactory.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.io.File;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.IndexFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexFileNameMapper;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilderFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class OnDiskInvertedIndexFactory extends IndexFactory<IInvertedIndex> {
-
-    protected final IInvertedListBuilderFactory invListBuilderFactory;
-    protected final ITypeTraits[] invListTypeTraits;
-    protected final IBinaryComparatorFactory[] invListCmpFactories;
-    protected final ITypeTraits[] tokenTypeTraits;
-    protected final IBinaryComparatorFactory[] tokenCmpFactories;
-    protected final IInvertedIndexFileNameMapper fileNameMapper;
-
-    public OnDiskInvertedIndexFactory(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IInvertedListBuilderFactory invListBuilderFactory, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IInvertedIndexFileNameMapper fileNameMapper) {
-        super(bufferCache, fileMapProvider, null);
-        this.invListBuilderFactory = invListBuilderFactory;
-        this.invListTypeTraits = invListTypeTraits;
-        this.invListCmpFactories = invListCmpFactories;
-        this.tokenTypeTraits = tokenTypeTraits;
-        this.tokenCmpFactories = tokenCmpFactories;
-        this.fileNameMapper = fileNameMapper;
-    }
-
-    @Override
-    public IInvertedIndex createIndexInstance(FileReference dictBTreeFile) throws IndexException {
-        String invListsFilePath = fileNameMapper.getInvListsFilePath(dictBTreeFile.getFile().getPath());
-        FileReference invListsFile = new FileReference(new File(invListsFilePath));
-        IInvertedListBuilder invListBuilder = invListBuilderFactory.create();
-        return new OnDiskInvertedIndex(bufferCache, fileMapProvider, invListBuilder, invListTypeTraits,
-                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, dictBTreeFile, invListsFile);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexOpContext.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexOpContext.java
deleted file mode 100644
index 9e6194e..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexOpContext.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public class OnDiskInvertedIndexOpContext implements IIndexOperationContext {
-
-    public final RangePredicate btreePred = new RangePredicate(null, null, true, true, null, null);
-    public IIndexAccessor btreeAccessor;
-    public IIndexCursor btreeCursor;
-    public MultiComparator searchCmp;
-    // For prefix search on partitioned indexes.
-    public MultiComparator prefixSearchCmp;
-
-    public OnDiskInvertedIndexOpContext(BTree btree) {
-        // TODO: Ignore opcallbacks for now.
-        btreeAccessor = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        btreeCursor = btreeAccessor.createSearchCursor();
-        searchCmp = MultiComparator.createIgnoreFieldLength(btree.getComparatorFactories());
-        if (btree.getComparatorFactories().length > 1) {
-            prefixSearchCmp = MultiComparator.create(btree.getComparatorFactories(), 0, 1);
-        }
-    }
-
-    @Override
-    public void reset() {
-        // Nothing to be done here, only search operation supported.
-    }
-
-    @Override
-    public void setOperation(IndexOperation newOp) {
-        // Nothing to be done here, only search operation supported.
-    }
-
-    @Override
-    public IndexOperation getOperation() {
-        return IndexOperation.SEARCH;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
deleted file mode 100644
index b41b0d1..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-
-/**
- * Scans a range of tokens, returning tuples containing a token and an inverted-list element.
- */
-public class OnDiskInvertedIndexRangeSearchCursor implements IIndexCursor {
-
-    private final BTree btree;
-    private final IIndexAccessor btreeAccessor;
-    private final IInvertedIndex invIndex;
-    private final IIndexOperationContext opCtx;
-    private final IInvertedListCursor invListCursor;
-    private boolean unpinNeeded;
-    
-    private final IIndexCursor btreeCursor;
-    private RangePredicate btreePred;
-
-    private final PermutingTupleReference tokenTuple;
-    private ConcatenatingTupleReference concatTuple;
-
-    public OnDiskInvertedIndexRangeSearchCursor(IInvertedIndex invIndex, IIndexOperationContext opCtx) {
-        this.btree = ((OnDiskInvertedIndex) invIndex).getBTree();
-        this.btreeAccessor = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        this.invIndex = invIndex;
-        this.opCtx = opCtx;
-        // Project away non-token fields of the BTree tuples.
-        int[] fieldPermutation = new int[invIndex.getTokenTypeTraits().length];
-        for (int i = 0; i < invIndex.getTokenTypeTraits().length; i++) {
-            fieldPermutation[i] = i;
-        }
-        tokenTuple = new PermutingTupleReference(fieldPermutation);
-        btreeCursor = btreeAccessor.createSearchCursor();
-        concatTuple = new ConcatenatingTupleReference(2);
-        invListCursor = invIndex.createInvertedListCursor();
-        unpinNeeded = false;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
-        this.btreePred = (RangePredicate) searchPred;
-        try {
-            btreeAccessor.search(btreeCursor, btreePred);
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }        
-        invListCursor.pinPages();
-        unpinNeeded = true;
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        if (invListCursor.hasNext()) {
-            return true;
-        }
-        if (unpinNeeded) {
-            invListCursor.unpinPages();
-            unpinNeeded = false;
-        }
-        if (!btreeCursor.hasNext()) {
-            return false;
-        }
-        btreeCursor.next();
-        tokenTuple.reset(btreeCursor.getTuple());
-        try {
-            invIndex.openInvertedListCursor(invListCursor, tokenTuple, opCtx);
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-        invListCursor.pinPages();
-        invListCursor.hasNext();
-        unpinNeeded = true;
-        concatTuple.reset();
-        concatTuple.addTuple(tokenTuple);
-        return true;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        invListCursor.next();
-        if (concatTuple.hasMaxTuples()) {
-            concatTuple.removeLastTuple();
-        }
-        concatTuple.addTuple(invListCursor.getTuple());
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (unpinNeeded) {
-            invListCursor.unpinPages();
-            unpinNeeded = false;
-        }
-        btreeCursor.close();
-    }
-
-    @Override
-    public void reset() throws HyracksDataException, IndexException {
-        if (unpinNeeded) {
-            invListCursor.unpinPages();
-            unpinNeeded = false;
-        }
-        btreeCursor.close();
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return concatTuple;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexSearchCursor.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexSearchCursor.java
deleted file mode 100644
index 3060ef4..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexSearchCursor.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
-
-public class OnDiskInvertedIndexSearchCursor implements IIndexCursor {
-
-    private List<ByteBuffer> resultBuffers;
-    private int numResultBuffers;
-    private int currentBufferIndex = 0;
-    private int tupleIndex = 0;
-    private final IInvertedIndexSearcher invIndexSearcher;
-    private final IFrameTupleAccessor fta;
-    private final FixedSizeTupleReference frameTuple;
-    private final PermutingTupleReference resultTuple;
-    
-    public OnDiskInvertedIndexSearchCursor(IInvertedIndexSearcher invIndexSearcher, int numInvListFields) {
-        this.invIndexSearcher = invIndexSearcher;
-        this.fta = invIndexSearcher.createResultFrameTupleAccessor();
-        this.frameTuple = (FixedSizeTupleReference) invIndexSearcher.createResultFrameTupleReference();
-        // Project away the occurrence count from the result tuples.
-        int[] fieldPermutation = new int[numInvListFields];
-        for (int i = 0; i < numInvListFields; i++) {
-            fieldPermutation[i] = i;
-        }
-        resultTuple = new PermutingTupleReference(fieldPermutation);
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        currentBufferIndex = 0;
-        tupleIndex = 0;
-        resultBuffers = invIndexSearcher.getResultBuffers();
-        numResultBuffers = invIndexSearcher.getNumValidResultBuffers();
-        if (numResultBuffers > 0) {
-            fta.reset(resultBuffers.get(0));
-        }
-    }
-    
-    @Override
-    public boolean hasNext() {
-        if (currentBufferIndex < numResultBuffers && tupleIndex < fta.getTupleCount()) {
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    @Override
-    public void next() {
-        frameTuple.reset(fta.getBuffer().array(), fta.getTupleStartOffset(tupleIndex));
-        resultTuple.reset(frameTuple);
-        tupleIndex++;
-        if (tupleIndex >= fta.getTupleCount()) {
-            if (currentBufferIndex + 1 < numResultBuffers) {
-                currentBufferIndex++;
-                fta.reset(resultBuffers.get(currentBufferIndex));
-                tupleIndex = 0;
-            }
-        }        
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return resultTuple;
-    }
-
-    @Override
-    public void reset() {
-        currentBufferIndex = 0;
-        tupleIndex = 0;
-        invIndexSearcher.reset();
-        resultBuffers = invIndexSearcher.getResultBuffers();
-        numResultBuffers = invIndexSearcher.getNumValidResultBuffers();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        currentBufferIndex = 0;
-        tupleIndex = 0;
-        resultBuffers = null;
-        numResultBuffers = 0;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
deleted file mode 100644
index 6e395e7..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IPartitionedInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedListPartitions;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.PartitionedTOccurrenceSearcher;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class PartitionedOnDiskInvertedIndex extends OnDiskInvertedIndex implements IPartitionedInvertedIndex {
-
-    protected final int PARTITIONING_NUM_TOKENS_FIELD = 1;
-
-    public PartitionedOnDiskInvertedIndex(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IInvertedListBuilder invListBuilder, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, FileReference btreeFile, FileReference invListsFile)
-            throws IndexException {
-        super(bufferCache, fileMapProvider, invListBuilder, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                tokenCmpFactories, btreeFile, invListsFile);
-    }
-
-    public class PartitionedOnDiskInvertedIndexAccessor extends OnDiskInvertedIndexAccessor {
-        public PartitionedOnDiskInvertedIndexAccessor(OnDiskInvertedIndex index) {
-            super(index, new PartitionedTOccurrenceSearcher(ctx, index));
-        }
-    }
-
-    @Override
-    public IIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new PartitionedOnDiskInvertedIndexAccessor(this);
-    }
-
-    @Override
-    public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
-            short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
-            ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException {
-        PartitionedTOccurrenceSearcher partSearcher = (PartitionedTOccurrenceSearcher) searcher;
-        OnDiskInvertedIndexOpContext ctx = (OnDiskInvertedIndexOpContext) ictx;
-        ITupleReference lowSearchKey = null;
-        ITupleReference highSearchKey = null;
-        partSearcher.setNumTokensBoundsInSearchKeys(numTokensLowerBound, numTokensUpperBound);
-        if (numTokensLowerBound < 0) {
-            ctx.btreePred.setLowKeyComparator(ctx.prefixSearchCmp);
-            lowSearchKey = partSearcher.getPrefixSearchKey();
-        } else {
-            ctx.btreePred.setLowKeyComparator(ctx.searchCmp);
-            lowSearchKey = partSearcher.getFullLowSearchKey();
-        }
-        if (numTokensUpperBound < 0) {
-            ctx.btreePred.setHighKeyComparator(ctx.prefixSearchCmp);
-            highSearchKey = partSearcher.getPrefixSearchKey();
-        } else {
-            ctx.btreePred.setHighKeyComparator(ctx.searchCmp);
-            highSearchKey = partSearcher.getFullHighSearchKey();
-        }
-        ctx.btreePred.setLowKey(lowSearchKey, true);
-        ctx.btreePred.setHighKey(highSearchKey, true);
-        ctx.btreeAccessor.search(ctx.btreeCursor, ctx.btreePred);
-        boolean tokenExists = false;
-        try {
-            while (ctx.btreeCursor.hasNext()) {
-                ctx.btreeCursor.next();
-                ITupleReference btreeTuple = ctx.btreeCursor.getTuple();
-                short numTokens = ShortSerializerDeserializer.getShort(
-                        btreeTuple.getFieldData(PARTITIONING_NUM_TOKENS_FIELD),
-                        btreeTuple.getFieldStart(PARTITIONING_NUM_TOKENS_FIELD));
-                IInvertedListCursor invListCursor = partSearcher.getCachedInvertedListCursor();
-                resetInvertedListCursor(btreeTuple, invListCursor);
-                cursorsOrderedByTokens.add(invListCursor);
-                invListPartitions.addInvertedListCursor(invListCursor, numTokens);
-                tokenExists = true;
-            }
-        } finally {
-            ctx.btreeCursor.close();
-            ctx.btreeCursor.reset();
-        }
-        return tokenExists;
-    }
-
-    @Override
-    public boolean isEmpty() {
-        return false;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexFactory.java
deleted file mode 100644
index 854a30f..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexFactory.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.io.File;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexFileNameMapper;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilderFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class PartitionedOnDiskInvertedIndexFactory extends OnDiskInvertedIndexFactory {
-    
-    public PartitionedOnDiskInvertedIndexFactory(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IInvertedListBuilderFactory invListBuilderFactory, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IInvertedIndexFileNameMapper fileNameMapper) {
-        super(bufferCache, fileMapProvider, invListBuilderFactory, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                tokenCmpFactories, fileNameMapper);
-    }
-
-    @Override
-    public IInvertedIndex createIndexInstance(FileReference dictBTreeFile) throws IndexException {
-        String invListsFilePath = fileNameMapper.getInvListsFilePath(dictBTreeFile.getFile().getPath());
-        FileReference invListsFile = new FileReference(new File(invListsFilePath));
-        IInvertedListBuilder invListBuilder = invListBuilderFactory.create();
-        return new PartitionedOnDiskInvertedIndex(bufferCache, fileMapProvider, invListBuilder, invListTypeTraits,
-                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, dictBTreeFile, invListsFile);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
deleted file mode 100644
index d973967..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IToken;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.ObjectCache;
-
-public abstract class AbstractTOccurrenceSearcher implements IInvertedIndexSearcher {
-    protected static final RecordDescriptor QUERY_TOKEN_REC_DESC = new RecordDescriptor(
-            new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
-
-    protected final int OBJECT_CACHE_INIT_SIZE = 10;
-    protected final int OBJECT_CACHE_EXPAND_SIZE = 10;
-
-    protected final IHyracksCommonContext ctx;
-
-    protected final InvertedListMerger invListMerger;
-    protected final SearchResult searchResult;
-    protected final IInvertedIndex invIndex;
-    protected final MultiComparator invListCmp;
-
-    protected final ArrayTupleBuilder queryTokenBuilder = new ArrayTupleBuilder(QUERY_TOKEN_REC_DESC.getFieldCount());
-    protected final ByteBuffer queryTokenFrame;
-    protected final FrameTupleAppender queryTokenAppender;
-    protected final FrameTupleAccessor queryTokenAccessor;
-    protected final FrameTupleReference searchKey = new FrameTupleReference();
-
-    protected int occurrenceThreshold;
-
-    protected final IObjectFactory<IInvertedListCursor> invListCursorFactory;
-    protected final ObjectCache<IInvertedListCursor> invListCursorCache;
-
-    public AbstractTOccurrenceSearcher(IHyracksCommonContext ctx, IInvertedIndex invIndex) {
-        this.ctx = ctx;
-        this.invListMerger = new InvertedListMerger(ctx, invIndex);
-        this.searchResult = new SearchResult(invIndex.getInvListTypeTraits(), ctx);
-        this.invIndex = invIndex;
-        this.invListCmp = MultiComparator.create(invIndex.getInvListCmpFactories());
-        this.invListCursorFactory = new InvertedListCursorFactory(invIndex);
-        this.invListCursorCache = new ObjectCache<IInvertedListCursor>(invListCursorFactory, OBJECT_CACHE_INIT_SIZE,
-                OBJECT_CACHE_EXPAND_SIZE);
-        this.queryTokenFrame = ctx.allocateFrame();
-        this.queryTokenAppender = new FrameTupleAppender(ctx.getFrameSize());
-        this.queryTokenAccessor = new FrameTupleAccessor(ctx.getFrameSize(), QUERY_TOKEN_REC_DESC);
-        this.queryTokenAccessor.reset(queryTokenFrame);
-    }
-
-    public void reset() {
-        searchResult.clear();
-        invListMerger.reset();
-    }
-
-    protected void tokenizeQuery(InvertedIndexSearchPredicate searchPred) throws HyracksDataException,
-            OccurrenceThresholdPanicException {
-        ITupleReference queryTuple = searchPred.getQueryTuple();
-        int queryFieldIndex = searchPred.getQueryFieldIndex();
-        IBinaryTokenizer queryTokenizer = searchPred.getQueryTokenizer();
-
-        queryTokenAppender.reset(queryTokenFrame, true);
-        queryTokenizer.reset(queryTuple.getFieldData(queryFieldIndex), queryTuple.getFieldStart(queryFieldIndex),
-                queryTuple.getFieldLength(queryFieldIndex));
-
-        while (queryTokenizer.hasNext()) {
-            queryTokenizer.next();
-            queryTokenBuilder.reset();
-            try {
-                IToken token = queryTokenizer.getToken();
-                token.serializeToken(queryTokenBuilder.getFieldData());
-                queryTokenBuilder.addFieldEndOffset();
-                // WARNING: assuming one frame is big enough to hold all tokens
-                queryTokenAppender.append(queryTokenBuilder.getFieldEndOffsets(), queryTokenBuilder.getByteArray(), 0,
-                        queryTokenBuilder.getSize());
-            } catch (IOException e) {
-                throw new HyracksDataException(e);
-            }
-        }
-    }
-
-    public IFrameTupleAccessor createResultFrameTupleAccessor() {
-        return new FixedSizeFrameTupleAccessor(ctx.getFrameSize(), searchResult.getTypeTraits());
-    }
-
-    public ITupleReference createResultFrameTupleReference() {
-        return new FixedSizeTupleReference(searchResult.getTypeTraits());
-    }
-
-    @Override
-    public List<ByteBuffer> getResultBuffers() {
-        return searchResult.getBuffers();
-    }
-
-    @Override
-    public int getNumValidResultBuffers() {
-        return searchResult.getCurrentBufferIndex() + 1;
-    }
-
-    public int getOccurrenceThreshold() {
-        return occurrenceThreshold;
-    }
-
-    public void printNewResults(int maxResultBufIdx, List<ByteBuffer> buffer) {
-        StringBuffer strBuffer = new StringBuffer();
-        FixedSizeFrameTupleAccessor resultFrameTupleAcc = searchResult.getAccessor();
-        for (int i = 0; i <= maxResultBufIdx; i++) {
-            ByteBuffer testBuf = buffer.get(i);
-            resultFrameTupleAcc.reset(testBuf);
-            for (int j = 0; j < resultFrameTupleAcc.getTupleCount(); j++) {
-                strBuffer.append(IntegerSerializerDeserializer.getInt(resultFrameTupleAcc.getBuffer().array(),
-                        resultFrameTupleAcc.getFieldStartOffset(j, 0)) + ",");
-                strBuffer.append(IntegerSerializerDeserializer.getInt(resultFrameTupleAcc.getBuffer().array(),
-                        resultFrameTupleAcc.getFieldStartOffset(j, 1)) + " ");
-            }
-        }
-        System.out.println(strBuffer.toString());
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
deleted file mode 100644
index 493063e..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-
-public class ArrayListFactory<T> implements IObjectFactory<ArrayList<T>>{
-    @Override
-    public ArrayList<T> create() {
-        return new ArrayList<T>();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveSearchModifier.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveSearchModifier.java
deleted file mode 100644
index 318f1e1..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveSearchModifier.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-
-public class ConjunctiveSearchModifier implements IInvertedIndexSearchModifier {
-
-    @Override
-    public int getOccurrenceThreshold(int numQueryTokens) {
-        return numQueryTokens;
-    }
-
-    @Override
-    public int getNumPrefixLists(int occurrenceThreshold, int numInvLists) {
-        return 1;
-    }
-    
-    @Override
-    public String toString() {
-        return "Conjunctive Search Modifier";
-    }
-
-    @Override
-    public short getNumTokensLowerBound(short numQueryTokens) {
-        return -1;
-    }
-
-    @Override
-    public short getNumTokensUpperBound(short numQueryTokens) {
-        return -1;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveSearchModifierFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveSearchModifierFactory.java
deleted file mode 100644
index 83e1f4b..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveSearchModifierFactory.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-
-public class ConjunctiveSearchModifierFactory implements IInvertedIndexSearchModifierFactory {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public IInvertedIndexSearchModifier createSearchModifier() {
-        return new ConjunctiveSearchModifier();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/EditDistanceSearchModifier.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/EditDistanceSearchModifier.java
deleted file mode 100644
index 9c06f4d..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/EditDistanceSearchModifier.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-
-public class EditDistanceSearchModifier implements IInvertedIndexSearchModifier {
-
-    private int gramLength;
-    private int edThresh;
-
-    public EditDistanceSearchModifier(int gramLength, int edThresh) {
-        this.gramLength = gramLength;
-        this.edThresh = edThresh;
-    }
-
-    @Override
-    public int getOccurrenceThreshold(int numQueryTokens) {
-        return numQueryTokens - edThresh * gramLength;
-    }
-
-    @Override
-    public int getNumPrefixLists(int occurrenceThreshold, int numInvLists) {
-        return numInvLists - occurrenceThreshold + 1;
-    }
-
-    @Override
-    public short getNumTokensLowerBound(short numQueryTokens) {
-        return (short) (numQueryTokens - edThresh);
-    }
-
-    @Override
-    public short getNumTokensUpperBound(short numQueryTokens) {
-        return (short) (numQueryTokens + edThresh);
-    }
-
-    public int getGramLength() {
-        return gramLength;
-    }
-
-    public void setGramLength(int gramLength) {
-        this.gramLength = gramLength;
-    }
-
-    public int getEdThresh() {
-        return edThresh;
-    }
-
-    public void setEdThresh(int edThresh) {
-        this.edThresh = edThresh;
-    }
-
-    @Override
-    public String toString() {
-        return "Edit Distance Search Modifier, GramLen: " + gramLength + ", Threshold: " + edThresh;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/EditDistanceSearchModifierFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/EditDistanceSearchModifierFactory.java
deleted file mode 100644
index 879c34e..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/EditDistanceSearchModifierFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-
-public class EditDistanceSearchModifierFactory implements IInvertedIndexSearchModifierFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int gramLength;
-    private final int edThresh;
-    
-    public EditDistanceSearchModifierFactory(int gramLength, int edThresh) {
-        this.gramLength = gramLength;
-        this.edThresh = edThresh;
-    }
-    
-    @Override
-    public IInvertedIndexSearchModifier createSearchModifier() {
-        return new EditDistanceSearchModifier(gramLength, edThresh);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedIndexSearchPredicate.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedIndexSearchPredicate.java
deleted file mode 100644
index 2065691..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedIndexSearchPredicate.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-
-public class InvertedIndexSearchPredicate implements ISearchPredicate {
-    private static final long serialVersionUID = 1L;
-
-    private ITupleReference queryTuple;
-    private int queryFieldIndex;
-    private final IBinaryTokenizer queryTokenizer;
-    private final IInvertedIndexSearchModifier searchModifier;    
-    
-    public InvertedIndexSearchPredicate(IBinaryTokenizer queryTokenizer, IInvertedIndexSearchModifier searchModifier) {
-        this.queryTokenizer = queryTokenizer;
-        this.searchModifier = searchModifier;
-    }
-    
-    public void setQueryTuple(ITupleReference queryTuple) {
-        this.queryTuple = queryTuple;
-    }
-    
-    public ITupleReference getQueryTuple() {
-        return queryTuple;
-    }
-    
-    public void setQueryFieldIndex(int queryFieldIndex) {
-        this.queryFieldIndex = queryFieldIndex;
-    }
-    
-    public int getQueryFieldIndex() {
-        return queryFieldIndex;
-    }
-    
-    public IInvertedIndexSearchModifier getSearchModifier() {
-        return searchModifier;
-    }
-    
-    public IBinaryTokenizer getQueryTokenizer() {
-        return queryTokenizer;
-    }
-    
-    @Override
-    public MultiComparator getLowKeyComparator() {
-        // TODO: This doesn't make sense for an inverted index. Change ISearchPredicate interface.
-        return null;
-    }
-
-    @Override
-    public MultiComparator getHighKeyComparator() {
-        // TODO: This doesn't make sense for an inverted index. Change ISearchPredicate interface.
-        return null;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListCursorFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListCursorFactory.java
deleted file mode 100644
index b4b3c43..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListCursorFactory.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-
-public class InvertedListCursorFactory implements IObjectFactory<IInvertedListCursor> {
-
-    private final IInvertedIndex invIndex;
-
-    public InvertedListCursorFactory(IInvertedIndex invIndex) {
-        this.invIndex = invIndex;
-    }
-
-    @Override
-    public IInvertedListCursor create() {
-        return invIndex.createInvertedListCursor();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
deleted file mode 100644
index fbdfd64..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
+++ /dev/null
@@ -1,330 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-
-import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeTupleReference;
-
-// TODO: The merge procedure is rather confusing regarding cursor positions, hasNext() calls etc.
-// Needs an overhaul some time.
-public class InvertedListMerger {
-
-    protected final MultiComparator invListCmp;
-    protected SearchResult prevSearchResult;
-    protected SearchResult newSearchResult;
-
-    public InvertedListMerger(IHyracksCommonContext ctx, IInvertedIndex invIndex) {
-        this.invListCmp = MultiComparator.createIgnoreFieldLength(invIndex.getInvListCmpFactories());
-        this.prevSearchResult = new SearchResult(invIndex.getInvListTypeTraits(), ctx);
-        this.newSearchResult = new SearchResult(prevSearchResult);
-    }
-
-    public void merge(ArrayList<IInvertedListCursor> invListCursors, int occurrenceThreshold, int numPrefixLists,
-            SearchResult searchResult) throws HyracksDataException, IndexException {
-        Collections.sort(invListCursors);
-        int numInvLists = invListCursors.size();
-        SearchResult result = null;
-        for (int i = 0; i < numInvLists; i++) {
-            SearchResult swapTemp = prevSearchResult;
-            prevSearchResult = newSearchResult;
-            newSearchResult = swapTemp;
-            newSearchResult.reset();
-            if (i + 1 != numInvLists) {
-                // Use temporary search results when not merging last list.
-                result = newSearchResult;
-            } else {
-                // When merging the last list, append results to the final search result.
-                result = searchResult;
-            }
-            IInvertedListCursor invListCursor = invListCursors.get(i);
-            invListCursor.pinPages();
-            if (i < numPrefixLists) {
-                // Merge prefix list.
-                mergePrefixList(invListCursor, prevSearchResult, result);
-            } else {
-                // Merge suffix list.
-                int numInvListElements = invListCursor.size();
-                int currentNumResults = prevSearchResult.getNumResults();
-                // Should we binary search the next list or should we sort-merge it?
-                if (currentNumResults * Math.log(numInvListElements) < currentNumResults + numInvListElements) {
-                    mergeSuffixListProbe(invListCursor, prevSearchResult, result, i, numInvLists,
-                            occurrenceThreshold);
-                } else {
-                    mergeSuffixListScan(invListCursor, prevSearchResult, result, i, numInvLists,
-                            occurrenceThreshold);
-                }
-            }
-            invListCursor.unpinPages();
-        }
-    }
-
-    protected void mergeSuffixListProbe(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
-            SearchResult newSearchResult, int invListIx, int numInvLists, int occurrenceThreshold)
-            throws HyracksDataException, IndexException {
-
-        int prevBufIdx = 0;
-        int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
-        ByteBuffer prevCurrentBuffer = prevSearchResult.getBuffers().get(0);
-
-        FixedSizeFrameTupleAccessor resultFrameTupleAcc = prevSearchResult.getAccessor();
-        FixedSizeTupleReference resultTuple = prevSearchResult.getTuple();
-
-        int resultTidx = 0;
-
-        resultFrameTupleAcc.reset(prevCurrentBuffer);
-
-        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
-
-            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
-            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
-
-            if (invListCursor.containsKey(resultTuple, invListCmp)) {
-                count++;
-                newSearchResult.append(resultTuple, count);
-            } else {
-                if (count + numInvLists - invListIx > occurrenceThreshold) {
-                    newSearchResult.append(resultTuple, count);
-                }
-            }
-
-            resultTidx++;
-            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
-                prevBufIdx++;
-                if (prevBufIdx <= maxPrevBufIdx) {
-                    prevCurrentBuffer = prevSearchResult.getBuffers().get(prevBufIdx);
-                    resultFrameTupleAcc.reset(prevCurrentBuffer);
-                    resultTidx = 0;
-                }
-            }
-        }
-    }
-
-    protected void mergeSuffixListScan(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
-            SearchResult newSearchResult, int invListIx, int numInvLists, int occurrenceThreshold)
-            throws HyracksDataException, IndexException {
-
-        int prevBufIdx = 0;
-        int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
-        ByteBuffer prevCurrentBuffer = prevSearchResult.getBuffers().get(0);
-
-        FixedSizeFrameTupleAccessor resultFrameTupleAcc = prevSearchResult.getAccessor();
-        FixedSizeTupleReference resultTuple = prevSearchResult.getTuple();
-
-        boolean advanceCursor = true;
-        boolean advancePrevResult = false;
-        int resultTidx = 0;
-
-        resultFrameTupleAcc.reset(prevCurrentBuffer);
-
-        int invListTidx = 0;
-        int invListNumTuples = invListCursor.size();
-
-        if (invListCursor.hasNext())
-            invListCursor.next();
-
-        while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
-
-            ITupleReference invListTuple = invListCursor.getTuple();
-
-            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
-
-            int cmp = invListCmp.compare(invListTuple, resultTuple);
-            if (cmp == 0) {
-                int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                        resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
-                newSearchResult.append(resultTuple, count);
-                advanceCursor = true;
-                advancePrevResult = true;
-            } else {
-                if (cmp < 0) {
-                    advanceCursor = true;
-                    advancePrevResult = false;
-                } else {
-                    int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                            resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
-                    if (count + numInvLists - invListIx > occurrenceThreshold) {
-                        newSearchResult.append(resultTuple, count);
-                    }
-                    advanceCursor = false;
-                    advancePrevResult = true;
-                }
-            }
-
-            if (advancePrevResult) {
-                resultTidx++;
-                if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
-                    prevBufIdx++;
-                    if (prevBufIdx <= maxPrevBufIdx) {
-                        prevCurrentBuffer = prevSearchResult.getBuffers().get(prevBufIdx);
-                        resultFrameTupleAcc.reset(prevCurrentBuffer);
-                        resultTidx = 0;
-                    }
-                }
-            }
-
-            if (advanceCursor) {
-                invListTidx++;
-                if (invListCursor.hasNext()) {
-                    invListCursor.next();
-                }
-            }
-        }
-
-        // append remaining elements from previous result set
-        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
-
-            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
-
-            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
-            if (count + numInvLists - invListIx > occurrenceThreshold) {
-                newSearchResult.append(resultTuple, count);
-            }
-
-            resultTidx++;
-            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
-                prevBufIdx++;
-                if (prevBufIdx <= maxPrevBufIdx) {
-                    prevCurrentBuffer = prevSearchResult.getBuffers().get(prevBufIdx);
-                    resultFrameTupleAcc.reset(prevCurrentBuffer);
-                    resultTidx = 0;
-                }
-            }
-        }
-    }
-
-    protected void mergePrefixList(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
-            SearchResult newSearchResult) throws HyracksDataException, IndexException {
-
-        int prevBufIdx = 0;
-        int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
-        ByteBuffer prevCurrentBuffer = prevSearchResult.getBuffers().get(0);
-
-        FixedSizeFrameTupleAccessor resultFrameTupleAcc = prevSearchResult.getAccessor();
-        FixedSizeTupleReference resultTuple = prevSearchResult.getTuple();
-
-        boolean advanceCursor = true;
-        boolean advancePrevResult = false;
-        int resultTidx = 0;
-
-        resultFrameTupleAcc.reset(prevCurrentBuffer);
-
-        int invListTidx = 0;
-        int invListNumTuples = invListCursor.size();
-
-        if (invListCursor.hasNext())
-            invListCursor.next();
-
-        while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
-
-            ITupleReference invListTuple = invListCursor.getTuple();
-            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
-
-            int cmp = invListCmp.compare(invListTuple, resultTuple);
-            if (cmp == 0) {
-                int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                        resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
-                newSearchResult.append(resultTuple, count);
-                advanceCursor = true;
-                advancePrevResult = true;
-            } else {
-                if (cmp < 0) {
-                    int count = 1;
-                    newSearchResult.append(invListTuple, count);
-                    advanceCursor = true;
-                    advancePrevResult = false;
-                } else {
-                    int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                            resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
-                    newSearchResult.append(resultTuple, count);
-                    advanceCursor = false;
-                    advancePrevResult = true;
-                }
-            }
-
-            if (advancePrevResult) {
-                resultTidx++;
-                if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
-                    prevBufIdx++;
-                    if (prevBufIdx <= maxPrevBufIdx) {
-                        prevCurrentBuffer = prevSearchResult.getBuffers().get(prevBufIdx);
-                        resultFrameTupleAcc.reset(prevCurrentBuffer);
-                        resultTidx = 0;
-                    }
-                }
-            }
-
-            if (advanceCursor) {
-                invListTidx++;
-                if (invListCursor.hasNext()) {
-                    invListCursor.next();
-                }
-            }
-        }
-
-        // append remaining new elements from inverted list
-        while (invListTidx < invListNumTuples) {
-            ITupleReference invListTuple = invListCursor.getTuple();
-            newSearchResult.append(invListTuple, 1);
-            invListTidx++;
-            if (invListCursor.hasNext()) {
-                invListCursor.next();
-            }
-        }
-
-        // append remaining elements from previous result set
-        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
-
-            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
-
-            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
-            newSearchResult.append(resultTuple, count);
-
-            resultTidx++;
-            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
-                prevBufIdx++;
-                if (prevBufIdx <= maxPrevBufIdx) {
-                    prevCurrentBuffer = prevSearchResult.getBuffers().get(prevBufIdx);
-                    resultFrameTupleAcc.reset(prevCurrentBuffer);
-                    resultTidx = 0;
-                }
-            }
-        }
-    }
-
-    public SearchResult createSearchResult() {
-        return new SearchResult(prevSearchResult);
-    }
-
-    public void reset() {
-        prevSearchResult.clear();
-        newSearchResult.clear();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListPartitions.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListPartitions.java
deleted file mode 100644
index 1b060e5..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/InvertedListPartitions.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.ObjectCache;
-
-public class InvertedListPartitions {
-    private final int DEFAULT_NUM_PARTITIONS = 10;
-    private final int PARTITIONS_SLACK_SIZE = 10;
-    private final int OBJECT_CACHE_INIT_SIZE = 10;
-    private final int OBJECT_CACHE_EXPAND_SIZE = 10;
-    private final IObjectFactory<ArrayList<IInvertedListCursor>> arrayListFactory;
-    private final ObjectCache<ArrayList<IInvertedListCursor>> arrayListCache;
-    private ArrayList<IInvertedListCursor>[] partitions;
-    private short minValidPartitionIndex;
-    private short maxValidPartitionIndex;
-
-    public InvertedListPartitions() {
-        this.arrayListFactory = new ArrayListFactory<IInvertedListCursor>();
-        this.arrayListCache = new ObjectCache<ArrayList<IInvertedListCursor>>(arrayListFactory, OBJECT_CACHE_INIT_SIZE,
-                OBJECT_CACHE_EXPAND_SIZE);
-    }
-
-    @SuppressWarnings("unchecked")
-    public void reset(short numTokensLowerBound, short numTokensUpperBound) {
-        if (partitions == null) {
-            int initialSize;
-            if (numTokensUpperBound < 0) {
-                initialSize = DEFAULT_NUM_PARTITIONS;
-            } else {
-                initialSize = numTokensUpperBound + 1;
-            }
-            partitions = (ArrayList<IInvertedListCursor>[]) new ArrayList[initialSize];
-        } else {
-            if (numTokensUpperBound + 1 >= partitions.length) {
-                partitions = Arrays.copyOf(partitions, numTokensUpperBound + 1);
-            }
-            Arrays.fill(partitions, null);
-        }
-        arrayListCache.reset();
-        minValidPartitionIndex = Short.MAX_VALUE;
-        maxValidPartitionIndex = Short.MIN_VALUE;
-    }
-
-    public void addInvertedListCursor(IInvertedListCursor listCursor, short numTokens) {
-        if (numTokens + 1 >= partitions.length) {
-            partitions = Arrays.copyOf(partitions, numTokens + PARTITIONS_SLACK_SIZE);
-        }
-        ArrayList<IInvertedListCursor> partitionCursors = partitions[numTokens];
-        if (partitionCursors == null) {
-            partitionCursors = arrayListCache.getNext();
-            partitionCursors.clear();
-            partitions[numTokens] = partitionCursors;
-            // Update range of valid partitions.
-            if (numTokens < minValidPartitionIndex) {
-                minValidPartitionIndex = numTokens;
-            }
-            if (numTokens > maxValidPartitionIndex) {
-                maxValidPartitionIndex = numTokens;
-            }
-        }
-        partitionCursors.add(listCursor);
-    }
-
-    public ArrayList<IInvertedListCursor>[] getPartitions() {
-        return partitions;
-    }
-
-    public short getMinValidPartitionIndex() {
-        return minValidPartitionIndex;
-    }
-
-    public short getMaxValidPartitionIndex() {
-        return maxValidPartitionIndex;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/JaccardSearchModifier.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/JaccardSearchModifier.java
deleted file mode 100644
index ede6041..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/JaccardSearchModifier.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-
-public class JaccardSearchModifier implements IInvertedIndexSearchModifier {
-
-    private float jaccThresh;
-
-    public JaccardSearchModifier(float jaccThresh) {
-        this.jaccThresh = jaccThresh;
-    }
-
-    @Override
-    public int getOccurrenceThreshold(int numQueryTokens) {
-        return Math.max((int) Math.floor((float) numQueryTokens * jaccThresh), 1);
-    }
-
-    @Override
-    public int getNumPrefixLists(int occurrenceThreshold, int numInvLists) {
-        if (numInvLists == 0) {
-            return 0;
-        }
-        return numInvLists - occurrenceThreshold + 1;
-    }
-
-    @Override
-    public short getNumTokensLowerBound(short numQueryTokens) {
-        return (short) Math.floor(numQueryTokens * jaccThresh);
-    }
-
-    @Override
-    public short getNumTokensUpperBound(short numQueryTokens) {
-        return (short) Math.ceil(numQueryTokens / jaccThresh);
-    }
-
-    public float getJaccThresh() {
-        return jaccThresh;
-    }
-
-    public void setJaccThresh(float jaccThresh) {
-        this.jaccThresh = jaccThresh;
-    }
-
-    @Override
-    public String toString() {
-        return "Jaccard Search Modifier, Threshold: " + jaccThresh;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/JaccardSearchModifierFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/JaccardSearchModifierFactory.java
deleted file mode 100644
index 270862b..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/JaccardSearchModifierFactory.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-
-public class JaccardSearchModifierFactory implements IInvertedIndexSearchModifierFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final float jaccThresh;
-
-    public JaccardSearchModifierFactory(float jaccThresh) {
-        this.jaccThresh = jaccThresh;
-    }
-
-    @Override
-    public IInvertedIndexSearchModifier createSearchModifier() {
-        return new JaccardSearchModifier(jaccThresh);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ListEditDistanceSearchModifier.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ListEditDistanceSearchModifier.java
deleted file mode 100644
index 8ae22a5..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ListEditDistanceSearchModifier.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-
-public class ListEditDistanceSearchModifier implements IInvertedIndexSearchModifier {
-
-    private int edThresh;
-
-    public ListEditDistanceSearchModifier(int edThresh) {
-        this.edThresh = edThresh;
-    }
-
-    public int getEdThresh() {
-        return edThresh;
-    }
-
-    public void setEdThresh(int edThresh) {
-        this.edThresh = edThresh;
-    }
-
-    @Override
-    public int getOccurrenceThreshold(int numQueryTokens) {
-        return numQueryTokens - edThresh;
-    }
-
-    @Override
-    public int getNumPrefixLists(int occurrenceThreshold, int numInvLists) {
-        return numInvLists - occurrenceThreshold + 1;
-    }
-
-    @Override
-    public short getNumTokensLowerBound(short numQueryTokens) {
-        return (short) (numQueryTokens - edThresh);
-    }
-
-    @Override
-    public short getNumTokensUpperBound(short numQueryTokens) {
-        return (short) (numQueryTokens + edThresh);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ListEditDistanceSearchModifierFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ListEditDistanceSearchModifierFactory.java
deleted file mode 100644
index d8f57b1..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/ListEditDistanceSearchModifierFactory.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-
-public class ListEditDistanceSearchModifierFactory implements IInvertedIndexSearchModifierFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int edThresh;
-
-    public ListEditDistanceSearchModifierFactory(int edThresh) {
-        this.edThresh = edThresh;
-    }
-
-    @Override
-    public IInvertedIndexSearchModifier createSearchModifier() {
-        return new ListEditDistanceSearchModifier(edThresh);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
deleted file mode 100644
index 3ce1f48..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IPartitionedInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
-
-public class PartitionedTOccurrenceSearcher extends AbstractTOccurrenceSearcher {
-
-    protected final ArrayTupleBuilder lowerBoundTupleBuilder = new ArrayTupleBuilder(1);
-    protected final ArrayTupleReference lowerBoundTuple = new ArrayTupleReference();
-    protected final ArrayTupleBuilder upperBoundTupleBuilder = new ArrayTupleBuilder(1);
-    protected final ArrayTupleReference upperBoundTuple = new ArrayTupleReference();
-    protected final ConcatenatingTupleReference fullLowSearchKey = new ConcatenatingTupleReference(2);
-    protected final ConcatenatingTupleReference fullHighSearchKey = new ConcatenatingTupleReference(2);
-
-    // Inverted list cursors ordered by token. Used to read relevant inverted-list partitions of one token one after
-    // the other for better I/O performance (because the partitions of one inverted list are stored contiguously in a file).
-    // The above implies that we currently require holding all inverted list for a query in memory.
-    protected final ArrayList<IInvertedListCursor> cursorsOrderedByTokens = new ArrayList<IInvertedListCursor>();
-    protected final InvertedListPartitions partitions = new InvertedListPartitions();
-
-    public PartitionedTOccurrenceSearcher(IHyracksCommonContext ctx, IInvertedIndex invIndex) {
-        super(ctx, invIndex);
-        initHelperTuples();
-    }
-
-    private void initHelperTuples() {
-        try {
-            lowerBoundTupleBuilder.reset();
-            // Write dummy value.
-            lowerBoundTupleBuilder.getDataOutput().writeShort(Short.MIN_VALUE);
-            lowerBoundTupleBuilder.addFieldEndOffset();
-            lowerBoundTuple.reset(lowerBoundTupleBuilder.getFieldEndOffsets(), lowerBoundTupleBuilder.getByteArray());
-            // Only needed for setting the number of fields in searchKey.
-            searchKey.reset(queryTokenAccessor, 0);
-            fullLowSearchKey.reset();
-            fullLowSearchKey.addTuple(searchKey);
-            fullLowSearchKey.addTuple(lowerBoundTuple);
-
-            upperBoundTupleBuilder.reset();
-            // Write dummy value.
-            upperBoundTupleBuilder.getDataOutput().writeShort(Short.MAX_VALUE);
-            upperBoundTupleBuilder.addFieldEndOffset();
-            upperBoundTuple.reset(upperBoundTupleBuilder.getFieldEndOffsets(), upperBoundTupleBuilder.getByteArray());
-            // Only needed for setting the number of fields in searchKey.
-            searchKey.reset(queryTokenAccessor, 0);
-            fullHighSearchKey.reset();
-            fullHighSearchKey.addTuple(searchKey);
-            fullHighSearchKey.addTuple(upperBoundTuple);
-        } catch (IOException e) {
-            throw new IllegalStateException(e);
-        }
-    }
-
-    public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
-        IPartitionedInvertedIndex partInvIndex = (IPartitionedInvertedIndex) invIndex;
-        searchResult.reset();
-        if (partInvIndex.isEmpty()) {
-            return;
-        }
-        
-        tokenizeQuery(searchPred);
-        short numQueryTokens = (short) queryTokenAccessor.getTupleCount();
-
-        IInvertedIndexSearchModifier searchModifier = searchPred.getSearchModifier();
-        short numTokensLowerBound = searchModifier.getNumTokensLowerBound(numQueryTokens);
-        short numTokensUpperBound = searchModifier.getNumTokensUpperBound(numQueryTokens);
-        
-        occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
-        if (occurrenceThreshold <= 0) {
-            throw new OccurrenceThresholdPanicException("Merge Threshold is <= 0. Failing Search.");
-        }
-        
-        short maxCountPossible = numQueryTokens;
-        invListCursorCache.reset();
-        partitions.reset(numTokensLowerBound, numTokensUpperBound);
-        cursorsOrderedByTokens.clear();
-        for (int i = 0; i < numQueryTokens; i++) {
-            searchKey.reset(queryTokenAccessor, i);
-            if (!partInvIndex.openInvertedListPartitionCursors(this, ictx, numTokensLowerBound, numTokensUpperBound,
-                    partitions, cursorsOrderedByTokens)) {
-                maxCountPossible--;
-                // No results possible.
-                if (maxCountPossible < occurrenceThreshold) {                    
-                    return;
-                }
-            }
-        }
-        
-        ArrayList<IInvertedListCursor>[] partitionCursors = partitions.getPartitions();
-        short start = partitions.getMinValidPartitionIndex();
-        short end = partitions.getMaxValidPartitionIndex();
-        
-        // Typically, we only enter this case for disk-based inverted indexes. 
-        // TODO: This behavior could potentially lead to a deadlock if we cannot pin 
-        // all inverted lists in memory, and are forced to wait for a page to get evicted
-        // (other concurrent searchers may be in the same situation).
-        // We should detect such cases, then unpin all pages, and then keep retrying to pin until we succeed.
-        // This will require a different "tryPin()" mechanism in the BufferCache that will return false
-        // if we'd have to wait for a page to get evicted.
-        if (!cursorsOrderedByTokens.isEmpty()) {
-            for (int i = start; i <= end; i++) {
-                if (partitionCursors[i] == null) {
-                    continue;
-                }
-                // Prune partition because no element in it can satisfy the occurrence threshold.
-                if (partitionCursors[i].size() < occurrenceThreshold) {
-                    cursorsOrderedByTokens.removeAll(partitionCursors[i]);
-                }
-            }
-            // Pin all the cursors in the order of tokens.
-            int numCursors = cursorsOrderedByTokens.size();
-            for (int i = 0; i < numCursors; i++) {
-                cursorsOrderedByTokens.get(i).pinPages();
-            }
-        }
-        
-        // Process the partitions one-by-one.
-        for (int i = start; i <= end; i++) {
-            if (partitionCursors[i] == null) {
-                continue;
-            }
-            // Prune partition because no element in it can satisfy the occurrence threshold.
-            if (partitionCursors[i].size() < occurrenceThreshold) {
-                continue;
-            }
-            // Merge inverted lists of current partition.
-            int numPrefixLists = searchModifier.getNumPrefixLists(occurrenceThreshold, partitionCursors[i].size());
-            invListMerger.reset();
-            invListMerger.merge(partitionCursors[i], occurrenceThreshold, numPrefixLists, searchResult);
-        }
-        
-        resultCursor.open(null, searchPred);
-    }
-
-    public void setNumTokensBoundsInSearchKeys(short numTokensLowerBound, short numTokensUpperBound) {
-        ShortSerializerDeserializer.putShort(numTokensLowerBound, lowerBoundTuple.getFieldData(0),
-                lowerBoundTuple.getFieldStart(0));
-        ShortSerializerDeserializer.putShort(numTokensUpperBound, upperBoundTuple.getFieldData(0),
-                upperBoundTuple.getFieldStart(0));
-    }
-
-    public ITupleReference getPrefixSearchKey() {
-        return searchKey;
-    }
-
-    public ITupleReference getFullLowSearchKey() {
-        return fullLowSearchKey;
-    }
-
-    public ITupleReference getFullHighSearchKey() {
-        return fullHighSearchKey;
-    }
-
-    public IInvertedListCursor getCachedInvertedListCursor() {
-        return invListCursorCache.getNext();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/SearchResult.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/SearchResult.java
deleted file mode 100644
index aa0d3f2..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/SearchResult.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAppender;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeTupleReference;
-
-/**
- * Byte-buffer backed storage for intermediate and final results of inverted-index searches.
- */
-// TODO: Rename members.
-public class SearchResult {
-    protected final ArrayList<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
-    protected final IHyracksCommonContext ctx;
-    protected final FixedSizeFrameTupleAppender appender;
-    protected final FixedSizeFrameTupleAccessor accessor;
-    protected final FixedSizeTupleReference tuple;
-    protected final ITypeTraits[] typeTraits;
-    protected final int invListElementSize;
-
-    protected int currBufIdx;
-    protected int numResults;
-
-    public SearchResult(ITypeTraits[] invListFields, IHyracksCommonContext ctx) {
-        typeTraits = new ITypeTraits[invListFields.length + 1];
-        int tmp = 0;
-        for (int i = 0; i < invListFields.length; i++) {
-            typeTraits[i] = invListFields[i];
-            tmp += invListFields[i].getFixedLength();
-        }
-        invListElementSize = tmp;
-        // Integer for counting occurrences.
-        typeTraits[invListFields.length] = IntegerPointable.TYPE_TRAITS;
-        this.ctx = ctx;
-        appender = new FixedSizeFrameTupleAppender(ctx.getFrameSize(), typeTraits);
-        accessor = new FixedSizeFrameTupleAccessor(ctx.getFrameSize(), typeTraits);
-        tuple = new FixedSizeTupleReference(typeTraits);
-        buffers.add(ctx.allocateFrame());
-    }
-
-    /**
-     * Initialize from other search-result object to share member instances except for result buffers.
-     */
-    public SearchResult(SearchResult other) {
-        this.ctx = other.ctx;
-        this.appender = other.appender;
-        this.accessor = other.accessor;
-        this.tuple = other.tuple;
-        this.typeTraits = other.typeTraits;
-        this.invListElementSize = other.invListElementSize;
-        buffers.add(ctx.allocateFrame());
-    }
-
-    public FixedSizeFrameTupleAccessor getAccessor() {
-        return accessor;
-    }
-
-    public FixedSizeFrameTupleAppender getAppender() {
-        return appender;
-    }
-
-    public FixedSizeTupleReference getTuple() {
-        return tuple;
-    }
-
-    public ArrayList<ByteBuffer> getBuffers() {
-        return buffers;
-    }
-
-    public void reset() {
-        currBufIdx = 0;
-        numResults = 0;
-        appender.reset(buffers.get(0), true);
-    }
-
-    public void clear() {
-        currBufIdx = 0;
-        numResults = 0;
-        for (ByteBuffer buffer : buffers) {
-            appender.reset(buffer, true);
-        }
-    }
-
-    public void append(ITupleReference invListElement, int count) {
-        ByteBuffer currentBuffer = buffers.get(currBufIdx);
-        if (!appender.hasSpace()) {
-            currBufIdx++;
-            if (currBufIdx >= buffers.size()) {
-                buffers.add(ctx.allocateFrame());
-            }
-            currentBuffer = buffers.get(currBufIdx);
-            appender.reset(currentBuffer, true);
-        }
-        // Append inverted-list element.
-        if (!appender.append(invListElement.getFieldData(0), invListElement.getFieldStart(0), invListElementSize)) {
-            throw new IllegalStateException();
-        }
-        // Append count.
-        if (!appender.append(count)) {
-            throw new IllegalStateException();
-        }
-        appender.incrementTupleCount(1);
-        numResults++;
-    }
-
-    public int getCurrentBufferIndex() {
-        return currBufIdx;
-    }
-
-    public ITypeTraits[] getTypeTraits() {
-        return typeTraits;
-    }
-
-    public int getNumResults() {
-        return numResults;
-    }
-
-    // TODO: This code may help to clean up the core list-merging algorithms.
-    /*
-    public SearchResultCursor getCursor() {
-        cursor.reset();
-        return cursor;
-    }
-    
-    public class SearchResultCursor {
-        private int bufferIndex;
-        private int resultIndex;
-        private int frameResultIndex;
-        private ByteBuffer currentBuffer;
-
-        public void reset() {
-            bufferIndex = 0;
-            resultIndex = 0;
-            frameResultIndex = 0;
-            currentBuffer = buffers.get(0);
-            resultFrameTupleAcc.reset(currentBuffer);
-        }
-
-        public boolean hasNext() {
-            return resultIndex < numResults;
-        }
-
-        public void next() {
-            resultTuple.reset(currentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(frameResultIndex));            
-            if (frameResultIndex < resultFrameTupleAcc.getTupleCount()) {
-                frameResultIndex++;
-            } else {
-                bufferIndex++;
-                currentBuffer = buffers.get(bufferIndex);
-                resultFrameTupleAcc.reset(currentBuffer);
-                frameResultIndex = 0;
-            }            
-            resultIndex++;
-        }
-
-        public ITupleReference getTuple() {
-            return resultTuple;
-        }
-    }
-    */
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
deleted file mode 100644
index 4513540..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
-
-public class TOccurrenceSearcher extends AbstractTOccurrenceSearcher {
-
-    protected final ArrayList<IInvertedListCursor> invListCursors = new ArrayList<IInvertedListCursor>();
-
-    public TOccurrenceSearcher(IHyracksCommonContext ctx, IInvertedIndex invIndex) {
-        super(ctx, invIndex);
-    }
-
-    public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
-        tokenizeQuery(searchPred);
-        int numQueryTokens = queryTokenAccessor.getTupleCount();
-
-        invListCursors.clear();
-        invListCursorCache.reset();
-        for (int i = 0; i < numQueryTokens; i++) {
-            searchKey.reset(queryTokenAccessor, i);
-            IInvertedListCursor invListCursor = invListCursorCache.getNext();
-            invIndex.openInvertedListCursor(invListCursor, searchKey, ictx);
-            invListCursors.add(invListCursor);
-        }
-
-        IInvertedIndexSearchModifier searchModifier = searchPred.getSearchModifier();
-        occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
-        if (occurrenceThreshold <= 0) {
-            throw new OccurrenceThresholdPanicException("Merge threshold is <= 0. Failing Search.");
-        }
-        int numPrefixLists = searchModifier.getNumPrefixLists(occurrenceThreshold, invListCursors.size());
-
-        searchResult.reset();
-        invListMerger.merge(invListCursors, occurrenceThreshold, numPrefixLists, searchResult);
-        resultCursor.open(null, searchPred);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java
deleted file mode 100644
index 7c0ec4d..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-
-public abstract class AbstractUTF8StringBinaryTokenizer implements IBinaryTokenizer {
-
-    protected byte[] data;
-    protected int start;
-    protected int length;
-    protected int tokenLength;
-    protected int index;
-    protected int utf8Length;
-
-    protected final IntArray tokensStart;
-    protected final IntArray tokensLength;
-    protected final IToken token;
-
-    protected final boolean ignoreTokenCount;
-    protected final boolean sourceHasTypeTag;
-
-    public AbstractUTF8StringBinaryTokenizer(boolean ignoreTokenCount, boolean sourceHasTypeTag,
-            ITokenFactory tokenFactory) {
-        this.ignoreTokenCount = ignoreTokenCount;
-        this.sourceHasTypeTag = sourceHasTypeTag;
-        if (!ignoreTokenCount) {
-            tokensStart = new IntArray();
-            tokensLength = new IntArray();
-        } else {
-            tokensStart = null;
-            tokensLength = null;
-        }
-        token = tokenFactory.createToken();
-    }
-
-    @Override
-    public IToken getToken() {
-        return token;
-    }
-
-    @Override
-    public void reset(byte[] data, int start, int length) {
-        this.start = start;
-        index = this.start;
-        if (sourceHasTypeTag) {
-            index++; // skip type tag
-        }
-        utf8Length = UTF8StringPointable.getUTFLength(data, index);
-        index += 2; // skip utf8 length indicator
-        this.data = data;
-        this.length = length + start;
-
-        tokenLength = 0;
-        if (!ignoreTokenCount) {
-            tokensStart.reset();
-            tokensLength.reset();
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
deleted file mode 100644
index c9b6e1f..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-
-public abstract class AbstractUTF8Token implements IToken {
-    public static final int GOLDEN_RATIO_32 = 0x09e3779b9;
-
-    protected int length;
-    protected int tokenLength;
-    protected int start;
-    protected int tokenCount;
-    protected byte[] data;
-    protected final byte tokenTypeTag;
-    protected final byte countTypeTag;
-
-    public AbstractUTF8Token() {
-        tokenTypeTag = -1;
-        countTypeTag = -1;
-    }
-
-    public AbstractUTF8Token(byte tokenTypeTag, byte countTypeTag) {
-        this.tokenTypeTag = tokenTypeTag;
-        this.countTypeTag = countTypeTag;
-    }
-
-    @Override
-    public byte[] getData() {
-        return data;
-    }
-
-    @Override
-    public int getLength() {
-        return length;
-    }
-
-    public int getLowerCaseUTF8Len(int size) {
-        int lowerCaseUTF8Len = 0;
-        int pos = start;
-        for (int i = 0; i < size; i++) {
-            char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
-            lowerCaseUTF8Len += UTF8StringPointable.getModifiedUTF8Len(c);
-            pos += UTF8StringPointable.charSize(data, pos);
-        }
-        return lowerCaseUTF8Len;
-    }
-
-    @Override
-    public int getStart() {
-        return start;
-    }
-
-    @Override
-    public int getTokenLength() {
-        return tokenLength;
-    }
-
-    public void handleCountTypeTag(DataOutput dos) throws IOException {
-        if (countTypeTag > 0) {
-            dos.write(countTypeTag);
-        }
-    }
-
-    public void handleTokenTypeTag(DataOutput dos) throws IOException {
-        if (tokenTypeTag > 0) {
-            dos.write(tokenTypeTag);
-        }
-    }
-
-    @Override
-    public void reset(byte[] data, int start, int length, int tokenLength, int tokenCount) {
-        this.data = data;
-        this.start = start;
-        this.length = length;
-        this.tokenLength = tokenLength;
-        this.tokenCount = tokenCount;
-    }
-
-    @Override
-    public void serializeTokenCount(GrowableArray out) throws IOException {
-        handleCountTypeTag(out.getDataOutput());
-        out.getDataOutput().writeInt(tokenCount);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8TokenFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8TokenFactory.java
deleted file mode 100644
index 1507613..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8TokenFactory.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public abstract class AbstractUTF8TokenFactory implements ITokenFactory {
-	private static final long serialVersionUID = 1L;
-	protected final byte tokenTypeTag;
-	protected final byte countTypeTag;
-
-	public AbstractUTF8TokenFactory() {
-		tokenTypeTag = -1;
-		countTypeTag = -1;
-	}
-
-	public AbstractUTF8TokenFactory(byte tokenTypeTag, byte countTypeTag) {
-		this.tokenTypeTag = tokenTypeTag;
-		this.countTypeTag = countTypeTag;
-	}
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
deleted file mode 100644
index 4c11523..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-
-public class DelimitedUTF8StringBinaryTokenizer extends AbstractUTF8StringBinaryTokenizer {
-
-    public DelimitedUTF8StringBinaryTokenizer(boolean ignoreTokenCount, boolean sourceHasTypeTag,
-            ITokenFactory tokenFactory) {
-        super(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
-    }
-
-    @Override
-    public boolean hasNext() {
-        // skip delimiters
-        while (index < length && isSeparator(UTF8StringPointable.charAt(data, index))) {
-            index += UTF8StringPointable.charSize(data, index);
-        }
-        return index < length;
-    }
-
-    private boolean isSeparator(char c) {
-        return !(Character.isLetterOrDigit(c) || Character.getType(c) == Character.OTHER_LETTER || Character.getType(c) == Character.OTHER_NUMBER);
-    }
-
-    @Override
-    public void next() {
-        tokenLength = 0;
-        int currentTokenStart = index;
-        while (index < length && !isSeparator(UTF8StringPointable.charAt(data, index))) {
-            index += UTF8StringPointable.charSize(data, index);
-            tokenLength++;
-        }
-        int tokenCount = 1;
-        if (tokenLength > 0 && !ignoreTokenCount) {
-            // search if we got the same token before
-            for (int i = 0; i < tokensStart.length(); ++i) {
-                if (tokenLength == tokensLength.get(i)) {
-                    int tokenStart = tokensStart.get(i);
-                    tokenCount++; // assume we found it
-                    int offset = 0;
-                    int currLength = 0;
-                    while (currLength < tokenLength) {
-                        // case insensitive comparison
-                        if (Character.toLowerCase(UTF8StringPointable.charAt(data, currentTokenStart + offset)) != Character
-                                .toLowerCase(UTF8StringPointable.charAt(data, tokenStart + offset))) {
-                            tokenCount--;
-                            break;
-                        }
-                        offset += UTF8StringPointable.charSize(data, currentTokenStart + offset);
-                        currLength++;
-                    }
-                }
-            }
-            // add the new token to the list of seen tokens
-            tokensStart.add(currentTokenStart);
-            tokensLength.add(tokenLength);
-        }
-
-        // set token
-        token.reset(data, currentTokenStart, index, tokenLength, tokenCount);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
deleted file mode 100644
index 08b962b..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public class DelimitedUTF8StringBinaryTokenizerFactory implements
-		IBinaryTokenizerFactory {
-
-	private static final long serialVersionUID = 1L;
-	private final boolean ignoreTokenCount;
-	private final boolean sourceHasTypeTag;
-	private final ITokenFactory tokenFactory;
-
-	public DelimitedUTF8StringBinaryTokenizerFactory(boolean ignoreTokenCount,
-			boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
-		this.ignoreTokenCount = ignoreTokenCount;
-		this.sourceHasTypeTag = sourceHasTypeTag;
-		this.tokenFactory = tokenFactory;
-	}
-
-	@Override
-	public IBinaryTokenizer createTokenizer() {
-		return new DelimitedUTF8StringBinaryTokenizer(ignoreTokenCount,
-				sourceHasTypeTag, tokenFactory);
-	}
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8NGramToken.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8NGramToken.java
deleted file mode 100644
index 632bf9a..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8NGramToken.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-
-public class HashedUTF8NGramToken extends UTF8NGramToken {
-    public HashedUTF8NGramToken(byte tokenTypeTag, byte countTypeTag) {
-        super(tokenTypeTag, countTypeTag);
-    }
-
-    @Override
-    public void serializeToken(GrowableArray out) throws IOException {
-        handleTokenTypeTag(out.getDataOutput());
-
-        int hash = GOLDEN_RATIO_32;
-
-        // pre chars
-        for (int i = 0; i < numPreChars; i++) {
-            hash ^= PRECHAR;
-            hash *= GOLDEN_RATIO_32;
-        }
-
-        // regular chars
-        int numRegGrams = tokenLength - numPreChars - numPostChars;
-        int pos = start;
-        for (int i = 0; i < numRegGrams; i++) {
-            hash ^= Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
-            hash *= GOLDEN_RATIO_32;
-            pos += UTF8StringPointable.charSize(data, pos);
-        }
-
-        // post chars
-        for (int i = 0; i < numPostChars; i++) {
-            hash ^= POSTCHAR;
-            hash *= GOLDEN_RATIO_32;
-        }
-
-        // token count
-        hash += tokenCount;
-
-        out.getDataOutput().writeInt(hash);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java
deleted file mode 100644
index e1d8e31..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public class HashedUTF8NGramTokenFactory extends AbstractUTF8TokenFactory {
-
-	private static final long serialVersionUID = 1L;
-
-	public HashedUTF8NGramTokenFactory() {
-		super();
-	}
-
-	public HashedUTF8NGramTokenFactory(byte tokenTypeTag, byte countTypeTag) {
-		super(tokenTypeTag, countTypeTag);
-	}
-
-	@Override
-	public IToken createToken() {
-		return new HashedUTF8NGramToken(tokenTypeTag, countTypeTag);
-	}
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8WordToken.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8WordToken.java
deleted file mode 100644
index 32954f9..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8WordToken.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-
-public class HashedUTF8WordToken extends UTF8WordToken {
-
-    private int hash = 0;
-
-    public HashedUTF8WordToken(byte tokenTypeTag, byte countTypeTag) {
-        super(tokenTypeTag, countTypeTag);
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == null) {
-            return false;
-        }
-        if (!(o instanceof IToken)) {
-            return false;
-        }
-        IToken t = (IToken) o;
-        if (t.getTokenLength() != tokenLength) {
-            return false;
-        }
-        int offset = 0;
-        for (int i = 0; i < tokenLength; i++) {
-            if (UTF8StringPointable.charAt(t.getData(), t.getStart() + offset) != UTF8StringPointable.charAt(data,
-                    start + offset)) {
-                return false;
-            }
-            offset += UTF8StringPointable.charSize(data, start + offset);
-        }
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        return hash;
-    }
-
-    @Override
-    public void reset(byte[] data, int start, int length, int tokenLength, int tokenCount) {
-        super.reset(data, start, length, tokenLength, tokenCount);
-
-        // pre-compute hash value using JAQL-like string hashing
-        int pos = start;
-        hash = GOLDEN_RATIO_32;
-        for (int i = 0; i < tokenLength; i++) {
-            hash ^= Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
-            hash *= GOLDEN_RATIO_32;
-            pos += UTF8StringPointable.charSize(data, pos);
-        }
-        hash += tokenCount;
-    }
-
-    @Override
-    public void serializeToken(GrowableArray out) throws IOException {
-        if (tokenTypeTag > 0) {
-            out.getDataOutput().write(tokenTypeTag);
-        }
-
-        // serialize hash value
-        out.getDataOutput().writeInt(hash);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java
deleted file mode 100644
index a4788c4..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public class HashedUTF8WordTokenFactory extends AbstractUTF8TokenFactory {
-
-	private static final long serialVersionUID = 1L;
-
-	public HashedUTF8WordTokenFactory() {
-		super();
-	}
-
-	public HashedUTF8WordTokenFactory(byte tokenTypeTag, byte countTypeTag) {
-		super(tokenTypeTag, countTypeTag);
-	}
-
-	@Override
-	public IToken createToken() {
-		return new HashedUTF8WordToken(tokenTypeTag, countTypeTag);
-	}
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IBinaryTokenizer.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IBinaryTokenizer.java
deleted file mode 100644
index f88e744..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IBinaryTokenizer.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public interface IBinaryTokenizer {
-	public IToken getToken();
-
-	public boolean hasNext();
-
-	public void next();
-
-	public void reset(byte[] data, int start, int length);
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IBinaryTokenizerFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IBinaryTokenizerFactory.java
deleted file mode 100644
index 5890124..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IBinaryTokenizerFactory.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.Serializable;
-
-public interface IBinaryTokenizerFactory extends Serializable {
-	public IBinaryTokenizer createTokenizer();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/INGramToken.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/INGramToken.java
deleted file mode 100644
index 40351c4..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/INGramToken.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public interface INGramToken {
-	public int getNumPostChars();
-
-	public int getNumPreChars();
-
-	public void setNumPrePostChars(int numPreChars, int numPostChars);
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IToken.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IToken.java
deleted file mode 100644
index 7b1a130..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IToken.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-
-public interface IToken {
-	public byte[] getData();
-
-	public int getLength();
-
-	public int getStart();
-
-	public int getTokenLength();
-
-	public void reset(byte[] data, int start, int length, int tokenLength,
-			int tokenCount);
-
-	public void serializeToken(GrowableArray out) throws IOException;
-
-	public void serializeTokenCount(GrowableArray out) throws IOException;
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/ITokenFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/ITokenFactory.java
deleted file mode 100644
index 5765263..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/ITokenFactory.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.Serializable;
-
-public interface ITokenFactory extends Serializable {
-    public IToken createToken();
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IntArray.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IntArray.java
deleted file mode 100644
index 6bae90b..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/IntArray.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Rares Vernica <rares (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.util.Arrays;
-
-public class IntArray {
-    private static final int SIZE = 128;
-
-    private int[] data;
-    private int length;
-
-    public IntArray() {
-        data = new int[SIZE];
-        length = 0;
-    }
-
-    public void add(int d) {
-        if (length == data.length) {
-            data = Arrays.copyOf(data, data.length << 1);
-        }
-        data[length++] = d;
-    }
-
-    public int[] get() {
-        return data;
-    }
-
-    public int get(int i) {
-        return data[i];
-    }
-
-    public int length() {
-        return length;
-    }
-
-    public void reset() {
-        length = 0;
-    }
-
-    public void sort() {
-        sort(0, length);
-    }
-
-    public void sort(int start, int end) {
-        Arrays.sort(data, start, end);
-    }
-
-    @Override
-    public String toString() {
-        StringBuilder out = new StringBuilder();
-        out.append('[');
-        for (int i = 0; i < length; ++i) {
-            out.append(data[i]);
-            if (i < length - 1) {
-                out.append(',');
-                out.append(' ');
-            }
-        }
-        out.append(']');
-        return out.toString();
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
deleted file mode 100644
index 0af0335..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-
-public class NGramUTF8StringBinaryTokenizer extends AbstractUTF8StringBinaryTokenizer {
-
-    private int gramLength;
-    private boolean usePrePost;
-
-    private int gramNum;
-    private int totalGrams;
-
-    private final INGramToken concreteToken;
-
-    public NGramUTF8StringBinaryTokenizer(int gramLength, boolean usePrePost, boolean ignoreTokenCount,
-            boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
-        super(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
-        this.gramLength = gramLength;
-        this.usePrePost = usePrePost;
-        concreteToken = (INGramToken) token;
-    }
-
-    @Override
-    public boolean hasNext() {
-        if (gramNum < totalGrams) {
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    @Override
-    public void next() {
-        int currentTokenStart = index;
-        int tokenCount = 1;
-        int numPreChars = 0;
-        int numPostChars = 0;
-        if (usePrePost) {
-            numPreChars = Math.max(gramLength - gramNum - 1, 0);
-            numPostChars = (gramNum > totalGrams - gramLength) ? gramLength - totalGrams + gramNum : 0;
-        }
-        gramNum++;
-
-        concreteToken.setNumPrePostChars(numPreChars, numPostChars);
-        if (numPreChars == 0) {
-            index += UTF8StringPointable.charSize(data, index);
-        }
-
-        // compute token count
-        // ignore pre and post grams for duplicate detection
-        if (!ignoreTokenCount && numPreChars == 0 && numPostChars == 0) {
-            int tmpIndex = start;
-            while (tmpIndex < currentTokenStart) {
-                tokenCount++; // assume found
-                int offset = 0;
-                for (int j = 0; j < gramLength; j++) {
-                    if (Character.toLowerCase(UTF8StringPointable.charAt(data, currentTokenStart + offset)) != Character
-                            .toLowerCase(UTF8StringPointable.charAt(data, tmpIndex + offset))) {
-                        tokenCount--;
-                        break;
-                    }
-                    offset += UTF8StringPointable.charSize(data, tmpIndex + offset);
-                }
-                tmpIndex += UTF8StringPointable.charSize(data, tmpIndex);
-            }
-        }
-
-        // set token
-        token.reset(data, currentTokenStart, length, gramLength, tokenCount);
-    }
-
-    @Override
-    public void reset(byte[] data, int start, int length) {
-        super.reset(data, start, length);
-        gramNum = 0;
-
-        int numChars = 0;
-        int pos = index;
-        int end = pos + utf8Length;
-        while (pos < end) {
-            numChars++;
-            pos += UTF8StringPointable.charSize(data, pos);
-        }
-
-        if (usePrePost) {
-            totalGrams = numChars + gramLength - 1;
-        } else {
-            totalGrams = numChars - gramLength + 1;
-        }
-    }
-
-    public void setGramlength(int gramLength) {
-        this.gramLength = gramLength;
-    }
-
-    public void setPrePost(boolean usePrePost) {
-        this.usePrePost = usePrePost;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizerFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizerFactory.java
deleted file mode 100644
index da3d411..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizerFactory.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public class NGramUTF8StringBinaryTokenizerFactory implements IBinaryTokenizerFactory {
-
-    private static final long serialVersionUID = 1L;
-    private final int gramLength;
-    private final boolean usePrePost;
-    private final boolean ignoreTokenCount;
-    private final boolean sourceHasTypeTag;
-    private final ITokenFactory tokenFactory;
-
-    public NGramUTF8StringBinaryTokenizerFactory(int gramLength, boolean usePrePost, boolean ignoreTokenCount,
-            boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
-        this.gramLength = gramLength;
-        this.usePrePost = usePrePost;
-        this.ignoreTokenCount = ignoreTokenCount;
-        this.sourceHasTypeTag = sourceHasTypeTag;
-        this.tokenFactory = tokenFactory;
-    }
-
-    @Override
-    public IBinaryTokenizer createTokenizer() {
-        return new NGramUTF8StringBinaryTokenizer(gramLength, usePrePost, ignoreTokenCount, sourceHasTypeTag,
-                tokenFactory);
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8NGramToken.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8NGramToken.java
deleted file mode 100644
index 8713499..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8NGramToken.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
-
-public class UTF8NGramToken extends AbstractUTF8Token implements INGramToken {
-
-    public final static char PRECHAR = '#';
-
-    public final static char POSTCHAR = '$';
-
-    protected int numPreChars;
-    protected int numPostChars;
-
-    public UTF8NGramToken(byte tokenTypeTag, byte countTypeTag) {
-        super(tokenTypeTag, countTypeTag);
-    }
-
-    @Override
-    public int getNumPostChars() {
-        return numPreChars;
-    }
-
-    @Override
-    public int getNumPreChars() {
-        return numPostChars;
-    }
-
-    @Override
-    public void serializeToken(GrowableArray out) throws IOException {
-        handleTokenTypeTag(out.getDataOutput());
-        int tokenUTF8LenOff = out.getLength();
-
-        // regular chars
-        int numRegChars = tokenLength - numPreChars - numPostChars;
-
-        // assuming pre and post char need 1-byte each in utf8
-        int tokenUTF8Len = numPreChars + numPostChars;
-
-        // Write dummy UTF length which will be correctly set later.
-        out.getDataOutput().writeShort(0);
-
-        // pre chars
-        for (int i = 0; i < numPreChars; i++) {
-            StringUtils.writeCharAsModifiedUTF8(PRECHAR, out.getDataOutput());
-        }
-
-        int pos = start;
-        for (int i = 0; i < numRegChars; i++) {
-            char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
-            tokenUTF8Len += StringUtils.writeCharAsModifiedUTF8(c, out.getDataOutput());
-            pos += UTF8StringPointable.charSize(data, pos);
-        }
-
-        // post chars
-        for (int i = 0; i < numPostChars; i++) {
-            StringUtils.writeCharAsModifiedUTF8(POSTCHAR, out.getDataOutput());
-        }
-
-        // Set UTF length of token.
-        out.getByteArray()[tokenUTF8LenOff] = (byte) ((tokenUTF8Len >>> 8) & 0xFF);
-        out.getByteArray()[tokenUTF8LenOff + 1] = (byte) ((tokenUTF8Len >>> 0) & 0xFF);
-    }
-
-    public void setNumPrePostChars(int numPreChars, int numPostChars) {
-        this.numPreChars = numPreChars;
-        this.numPostChars = numPostChars;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8NGramTokenFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8NGramTokenFactory.java
deleted file mode 100644
index d26a409..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8NGramTokenFactory.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public class UTF8NGramTokenFactory extends AbstractUTF8TokenFactory {
-
-	private static final long serialVersionUID = 1L;
-
-	public UTF8NGramTokenFactory() {
-		super();
-	}
-
-	public UTF8NGramTokenFactory(byte tokenTypeTag, byte countTypeTag) {
-		super(tokenTypeTag, countTypeTag);
-	}
-
-	@Override
-	public IToken createToken() {
-		return new UTF8NGramToken(tokenTypeTag, countTypeTag);
-	}
-
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8WordToken.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8WordToken.java
deleted file mode 100644
index dbfc76f..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8WordToken.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
-
-public class UTF8WordToken extends AbstractUTF8Token {
-
-    public UTF8WordToken(byte tokenTypeTag, byte countTypeTag) {
-        super(tokenTypeTag, countTypeTag);
-    }
-
-    @Override
-    public void serializeToken(GrowableArray out) throws IOException {
-        handleTokenTypeTag(out.getDataOutput());
-        int tokenUTF8LenOff = out.getLength();
-        int tokenUTF8Len = 0;
-        // Write dummy UTF length which will be correctly set later.
-        out.getDataOutput().writeShort(0);
-        int pos = start;
-        for (int i = 0; i < tokenLength; i++) {
-            char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
-            tokenUTF8Len += StringUtils.writeCharAsModifiedUTF8(c, out.getDataOutput());
-            pos += UTF8StringPointable.charSize(data, pos);
-        }
-        // Set UTF length of token.
-        out.getByteArray()[tokenUTF8LenOff] = (byte) ((tokenUTF8Len >>> 8) & 0xFF);
-        out.getByteArray()[tokenUTF8LenOff + 1] = (byte) ((tokenUTF8Len >>> 0) & 0xFF);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8WordTokenFactory.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8WordTokenFactory.java
deleted file mode 100644
index 023e957..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/UTF8WordTokenFactory.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Copyright 2010-2011 The Regents of the University of California
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on
- * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations under
- * the License.
- * 
- * Author: Alexander Behm <abehm (at) ics.uci.edu>
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-public class UTF8WordTokenFactory extends AbstractUTF8TokenFactory {
-
-	private static final long serialVersionUID = 1L;
-
-	public UTF8WordTokenFactory() {
-		super();
-	}
-
-	public UTF8WordTokenFactory(byte tokenTypeTag, byte countTypeTag) {
-		super(tokenTypeTag, countTypeTag);
-	}
-
-	@Override
-	public IToken createToken() {
-		return new UTF8WordToken(tokenTypeTag, countTypeTag);
-	}
-
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
deleted file mode 100644
index b9f9362..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IToken;
-
-// TODO: We can possibly avoid copying the data into a new tuple here.
-public class InvertedIndexTokenizingTupleIterator {
-    // Field that is expected to be tokenized.
-    protected final int DOC_FIELD_INDEX = 0;
-
-    protected final int invListFieldCount;
-    protected final ArrayTupleBuilder tupleBuilder;
-    protected final ArrayTupleReference tupleReference;
-    protected final IBinaryTokenizer tokenizer;
-    protected ITupleReference inputTuple;
-
-    public InvertedIndexTokenizingTupleIterator(int tokensFieldCount, int invListFieldCount, IBinaryTokenizer tokenizer) {
-        this.invListFieldCount = invListFieldCount;
-        this.tupleBuilder = new ArrayTupleBuilder(tokensFieldCount + invListFieldCount);
-        this.tupleReference = new ArrayTupleReference();
-        this.tokenizer = tokenizer;
-    }
-
-    public void reset(ITupleReference inputTuple) {
-        this.inputTuple = inputTuple;
-        tokenizer.reset(inputTuple.getFieldData(DOC_FIELD_INDEX), inputTuple.getFieldStart(DOC_FIELD_INDEX),
-                inputTuple.getFieldLength(DOC_FIELD_INDEX));
-    }
-
-    public boolean hasNext() {
-        return tokenizer.hasNext();
-    }
-
-    public void next() throws HyracksDataException {
-        tokenizer.next();
-        IToken token = tokenizer.getToken();
-        tupleBuilder.reset();
-        // Add token field.
-        try {
-            token.serializeToken(tupleBuilder.getFieldData());
-        } catch (IOException e) {
-            throw new HyracksDataException(e);
-        }
-        tupleBuilder.addFieldEndOffset();
-        // Add inverted-list element fields.
-        for (int i = 0; i < invListFieldCount; i++) {
-            tupleBuilder.addField(inputTuple.getFieldData(i + 1), inputTuple.getFieldStart(i + 1),
-                    inputTuple.getFieldLength(i + 1));
-        }
-        // Reset tuple reference for insert operation.
-        tupleReference.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-    }
-
-    public ITupleReference getTuple() {
-        return tupleReference;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
deleted file mode 100644
index 79c8ccf..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util;
-
-import java.io.File;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilderFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.PartitionedLSMInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory.InMemoryInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory.PartitionedInMemoryInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeElementInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeElementInvertedListBuilderFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.PartitionedOnDiskInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.PartitionedOnDiskInvertedIndexFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class InvertedIndexUtils {
-
-    public static InMemoryInvertedIndex createInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
-            IFreePageManager memFreePageManager, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory)
-            throws BTreeException {
-        return new InMemoryInvertedIndex(memBufferCache, memFreePageManager, invListTypeTraits, invListCmpFactories,
-                tokenTypeTraits, tokenCmpFactories, tokenizerFactory);
-    }
-
-    public static InMemoryInvertedIndex createPartitionedInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
-            IFreePageManager memFreePageManager, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory)
-            throws BTreeException {
-        return new PartitionedInMemoryInvertedIndex(memBufferCache, memFreePageManager, invListTypeTraits,
-                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory);
-    }
-
-    public static OnDiskInvertedIndex createOnDiskInvertedIndex(IBufferCache bufferCache,
-            IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile) throws IndexException {
-        IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
-        FileReference btreeFile = getBTreeFile(invListsFile);
-        return new OnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits, invListCmpFactories,
-                tokenTypeTraits, tokenCmpFactories, btreeFile, invListsFile);
-    }
-
-    public static PartitionedOnDiskInvertedIndex createPartitionedOnDiskInvertedIndex(IBufferCache bufferCache,
-            IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile) throws IndexException {
-        IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
-        FileReference btreeFile = getBTreeFile(invListsFile);
-        return new PartitionedOnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits,
-                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, btreeFile, invListsFile);
-    }
-
-    public static FileReference getBTreeFile(FileReference invListsFile) {
-        return new FileReference(new File(invListsFile.getFile().getPath() + "_btree"));
-    }
-
-    public static BTreeFactory createDeletedKeysBTreeFactory(IFileMapProvider diskFileMapProvider,
-            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
-            IBufferCache diskBufferCache) throws BTreeException {
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(invListTypeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = BTreeUtils.getLeafFrameFactory(tupleWriterFactory,
-                BTreeLeafFrameType.REGULAR_NSM);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
-                metaFrameFactory);
-        BTreeFactory deletedKeysBTreeFactory = new BTreeFactory(diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, interiorFrameFactory, leafFrameFactory, invListCmpFactories,
-                invListCmpFactories.length);
-        return deletedKeysBTreeFactory;
-    }
-
-    public static LSMInvertedIndex createLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IFileMapProvider diskFileMapProvider,
-            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
-            String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
-            throws IndexException {
-        return createLSMInvertedIndex(memBufferCache, memFreePageManager, diskFileMapProvider, invListTypeTraits,
-                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, diskBufferCache, ioManager,
-                onDiskDir, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider, 0);
-    }
-
-    public static LSMInvertedIndex createLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IFileMapProvider diskFileMapProvider,
-            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
-            String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            int startIODeviceIndex) throws IndexException {
-
-        BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(diskFileMapProvider, invListTypeTraits,
-                invListCmpFactories, diskBufferCache);
-
-        int[] bloomFilterKeyFields = new int[invListCmpFactories.length];
-        for (int i = 0; i < invListCmpFactories.length; i++) {
-            bloomFilterKeyFields[i] = i;
-        }
-        BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
-                bloomFilterKeyFields);
-
-        FileReference onDiskDirFileRef = new FileReference(new File(onDiskDir));
-        LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(ioManager, diskFileMapProvider,
-                onDiskDirFileRef, deletedKeysBTreeFactory, startIODeviceIndex);
-
-        IInvertedListBuilderFactory invListBuilderFactory = new FixedSizeElementInvertedListBuilderFactory(
-                invListTypeTraits);
-        OnDiskInvertedIndexFactory invIndexFactory = new OnDiskInvertedIndexFactory(diskBufferCache,
-                diskFileMapProvider, invListBuilderFactory, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                tokenCmpFactories, fileManager);
-
-        LSMInvertedIndex invIndex = new LSMInvertedIndex(memBufferCache, memFreePageManager, invIndexFactory,
-                deletedKeysBTreeFactory, bloomFilterFactory, fileManager, diskFileMapProvider, invListTypeTraits,
-                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        return invIndex;
-    }
-
-    public static PartitionedLSMInvertedIndex createPartitionedLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IFileMapProvider diskFileMapProvider,
-            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
-            String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
-            throws IndexException {
-        return createPartitionedLSMInvertedIndex(memBufferCache, memFreePageManager, diskFileMapProvider,
-                invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
-                diskBufferCache, ioManager, onDiskDir, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider, 0);
-    }
-
-    public static PartitionedLSMInvertedIndex createPartitionedLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IFileMapProvider diskFileMapProvider,
-            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
-            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
-            IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
-            String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            int startIODeviceIndex) throws IndexException {
-
-        BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(diskFileMapProvider, invListTypeTraits,
-                invListCmpFactories, diskBufferCache);
-
-        int[] bloomFilterKeyFields = new int[invListCmpFactories.length];
-        for (int i = 0; i < invListCmpFactories.length; i++) {
-            bloomFilterKeyFields[i] = i;
-        }
-        BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
-                bloomFilterKeyFields);
-
-        FileReference onDiskDirFileRef = new FileReference(new File(onDiskDir));
-        LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(ioManager, diskFileMapProvider,
-                onDiskDirFileRef, deletedKeysBTreeFactory, startIODeviceIndex);
-
-        IInvertedListBuilderFactory invListBuilderFactory = new FixedSizeElementInvertedListBuilderFactory(
-                invListTypeTraits);
-        PartitionedOnDiskInvertedIndexFactory invIndexFactory = new PartitionedOnDiskInvertedIndexFactory(
-                diskBufferCache, diskFileMapProvider, invListBuilderFactory, invListTypeTraits, invListCmpFactories,
-                tokenTypeTraits, tokenCmpFactories, fileManager);
-
-        PartitionedLSMInvertedIndex invIndex = new PartitionedLSMInvertedIndex(memBufferCache, memFreePageManager,
-                invIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory, fileManager, diskFileMapProvider,
-                invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
-                mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        return invIndex;
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/ObjectCache.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/ObjectCache.java
deleted file mode 100644
index b073f20..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/ObjectCache.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-
-public class ObjectCache<T> {
-    protected final int expandSize;
-    protected final IObjectFactory<T> objFactory;
-    protected final ArrayList<T> cache;
-    protected int lastReturned = 0;
-
-    public ObjectCache(IObjectFactory<T> objFactory, int initialSize, int expandSize) {
-        this.objFactory = objFactory;
-        this.cache = new ArrayList<T>(initialSize);
-        this.expandSize = expandSize;
-        expand(initialSize);
-    }
-
-    private void expand(int expandSize) {
-        for (int i = 0; i < expandSize; i++) {
-            cache.add(objFactory.create());
-        }
-    }
-
-    public void reset() {
-        lastReturned = 0;
-    }
-
-    public T getNext() {
-        if (lastReturned >= cache.size()) {
-            expand(expandSize);
-        }
-        return cache.get(lastReturned++);
-    }
-}
diff --git a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/PartitionedInvertedIndexTokenizingTupleIterator.java b/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/PartitionedInvertedIndexTokenizingTupleIterator.java
deleted file mode 100644
index 8a18a91..0000000
--- a/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/PartitionedInvertedIndexTokenizingTupleIterator.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IToken;
-
-// TODO: We can possibly avoid copying the data into a new tuple here.
-public class PartitionedInvertedIndexTokenizingTupleIterator extends InvertedIndexTokenizingTupleIterator {
-
-    protected short numTokens = 0;
-
-    public PartitionedInvertedIndexTokenizingTupleIterator(int tokensFieldCount, int invListFieldCount,
-            IBinaryTokenizer tokenizer) {
-        super(tokensFieldCount, invListFieldCount, tokenizer);
-    }
-
-    public void reset(ITupleReference inputTuple) {
-        super.reset(inputTuple);
-        // Run through the tokenizer once to get the total number of tokens.
-        numTokens = 0;
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-            numTokens++;
-        }
-        super.reset(inputTuple);
-    }
-
-    public void next() throws HyracksDataException {
-        tokenizer.next();
-        IToken token = tokenizer.getToken();
-        tupleBuilder.reset();
-        try {
-            // Add token field.
-            token.serializeToken(tupleBuilder.getFieldData());
-            tupleBuilder.addFieldEndOffset();
-            // Add field with number of tokens.
-            tupleBuilder.getDataOutput().writeShort(numTokens);
-            tupleBuilder.addFieldEndOffset();
-        } catch (IOException e) {
-            throw new HyracksDataException(e);
-        }
-        // Add inverted-list element fields.
-        for (int i = 0; i < invListFieldCount; i++) {
-            tupleBuilder.addField(inputTuple.getFieldData(i + 1), inputTuple.getFieldStart(i + 1),
-                    inputTuple.getFieldLength(i + 1));
-        }
-        // Reset tuple reference for insert operation.
-        tupleReference.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-    }
-
-    public short getNumTokens() {
-        return numTokens;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/pom.xml b/hyracks-storage-am-lsm-rtree/pom.xml
deleted file mode 100644
index 968d620..0000000
--- a/hyracks-storage-am-lsm-rtree/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-lsm-rtree</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-lsm-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-rtree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  		
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/AbstractLSMRTreeDataflowHelper.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/AbstractLSMRTreeDataflowHelper.java
deleted file mode 100644
index c363c99..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/AbstractLSMRTreeDataflowHelper.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public abstract class AbstractLSMRTreeDataflowHelper extends AbstractLSMIndexDataflowHelper {
-
-    protected final IBinaryComparatorFactory[] btreeComparatorFactories;
-    protected final IPrimitiveValueProviderFactory[] valueProviderFactories;
-    protected final RTreePolicyType rtreePolicyType;
-    protected final ILinearizeComparatorFactory linearizeCmpFactory;
-
-    public AbstractLSMRTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            IBinaryComparatorFactory[] btreeComparatorFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) {
-        this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, btreeComparatorFactories,
-                valueProviderFactories, rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider, linearizeCmpFactory);
-    }
-
-    public AbstractLSMRTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            int memPageSize, int memNumPages, IBinaryComparatorFactory[] btreeComparatorFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) {
-        super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider);
-        this.btreeComparatorFactories = btreeComparatorFactories;
-        this.valueProviderFactories = valueProviderFactories;
-        this.rtreePolicyType = rtreePolicyType;
-        this.linearizeCmpFactory = linearizeCmpFactory;
-    }
-
-    @Override
-    public ITreeIndex createIndexInstance() throws HyracksDataException {
-        AbstractTreeIndexOperatorDescriptor treeOpDesc = (AbstractTreeIndexOperatorDescriptor) opDesc;
-        ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
-        IInMemoryBufferCache memBufferCache = new DualIndexInMemoryBufferCache(new HeapBufferAllocator(), memPageSize,
-                memNumPages);
-        IInMemoryFreePageManager memFreePageManager = new DualIndexInMemoryFreePageManager(memNumPages,
-                metaDataFrameFactory);
-        return createLSMTree(memBufferCache, memFreePageManager, ctx.getIOManager(), file, opDesc.getStorageManager()
-                .getBufferCache(ctx), opDesc.getStorageManager().getFileMapProvider(ctx),
-                treeOpDesc.getTreeIndexTypeTraits(), treeOpDesc.getTreeIndexComparatorFactories(),
-                btreeComparatorFactories, valueProviderFactories, rtreePolicyType, linearizeCmpFactory, partition);
-
-    }
-
-    protected abstract ITreeIndex createLSMTree(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILinearizeComparatorFactory linearizeCmpFactory, int startIODeviceIndex) throws HyracksDataException;
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
deleted file mode 100644
index 1df914e..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMRTreeDataflowHelper extends AbstractLSMRTreeDataflowHelper {
-
-    public LSMRTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            IBinaryComparatorFactory[] btreeComparatorFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) {
-        super(opDesc, ctx, partition, btreeComparatorFactories, valueProviderFactories, rtreePolicyType, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory);
-    }
-
-    public LSMRTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            int memPageSize, int memNumPages, IBinaryComparatorFactory[] btreeComparatorFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) {
-        super(opDesc, ctx, partition, memPageSize, memNumPages, btreeComparatorFactories, valueProviderFactories,
-                rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory);
-    }
-
-    @Override
-    protected ITreeIndex createLSMTree(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILinearizeComparatorFactory linearizeCmpFactory, int startIODeviceIndex) throws HyracksDataException {
-        try {
-            return LSMRTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache,
-                    diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
-                    rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider,
-                    linearizeCmpFactory, startIODeviceIndex);
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelperFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelperFactory.java
deleted file mode 100644
index a730895..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelperFactory.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.AbstractLSMIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class LSMRTreeDataflowHelperFactory extends AbstractLSMIndexDataflowHelperFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final IBinaryComparatorFactory[] btreeComparatorFactories;
-    private final IPrimitiveValueProviderFactory[] valueProviderFactories;
-    private final RTreePolicyType rtreePolicyType;
-    private final ILinearizeComparatorFactory linearizeCmpFactory;
-
-    public LSMRTreeDataflowHelperFactory(IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType, IBinaryComparatorFactory[] btreeComparatorFactories,
-            ILSMMergePolicyProvider mergePolicyProvider, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationSchedulerProvider ioSchedulerProvider, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory, int memPageSize, int memNumPages) {
-        super(mergePolicyProvider, opTrackerFactory, ioSchedulerProvider, ioOpCallbackProvider, memPageSize,
-                memNumPages);
-        this.btreeComparatorFactories = btreeComparatorFactories;
-        this.valueProviderFactories = valueProviderFactories;
-        this.rtreePolicyType = rtreePolicyType;
-        this.linearizeCmpFactory = linearizeCmpFactory;
-    }
-
-    @Override
-    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        return new LSMRTreeDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages, btreeComparatorFactories, valueProviderFactories,
-                rtreePolicyType, mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory,
-                ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider, linearizeCmpFactory);
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
deleted file mode 100644
index 6f5ecb1..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMRTreeWithAntiMatterTuplesDataflowHelper extends AbstractLSMRTreeDataflowHelper {
-    public LSMRTreeWithAntiMatterTuplesDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, IBinaryComparatorFactory[] btreeComparatorFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) {
-        super(opDesc, ctx, partition, btreeComparatorFactories, valueProviderFactories, rtreePolicyType, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory);
-    }
-
-    public LSMRTreeWithAntiMatterTuplesDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, int memPageSize, int memNumPages, IBinaryComparatorFactory[] btreeComparatorFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) {
-        super(opDesc, ctx, partition, memPageSize, memNumPages, btreeComparatorFactories, valueProviderFactories,
-                rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory);
-    }
-
-    @Override
-    protected ITreeIndex createLSMTree(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILinearizeComparatorFactory linearizeCmpFactory, int startIODeviceIndex) throws HyracksDataException {
-        try {
-            return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(memBufferCache, memFreePageManager, ioManager, file,
-                    diskBufferCache, diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                    valueProviderFactories, rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler,
-                    ioOpCallbackProvider, linearizeCmpFactory, startIODeviceIndex);
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelperFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelperFactory.java
deleted file mode 100644
index b27e84f..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelperFactory.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class LSMRTreeWithAntiMatterTuplesDataflowHelperFactory implements IIndexDataflowHelperFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final IBinaryComparatorFactory[] btreeComparatorFactories;
-    private final IPrimitiveValueProviderFactory[] valueProviderFactories;
-    private final RTreePolicyType rtreePolicyType;
-    private final ILSMMergePolicyProvider mergePolicyProvider;
-    private final ILSMOperationTrackerFactory opTrackerProvider;
-    private final ILSMIOOperationSchedulerProvider ioSchedulerProvider;
-    private final ILSMIOOperationCallbackProvider ioOpCallbackProvider;
-    private final ILinearizeComparatorFactory linearizeCmpFactory;
-
-    public LSMRTreeWithAntiMatterTuplesDataflowHelperFactory(IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType, IBinaryComparatorFactory[] btreeComparatorFactories,
-            ILSMMergePolicyProvider mergePolicyProvider, ILSMOperationTrackerFactory opTrackerProvider,
-            ILSMIOOperationSchedulerProvider ioSchedulerProvider, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) {
-        this.btreeComparatorFactories = btreeComparatorFactories;
-        this.valueProviderFactories = valueProviderFactories;
-        this.rtreePolicyType = rtreePolicyType;
-        this.mergePolicyProvider = mergePolicyProvider;
-        this.ioSchedulerProvider = ioSchedulerProvider;
-        this.opTrackerProvider = opTrackerProvider;
-        this.ioOpCallbackProvider = ioOpCallbackProvider;
-        this.linearizeCmpFactory = linearizeCmpFactory;
-    }
-
-    @Override
-    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        return new LSMRTreeWithAntiMatterTuplesDataflowHelper(opDesc, ctx, partition, btreeComparatorFactories,
-                valueProviderFactories, rtreePolicyType, mergePolicyProvider.getMergePolicy(ctx), opTrackerProvider,
-                ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider, linearizeCmpFactory);
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
deleted file mode 100644
index 23137ab..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
+++ /dev/null
@@ -1,357 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.io.File;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponentFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BlockingIOOperationCallbackWrapper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public abstract class AbstractLSMRTree extends AbstractLSMIndex implements ITreeIndex {
-
-    protected final ILinearizeComparatorFactory linearizer;
-    protected final int[] comparatorFields;
-    protected final IBinaryComparatorFactory[] linearizerArray;
-
-    // In-memory components.
-    protected final LSMRTreeMutableComponent mutableComponent;
-    protected final IInMemoryBufferCache memBufferCache;
-
-    protected TreeTupleSorter rTreeTupleSorter;
-
-    // On-disk components.
-    // For creating RTree's used in flush and merge.
-    protected final ILSMComponentFactory componentFactory;
-
-    private IBinaryComparatorFactory[] btreeCmpFactories;
-    private IBinaryComparatorFactory[] rtreeCmpFactories;
-
-    // Common for in-memory and on-disk components.
-    protected final ITreeIndexFrameFactory rtreeInteriorFrameFactory;
-    protected final ITreeIndexFrameFactory btreeInteriorFrameFactory;
-    protected final ITreeIndexFrameFactory rtreeLeafFrameFactory;
-    protected final ITreeIndexFrameFactory btreeLeafFrameFactory;
-
-    public AbstractLSMRTree(IInMemoryBufferCache memBufferCache, IInMemoryFreePageManager memFreePageManager,
-            ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory rtreeLeafFrameFactory,
-            ITreeIndexFrameFactory btreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
-            ILSMIndexFileManager fileManager, TreeIndexFactory<RTree> diskRTreeFactory,
-            ILSMComponentFactory componentFactory, IFileMapProvider diskFileMapProvider, int fieldCount,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            ILinearizeComparatorFactory linearizer, int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(memFreePageManager, diskRTreeFactory.getBufferCache(), fileManager, diskFileMapProvider, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        RTree memRTree = new RTree(memBufferCache, ((InMemoryBufferCache) memBufferCache).getFileMapProvider(),
-                memFreePageManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories, fieldCount,
-                new FileReference(new File("memrtree")));
-        BTree memBTree = new BTree(memBufferCache, ((InMemoryBufferCache) memBufferCache).getFileMapProvider(),
-                memFreePageManager, btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories, fieldCount,
-                new FileReference(new File("membtree")));
-        mutableComponent = new LSMRTreeMutableComponent(memRTree, memBTree, memFreePageManager);
-        this.memBufferCache = memBufferCache;
-        this.rtreeInteriorFrameFactory = rtreeInteriorFrameFactory;
-        this.rtreeLeafFrameFactory = rtreeLeafFrameFactory;
-        this.btreeInteriorFrameFactory = btreeInteriorFrameFactory;
-        this.btreeLeafFrameFactory = btreeLeafFrameFactory;
-        this.componentFactory = componentFactory;
-        this.btreeCmpFactories = btreeCmpFactories;
-        this.rtreeCmpFactories = rtreeCmpFactories;
-        this.linearizer = linearizer;
-        this.comparatorFields = comparatorFields;
-        this.linearizerArray = linearizerArray;
-        rTreeTupleSorter = null;
-    }
-
-    @Override
-    public synchronized void create() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to create the index since it is activated.");
-        }
-
-        fileManager.deleteDirs();
-        fileManager.createDirs();
-        componentsRef.get().clear();
-    }
-
-    @Override
-    public synchronized void activate() throws HyracksDataException {
-        if (isActivated) {
-            return;
-        }
-
-        ((InMemoryBufferCache) mutableComponent.getRTree().getBufferCache()).open();
-        mutableComponent.getRTree().create();
-        mutableComponent.getBTree().create();
-        mutableComponent.getRTree().activate();
-        mutableComponent.getBTree().activate();
-    }
-
-    @Override
-    public synchronized void deactivate(boolean flushOnExit) throws HyracksDataException {
-        if (!isActivated) {
-            return;
-        }
-
-        if (flushOnExit) {
-            BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(
-                    ioOpCallbackProvider.getIOOperationCallback(this));
-            ILSMIndexAccessor accessor = (ILSMIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            accessor.scheduleFlush(cb);
-            try {
-                cb.waitForIO();
-            } catch (InterruptedException e) {
-                throw new HyracksDataException(e);
-            }
-        }
-
-        mutableComponent.getRTree().deactivate();
-        mutableComponent.getBTree().deactivate();
-        mutableComponent.getRTree().destroy();
-        mutableComponent.getBTree().destroy();
-        ((InMemoryBufferCache) mutableComponent.getRTree().getBufferCache()).close();
-    }
-
-    @Override
-    public synchronized void destroy() throws HyracksDataException {
-        if (isActivated) {
-            throw new HyracksDataException("Failed to destroy the index since it is activated.");
-        }
-
-        mutableComponent.getRTree().deactivate();
-        mutableComponent.getBTree().deactivate();
-    }
-
-    @Override
-    public synchronized void clear() throws HyracksDataException {
-        if (!isActivated) {
-            throw new HyracksDataException("Failed to clear the index since it is not activated.");
-        }
-
-        mutableComponent.getRTree().clear();
-        mutableComponent.getBTree().clear();
-    }
-
-    @Override
-    public void getOperationalComponents(ILSMIndexOperationContext ctx) {
-        List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
-        operationalComponents.clear();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        switch (ctx.getOperation()) {
-            case INSERT:
-            case DELETE:
-            case FLUSH:
-                operationalComponents.add(mutableComponent);
-                break;
-            case SEARCH:
-                operationalComponents.add(mutableComponent);
-                operationalComponents.addAll(immutableComponents);
-                break;
-            case MERGE:
-                operationalComponents.addAll(immutableComponents);
-                break;
-            default:
-                throw new UnsupportedOperationException("Operation " + ctx.getOperation() + " not supported.");
-        }
-    }
-
-    protected LSMComponentFileReferences getMergeTargetFileName(List<ILSMComponent> mergingDiskComponents)
-            throws HyracksDataException {
-        RTree firstTree = ((LSMRTreeImmutableComponent) mergingDiskComponents.get(0)).getRTree();
-        RTree lastTree = ((LSMRTreeImmutableComponent) mergingDiskComponents.get(mergingDiskComponents.size() - 1))
-                .getRTree();
-        FileReference firstFile = diskFileMapProvider.lookupFileName(firstTree.getFileId());
-        FileReference lastFile = diskFileMapProvider.lookupFileName(lastTree.getFileId());
-        LSMComponentFileReferences fileRefs = fileManager.getRelMergeFileReference(firstFile.getFile().getName(),
-                lastFile.getFile().getName());
-        return fileRefs;
-    }
-
-    protected LSMRTreeImmutableComponent createDiskComponent(ILSMComponentFactory factory, FileReference insertFileRef,
-            FileReference deleteFileRef, FileReference bloomFilterFileRef, boolean createComponent)
-            throws HyracksDataException, IndexException {
-        // Create new tree instance.
-        LSMRTreeImmutableComponent component = (LSMRTreeImmutableComponent) factory
-                .createLSMComponentInstance(new LSMComponentFileReferences(insertFileRef, deleteFileRef,
-                        bloomFilterFileRef));
-        if (createComponent) {
-            component.getRTree().create();
-            if (component.getBTree() != null) {
-                component.getBTree().create();
-                component.getBloomFilter().create();
-            }
-        }
-        // Tree will be closed during cleanup of merge().
-        component.getRTree().activate();
-        if (component.getBTree() != null) {
-            component.getBTree().activate();
-            component.getBloomFilter().activate();
-        }
-        return component;
-    }
-
-    @Override
-    public ITreeIndexFrameFactory getLeafFrameFactory() {
-        return mutableComponent.getRTree().getLeafFrameFactory();
-    }
-
-    @Override
-    public ITreeIndexFrameFactory getInteriorFrameFactory() {
-        return mutableComponent.getRTree().getInteriorFrameFactory();
-    }
-
-    @Override
-    public IFreePageManager getFreePageManager() {
-        return mutableComponent.getRTree().getFreePageManager();
-    }
-
-    @Override
-    public int getFieldCount() {
-        return mutableComponent.getRTree().getFieldCount();
-    }
-
-    @Override
-    public int getRootPageId() {
-        return mutableComponent.getRTree().getRootPageId();
-    }
-
-    @Override
-    public int getFileId() {
-        return mutableComponent.getRTree().getFileId();
-    }
-
-    @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException, IndexException {
-        LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
-        if (ctx.getOperation() == IndexOperation.PHYSICALDELETE) {
-            throw new UnsupportedOperationException("Physical delete not supported in the LSM-RTree");
-        }
-
-        if (ctx.getOperation() == IndexOperation.INSERT) {
-            // Before each insert, we must check whether there exist a killer
-            // tuple in the memBTree. If we find a killer tuple, we must truly
-            // delete the existing tuple from the BTree, and then insert it to
-            // memRTree. Otherwise, the old killer tuple will kill the newly
-            // added RTree tuple.
-            RangePredicate btreeRangePredicate = new RangePredicate(tuple, tuple, true, true,
-                    ctx.getBTreeMultiComparator(), ctx.getBTreeMultiComparator());
-            ITreeIndexCursor cursor = ctx.memBTreeAccessor.createSearchCursor();
-            ctx.memBTreeAccessor.search(cursor, btreeRangePredicate);
-            boolean foundTupleInMemoryBTree = false;
-            try {
-                if (cursor.hasNext()) {
-                    foundTupleInMemoryBTree = true;
-                }
-            } finally {
-                cursor.close();
-            }
-            if (foundTupleInMemoryBTree) {
-                try {
-                    ctx.memBTreeAccessor.delete(tuple);
-                } catch (BTreeNonExistentKeyException e) {
-                    // Tuple has been deleted in the meantime. Do nothing.
-                    // This normally shouldn't happen if we are dealing with
-                    // good citizens since LSMRTree is used as a secondary
-                    // index and a tuple shouldn't be deleted twice without
-                    // insert between them.
-                }
-            } else {
-                ctx.memRTreeAccessor.insert(tuple);
-            }
-
-        } else {
-            try {
-                ctx.memBTreeAccessor.insert(tuple);
-            } catch (BTreeDuplicateKeyException e) {
-                // Do nothing, because one delete tuple is enough to indicate
-                // that all the corresponding insert tuples are deleted
-            }
-        }
-    }
-
-    protected LSMRTreeOpContext createOpContext(IModificationOperationCallback modCallback) {
-        return new LSMRTreeOpContext((RTree.RTreeAccessor) mutableComponent.getRTree().createAccessor(modCallback,
-                NoOpOperationCallback.INSTANCE), (IRTreeLeafFrame) rtreeLeafFrameFactory.createFrame(),
-                (IRTreeInteriorFrame) rtreeInteriorFrameFactory.createFrame(), memFreePageManager
-                        .getMetaDataFrameFactory().createFrame(), 4, (BTree.BTreeAccessor) mutableComponent.getBTree()
-                        .createAccessor(modCallback, NoOpOperationCallback.INSTANCE), btreeLeafFrameFactory,
-                btreeInteriorFrameFactory, memFreePageManager.getMetaDataFrameFactory().createFrame(),
-                rtreeCmpFactories, btreeCmpFactories, null, null);
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        return rtreeCmpFactories;
-    }
-
-    public boolean isEmptyIndex() throws HyracksDataException {
-        return componentsRef.get().isEmpty()
-                && mutableComponent.getBTree().isEmptyTree(
-                        mutableComponent.getBTree().getInteriorFrameFactory().createFrame())
-                && mutableComponent.getRTree().isEmptyTree(
-                        mutableComponent.getRTree().getInteriorFrameFactory().createFrame());
-    }
-
-    @Override
-    public void validate() throws HyracksDataException {
-        throw new UnsupportedOperationException("Validation not implemented for LSM R-Trees.");
-    }
-
-    @Override
-    public long getMemoryAllocationSize() {
-        InMemoryBufferCache memBufferCache = (InMemoryBufferCache) mutableComponent.getRTree().getBufferCache();
-        return memBufferCache.getNumPages() * memBufferCache.getPageSize();
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
deleted file mode 100644
index 3bffb43..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
+++ /dev/null
@@ -1,474 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.util.List;
-import java.util.ListIterator;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomCalculations;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterSpecification;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMRTree extends AbstractLSMRTree {
-
-    public LSMRTree(IInMemoryBufferCache memBufferCache, IInMemoryFreePageManager memFreePageManager,
-            ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory rtreeLeafFrameFactory,
-            ITreeIndexFrameFactory btreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
-            ILSMIndexFileManager fileNameManager, TreeIndexFactory<RTree> diskRTreeFactory,
-            TreeIndexFactory<BTree> diskBTreeFactory, BloomFilterFactory bloomFilterFactory,
-            IFileMapProvider diskFileMapProvider, int fieldCount, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, ILinearizeComparatorFactory linearizer,
-            int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
-                btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager, diskRTreeFactory,
-                new LSMRTreeComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory),
-                diskFileMapProvider, fieldCount, rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields,
-                linearizerArray, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-    }
-
-    /**
-     * Opens LSMRTree, cleaning up invalid files from base dir, and registering
-     * all valid files as on-disk RTrees and BTrees.
-     * 
-     * @param fileReference
-     *            Dummy file id.
-     * @throws HyracksDataException
-     */
-    @Override
-    public synchronized void activate() throws HyracksDataException {
-        super.activate();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        List<LSMComponentFileReferences> validFileReferences;
-        try {
-            validFileReferences = fileManager.cleanupAndGetValidFiles();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-        immutableComponents.clear();
-        for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
-            LSMRTreeImmutableComponent component;
-            try {
-                component = createDiskComponent(componentFactory,
-                        lsmComonentFileReference.getInsertIndexFileReference(),
-                        lsmComonentFileReference.getDeleteIndexFileReference(),
-                        lsmComonentFileReference.getBloomFilterFileReference(), false);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-            immutableComponents.add(component);
-        }
-        isActivated = true;
-    }
-
-    @Override
-    public synchronized void deactivate(boolean flushOnExit) throws HyracksDataException {
-        super.deactivate(flushOnExit);
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMRTreeImmutableComponent component = (LSMRTreeImmutableComponent) c;
-            RTree rtree = component.getRTree();
-            BTree btree = component.getBTree();
-            BloomFilter bloomFilter = component.getBloomFilter();
-            rtree.deactivate();
-            btree.deactivate();
-            bloomFilter.deactivate();
-        }
-        isActivated = false;
-    }
-
-    @Override
-    public synchronized void deactivate() throws HyracksDataException {
-        deactivate(true);
-    }
-
-    @Override
-    public synchronized void destroy() throws HyracksDataException {
-        super.destroy();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMRTreeImmutableComponent component = (LSMRTreeImmutableComponent) c;
-            component.getBTree().destroy();
-            component.getBloomFilter().destroy();
-            component.getRTree().destroy();
-        }
-        fileManager.deleteDirs();
-    }
-
-    @Override
-    public synchronized void clear() throws HyracksDataException {
-        super.clear();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            LSMRTreeImmutableComponent component = (LSMRTreeImmutableComponent) c;
-            component.getBTree().deactivate();
-            component.getBloomFilter().deactivate();
-            component.getRTree().deactivate();
-            component.getBTree().destroy();
-            component.getBloomFilter().destroy();
-            component.getRTree().destroy();
-        }
-        immutableComponents.clear();
-    }
-
-    @Override
-    public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
-        LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
-        List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
-        boolean includeMutableComponent = operationalComponents.get(0) == mutableComponent;
-        int numTrees = operationalComponents.size();
-
-        ListIterator<ILSMComponent> diskComponentIter = operationalComponents.listIterator();
-        ITreeIndexAccessor[] rTreeAccessors = new ITreeIndexAccessor[numTrees];
-        ITreeIndexAccessor[] bTreeAccessors = new ITreeIndexAccessor[numTrees];
-        int diskComponentIx = 0;
-        if (includeMutableComponent) {
-            rTreeAccessors[0] = ctx.memRTreeAccessor;
-            bTreeAccessors[0] = ctx.memBTreeAccessor;
-            diskComponentIx++;
-            diskComponentIter.next();
-        }
-
-        while (diskComponentIter.hasNext()) {
-            LSMRTreeImmutableComponent component = (LSMRTreeImmutableComponent) diskComponentIter.next();
-            RTree diskRTree = component.getRTree();
-            BTree diskBTree = component.getBTree();
-            rTreeAccessors[diskComponentIx] = diskRTree.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            bTreeAccessors[diskComponentIx] = diskBTree.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            diskComponentIx++;
-        }
-
-        LSMRTreeCursorInitialState initialState = new LSMRTreeCursorInitialState(numTrees, rtreeLeafFrameFactory,
-                rtreeInteriorFrameFactory, btreeLeafFrameFactory, ctx.getBTreeMultiComparator(), rTreeAccessors,
-                bTreeAccessors, includeMutableComponent, lsmHarness, comparatorFields, linearizerArray,
-                ctx.searchCallback, operationalComponents);
-        cursor.open(initialState, pred);
-    }
-
-    @Override
-    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException {
-        LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
-        ILSMIndexOperationContext rctx = createOpContext(NoOpOperationCallback.INSTANCE);
-        LSMRTreeMutableComponent flushingComponent = (LSMRTreeMutableComponent) ctx.getComponentHolder().get(0);
-        rctx.setOperation(IndexOperation.FLUSH);
-        rctx.getComponentHolder().addAll(ctx.getComponentHolder());
-        LSMRTreeAccessor accessor = new LSMRTreeAccessor(lsmHarness, rctx);
-        ioScheduler.scheduleOperation(new LSMRTreeFlushOperation(accessor, flushingComponent, componentFileRefs
-                .getInsertIndexFileReference(), componentFileRefs.getDeleteIndexFileReference(), componentFileRefs
-                .getBloomFilterFileReference(), callback));
-    }
-
-    @Override
-    public ILSMComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
-        LSMRTreeMutableComponent flushingComponent = (LSMRTreeMutableComponent) flushOp.getFlushingComponent();
-        // Renaming order is critical because we use assume ordering when we
-        // read the file names when we open the tree.
-        // The RTree should be renamed before the BTree.
-
-        // scan the memory RTree
-        ITreeIndexAccessor memRTreeAccessor = flushingComponent.getRTree().createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        RTreeSearchCursor rtreeScanCursor = (RTreeSearchCursor) memRTreeAccessor.createSearchCursor();
-        SearchPredicate rtreeNullPredicate = new SearchPredicate(null, null);
-        memRTreeAccessor.search(rtreeScanCursor, rtreeNullPredicate);
-        LSMRTreeImmutableComponent component = createDiskComponent(componentFactory, flushOp.getRTreeFlushTarget(),
-                flushOp.getBTreeFlushTarget(), flushOp.getBloomFilterFlushTarget(), true);
-        RTree diskRTree = component.getRTree();
-        IIndexBulkLoader rTreeBulkloader;
-        ITreeIndexCursor cursor;
-
-        IBinaryComparatorFactory[] linearizerArray = { linearizer };
-
-        if (rTreeTupleSorter == null) {
-            rTreeTupleSorter = new TreeTupleSorter(flushingComponent.getRTree().getFileId(), linearizerArray,
-                    rtreeLeafFrameFactory.createFrame(), rtreeLeafFrameFactory.createFrame(), flushingComponent
-                            .getRTree().getBufferCache(), comparatorFields);
-        } else {
-            rTreeTupleSorter.reset();
-        }
-        // BulkLoad the tuples from the in-memory tree into the new disk
-        // RTree.
-
-        boolean isEmpty = true;
-        try {
-            while (rtreeScanCursor.hasNext()) {
-                isEmpty = false;
-                rtreeScanCursor.next();
-                rTreeTupleSorter.insertTupleEntry(rtreeScanCursor.getPageId(), rtreeScanCursor.getTupleOffset());
-            }
-        } finally {
-            rtreeScanCursor.close();
-        }
-        if (!isEmpty) {
-            rTreeTupleSorter.sort();
-
-            rTreeBulkloader = diskRTree.createBulkLoader(1.0f, false, 0L);
-            cursor = rTreeTupleSorter;
-
-            try {
-                while (cursor.hasNext()) {
-                    cursor.next();
-                    ITupleReference frameTuple = cursor.getTuple();
-                    rTreeBulkloader.add(frameTuple);
-                }
-            } finally {
-                cursor.close();
-            }
-            rTreeBulkloader.end();
-        }
-
-        ITreeIndexAccessor memBTreeAccessor = flushingComponent.getBTree().createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        RangePredicate btreeNullPredicate = new RangePredicate(null, null, true, true, null, null);
-        IIndexCursor btreeCountingCursor = ((BTreeAccessor) memBTreeAccessor).createCountingSearchCursor();
-        memBTreeAccessor.search(btreeCountingCursor, btreeNullPredicate);
-        long numBTreeTuples = 0L;
-        try {
-            while (btreeCountingCursor.hasNext()) {
-                btreeCountingCursor.next();
-                ITupleReference countTuple = btreeCountingCursor.getTuple();
-                numBTreeTuples = IntegerSerializerDeserializer.getInt(countTuple.getFieldData(0),
-                        countTuple.getFieldStart(0));
-            }
-        } finally {
-            btreeCountingCursor.close();
-        }
-
-        if (numBTreeTuples > 0) {
-            int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numBTreeTuples);
-            BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
-                    MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
-
-            IIndexCursor btreeScanCursor = memBTreeAccessor.createSearchCursor();
-            memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
-            BTree diskBTree = component.getBTree();
-
-            // BulkLoad the tuples from the in-memory tree into the new disk BTree.
-            IIndexBulkLoader bTreeBulkloader = diskBTree.createBulkLoader(1.0f, false, numBTreeTuples);
-            IIndexBulkLoader builder = component.getBloomFilter().createBuilder(numBTreeTuples,
-                    bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements());
-            // scan the memory BTree
-            try {
-                while (btreeScanCursor.hasNext()) {
-                    btreeScanCursor.next();
-                    ITupleReference frameTuple = btreeScanCursor.getTuple();
-                    bTreeBulkloader.add(frameTuple);
-                    builder.add(frameTuple);
-                }
-            } finally {
-                btreeScanCursor.close();
-                builder.end();
-            }
-            bTreeBulkloader.end();
-        }
-
-        return component;
-    }
-
-    @Override
-    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
-        // Renaming order is critical because we use assume ordering when we
-        // read the file names when we open the tree.
-        // The RTree should be renamed before the BTree.
-        List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
-        ILSMIndexOperationContext rctx = createOpContext(NoOpOperationCallback.INSTANCE);
-        rctx.getComponentHolder().addAll(mergingComponents);
-        ITreeIndexCursor cursor = new LSMRTreeSortedCursor(rctx, linearizer);
-        ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
-        search(rctx, cursor, rtreeSearchPred);
-
-        rctx.setOperation(IndexOperation.MERGE);
-        LSMComponentFileReferences relMergeFileRefs = getMergeTargetFileName(mergingComponents);
-        ILSMIndexAccessorInternal accessor = new LSMRTreeAccessor(lsmHarness, rctx);
-        ioScheduler.scheduleOperation(new LSMRTreeMergeOperation((ILSMIndexAccessorInternal) accessor,
-                mergingComponents, cursor, relMergeFileRefs.getInsertIndexFileReference(), relMergeFileRefs
-                        .getDeleteIndexFileReference(), relMergeFileRefs.getBloomFilterFileReference(), callback));
-    }
-
-    @Override
-    public ILSMComponent merge(List<ILSMComponent> mergedComponents, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
-        LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
-        ITreeIndexCursor cursor = mergeOp.getCursor();
-        mergedComponents.addAll(mergeOp.getMergingComponents());
-
-        LSMRTreeImmutableComponent mergedComponent = createDiskComponent(componentFactory,
-                mergeOp.getRTreeMergeTarget(), mergeOp.getBTreeMergeTarget(), mergeOp.getBloomFilterMergeTarget(), true);
-        IIndexBulkLoader bulkLoader = mergedComponent.getRTree().createBulkLoader(1.0f, false, 0L);
-
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-                ITupleReference frameTuple = cursor.getTuple();
-                bulkLoader.add(frameTuple);
-            }
-        } finally {
-            cursor.close();
-        }
-        bulkLoader.end();
-        return mergedComponent;
-    }
-
-    @Override
-    public ILSMIndexAccessorInternal createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new LSMRTreeAccessor(lsmHarness, createOpContext(modificationCallback));
-    }
-
-    public class LSMRTreeAccessor extends LSMTreeIndexAccessor {
-        public LSMRTreeAccessor(ILSMHarness lsmHarness, ILSMIndexOperationContext ctx) {
-            super(lsmHarness, ctx);
-        }
-
-        @Override
-        public ITreeIndexCursor createSearchCursor() {
-            return new LSMRTreeSearchCursor(ctx);
-        }
-
-        public MultiComparator getMultiComparator() {
-            LSMRTreeOpContext concreteCtx = (LSMRTreeOpContext) ctx;
-            return concreteCtx.rtreeOpContext.cmp;
-        }
-    }
-
-    private ILSMComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
-        LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
-        return createDiskComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
-                componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true);
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint)
-            throws TreeIndexException {
-        return new LSMRTreeBulkLoader(fillLevel, verifyInput, numElementsHint);
-    }
-
-    public class LSMRTreeBulkLoader implements IIndexBulkLoader {
-        private final ILSMComponent component;
-        private final IIndexBulkLoader bulkLoader;
-
-        public LSMRTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-                throws TreeIndexException {
-            // Note that by using a flush target file name, we state that the
-            // new bulk loaded tree is "newer" than any other merged tree.
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException e) {
-                throw new TreeIndexException(e);
-            } catch (IndexException e) {
-                throw new TreeIndexException(e);
-            }
-            bulkLoader = ((LSMRTreeImmutableComponent) component).getRTree().createBulkLoader(fillFactor, verifyInput,
-                    numElementsHint);
-        }
-
-        @Override
-        public void add(ITupleReference tuple) throws HyracksDataException, IndexException {
-            try {
-                bulkLoader.add(tuple);
-            } catch (IndexException e) {
-                handleException();
-                throw e;
-            } catch (HyracksDataException e) {
-                handleException();
-                throw e;
-            } catch (RuntimeException e) {
-                handleException();
-                throw e;
-            }
-        }
-
-        @Override
-        public void end() throws HyracksDataException, IndexException {
-            bulkLoader.end();
-            lsmHarness.addBulkLoadedComponent(component);
-        }
-
-        protected void handleException() throws HyracksDataException {
-            ((LSMRTreeImmutableComponent) component).getRTree().deactivate();
-            ((LSMRTreeImmutableComponent) component).getRTree().destroy();
-            ((LSMRTreeImmutableComponent) component).getBTree().deactivate();
-            ((LSMRTreeImmutableComponent) component).getBTree().destroy();
-            ((LSMRTreeImmutableComponent) component).getBloomFilter().deactivate();
-            ((LSMRTreeImmutableComponent) component).getBloomFilter().destroy();
-        }
-    }
-
-    @Override
-    public void markAsValid(ILSMComponent lsmComponent) throws HyracksDataException {
-        LSMRTreeImmutableComponent component = (LSMRTreeImmutableComponent) lsmComponent;
-        // Flush the bloom filter first.
-        int fileId = component.getBloomFilter().getFileId();
-        IBufferCache bufferCache = component.getBTree().getBufferCache();
-        int startPage = 0;
-        int maxPage = component.getBloomFilter().getNumPages();
-        forceFlushDirtyPages(bufferCache, fileId, startPage, maxPage);
-        forceFlushDirtyPages(component.getRTree());
-        markAsValidInternal(component.getRTree());
-        forceFlushDirtyPages(component.getBTree());
-        markAsValidInternal(component.getBTree());
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
deleted file mode 100644
index 5a72f29..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
+++ /dev/null
@@ -1,143 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;

-

-import java.util.List;

-

-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;

-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;

-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;

-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;

-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;

-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;

-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;

-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;

-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;

-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;

-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BloomFilterAwareBTreePointSearchCursor;

-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;

-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;

-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;

-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;

-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;

-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;

-

-public abstract class LSMRTreeAbstractCursor implements ITreeIndexCursor {

-

-    protected RTreeSearchCursor[] rtreeCursors;

-    protected boolean open = false;

-    protected ITreeIndexCursor[] btreeCursors;

-    protected ITreeIndexAccessor[] rTreeAccessors;

-    protected ITreeIndexAccessor[] bTreeAccessors;

-    private MultiComparator btreeCmp;

-    protected int numberOfTrees;

-    protected SearchPredicate rtreeSearchPredicate;

-    protected RangePredicate btreeRangePredicate;

-    protected ITupleReference frameTuple;

-    protected boolean includeMemRTree;

-    protected ILSMHarness lsmHarness;

-    protected boolean foundNext;

-    protected final ILSMIndexOperationContext opCtx;

-

-    protected List<ILSMComponent> operationalComponents;

-

-    public LSMRTreeAbstractCursor(ILSMIndexOperationContext opCtx) {

-        super();

-        this.opCtx = opCtx;

-    }

-

-    public RTreeSearchCursor getCursor(int cursorIndex) {

-        return rtreeCursors[cursorIndex];

-    }

-

-    @Override

-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {

-        LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;

-        btreeCmp = lsmInitialState.getBTreeCmp();

-        includeMemRTree = lsmInitialState.getIncludeMemComponent();

-        operationalComponents = lsmInitialState.getOperationalComponents();

-        lsmHarness = lsmInitialState.getLSMHarness();

-        numberOfTrees = lsmInitialState.getNumberOfTrees();

-        rTreeAccessors = lsmInitialState.getRTreeAccessors();

-        bTreeAccessors = lsmInitialState.getBTreeAccessors();

-

-        rtreeCursors = new RTreeSearchCursor[numberOfTrees];

-        btreeCursors = new ITreeIndexCursor[numberOfTrees];

-

-        int i = 0;

-        if (includeMemRTree) {

-            rtreeCursors[i] = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState

-                    .getRTreeInteriorFrameFactory().createFrame(), (IRTreeLeafFrame) lsmInitialState

-                    .getRTreeLeafFrameFactory().createFrame());

-

-            // No need for a bloom filter for the in-memory BTree.

-            btreeCursors[i] = new BTreeRangeSearchCursor((IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory()

-                    .createFrame(), false);

-            ++i;

-        }

-        for (; i < numberOfTrees; i++) {

-            rtreeCursors[i] = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState

-                    .getRTreeInteriorFrameFactory().createFrame(), (IRTreeLeafFrame) lsmInitialState

-                    .getRTreeLeafFrameFactory().createFrame());

-

-            btreeCursors[i] = new BloomFilterAwareBTreePointSearchCursor((IBTreeLeafFrame) lsmInitialState

-                    .getBTreeLeafFrameFactory().createFrame(), false,

-                    ((LSMRTreeImmutableComponent) operationalComponents.get(i)).getBloomFilter());

-        }

-

-        rtreeSearchPredicate = (SearchPredicate) searchPred;

-        btreeRangePredicate = new RangePredicate(null, null, true, true, btreeCmp, btreeCmp);

-

-        open = true;

-    }

-

-    @Override

-    public ICachedPage getPage() {

-        // do nothing

-        return null;

-    }

-

-    @Override

-    public void close() throws HyracksDataException {

-        if (!open) {

-            return;

-        }

-

-        try {

-            if (rtreeCursors != null && btreeCursors != null) {

-                for (int i = 0; i < numberOfTrees; i++) {

-                    rtreeCursors[i].close();

-                    btreeCursors[i].close();

-                }

-            }

-            rtreeCursors = null;

-            btreeCursors = null;

-        } finally {

-            lsmHarness.endSearch(opCtx);

-        }

-

-        open = false;

-    }

-

-    @Override

-    public void setBufferCache(IBufferCache bufferCache) {

-        // do nothing

-    }

-

-    @Override

-    public void setFileId(int fileId) {

-        // do nothing

-    }

-

-    @Override

-    public ITupleReference getTuple() {

-        return frameTuple;

-    }

-

-    @Override

-    public boolean exclusiveLatchNodes() {

-        return false;

-    }

-

-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeComponentFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeComponentFactory.java
deleted file mode 100644
index 56e3d28..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeComponentFactory.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponentFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class LSMRTreeComponentFactory implements ILSMComponentFactory {
-    private final TreeIndexFactory<RTree> rtreeFactory;
-    private final TreeIndexFactory<BTree> btreeFactory;
-    private final BloomFilterFactory bloomFilterFactory;
-
-    public LSMRTreeComponentFactory(TreeIndexFactory<RTree> rtreeFactory, TreeIndexFactory<BTree> btreeFactory,
-            BloomFilterFactory bloomFilterFactory) {
-        this.rtreeFactory = rtreeFactory;
-        this.btreeFactory = btreeFactory;
-        this.bloomFilterFactory = bloomFilterFactory;
-    }
-
-    @Override
-    public ILSMComponent createLSMComponentInstance(LSMComponentFileReferences cfr) throws IndexException,
-            HyracksDataException {
-        return new LSMRTreeImmutableComponent(rtreeFactory.createIndexInstance(cfr.getInsertIndexFileReference()),
-                btreeFactory.createIndexInstance(cfr.getDeleteIndexFileReference()),
-                bloomFilterFactory.createBloomFiltertInstance(cfr.getBloomFilterFileReference()));
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return rtreeFactory.getBufferCache();
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeCursorInitialState.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeCursorInitialState.java
deleted file mode 100644
index 590d5d8..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeCursorInitialState.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class LSMRTreeCursorInitialState implements ICursorInitialState {
-
-    private final int numberOfTrees;
-    private final ITreeIndexFrameFactory rtreeInteriorFrameFactory;
-    private final ITreeIndexFrameFactory rtreeLeafFrameFactory;
-    private final ITreeIndexFrameFactory btreeLeafFrameFactory;
-    private final MultiComparator btreeCmp;
-    private final MultiComparator hilbertCmp;
-    private final ITreeIndexAccessor[] rTreeAccessors;
-    private final ITreeIndexAccessor[] bTreeAccessors;
-    private final boolean includeMemRTree;
-    private final ILSMHarness lsmHarness;
-    private final int[] comparatorFields;
-
-    private ISearchOperationCallback searchCallback;
-    private final List<ILSMComponent> operationalComponents;
-
-    public LSMRTreeCursorInitialState(int numberOfTrees, ITreeIndexFrameFactory rtreeLeafFrameFactory,
-            ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
-            MultiComparator btreeCmp, ITreeIndexAccessor[] rTreeAccessors, ITreeIndexAccessor[] bTreeAccessors,
-            boolean includeMemRTree, ILSMHarness lsmHarness, int[] comparatorFields,
-            IBinaryComparatorFactory[] linearizerArray, ISearchOperationCallback searchCallback,
-            List<ILSMComponent> operationalComponents) {
-        this.numberOfTrees = numberOfTrees;
-        this.rtreeLeafFrameFactory = rtreeLeafFrameFactory;
-        this.rtreeInteriorFrameFactory = rtreeInteriorFrameFactory;
-        this.btreeLeafFrameFactory = btreeLeafFrameFactory;
-        this.btreeCmp = btreeCmp;
-        this.rTreeAccessors = rTreeAccessors;
-        this.bTreeAccessors = bTreeAccessors;
-        this.includeMemRTree = includeMemRTree;
-        this.lsmHarness = lsmHarness;
-        this.comparatorFields = comparatorFields;
-        this.hilbertCmp = MultiComparator.create(linearizerArray);
-        this.searchCallback = searchCallback;
-        this.operationalComponents = operationalComponents;
-    }
-
-    public MultiComparator getHilbertCmp() {
-        return hilbertCmp;
-    }
-
-    public int[] getComparatorFields() {
-        return comparatorFields;
-    }
-
-    public int getNumberOfTrees() {
-        return numberOfTrees;
-    }
-
-    public ITreeIndexFrameFactory getRTreeInteriorFrameFactory() {
-        return rtreeInteriorFrameFactory;
-    }
-
-    public ITreeIndexFrameFactory getRTreeLeafFrameFactory() {
-        return rtreeLeafFrameFactory;
-    }
-
-    public ITreeIndexFrameFactory getBTreeLeafFrameFactory() {
-        return btreeLeafFrameFactory;
-    }
-
-    public MultiComparator getBTreeCmp() {
-        return btreeCmp;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return null;
-    }
-
-    @Override
-    public void setPage(ICachedPage page) {
-    }
-
-    public List<ILSMComponent> getOperationalComponents() {
-        return operationalComponents;
-    }
-
-    public ITreeIndexAccessor[] getRTreeAccessors() {
-        return rTreeAccessors;
-    }
-
-    public ITreeIndexAccessor[] getBTreeAccessors() {
-        return bTreeAccessors;
-    }
-
-    public boolean getIncludeMemComponent() {
-        return includeMemRTree;
-    }
-
-    public ILSMHarness getLSMHarness() {
-        return lsmHarness;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return searchCallback;
-    }
-
-    @Override
-    public void setSearchOperationCallback(ISearchOperationCallback searchCallback) {
-        this.searchCallback = searchCallback;
-    }
-
-    @Override
-    public MultiComparator getOriginalKeyComparator() {
-        return null;
-    }
-
-    @Override
-    public void setOriginialKeyComparator(MultiComparator originalCmp) {
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
deleted file mode 100644
index e698990..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMRTreeFileManager extends AbstractLSMIndexFileManager {
-    private static final String RTREE_STRING = "r";
-    private static final String BTREE_STRING = "b";
-
-    private final TreeIndexFactory<? extends ITreeIndex> rtreeFactory;
-    private final TreeIndexFactory<? extends ITreeIndex> btreeFactory;
-
-    private static FilenameFilter btreeFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".") && name.endsWith(BTREE_STRING);
-        }
-    };
-
-    private static FilenameFilter rtreeFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".") && name.endsWith(RTREE_STRING);
-        }
-    };
-
-    public LSMRTreeFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
-            TreeIndexFactory<? extends ITreeIndex> rtreeFactory, TreeIndexFactory<? extends ITreeIndex> btreeFactory,
-            int startIODeviceIndex) {
-        super(ioManager, fileMapProvider, file, null, startIODeviceIndex);
-        this.rtreeFactory = rtreeFactory;
-        this.btreeFactory = btreeFactory;
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelFlushFileReference() {
-        Date date = new Date();
-        String ts = formatter.format(date);
-        String baseName = baseDir + ts + SPLIT_STRING + ts;
-        // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
-                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING));
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelMergeFileReference(String firstFileName, String lastFileName)
-            throws HyracksDataException {
-        String[] firstTimestampRange = firstFileName.split(SPLIT_STRING);
-        String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
-
-        String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
-        // Get the range of timestamps by taking the earliest and the latest timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + RTREE_STRING),
-                createMergeFile(baseName + SPLIT_STRING + BTREE_STRING), createMergeFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING));
-    }
-
-    @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
-        List<LSMComponentFileReferences> validFiles = new ArrayList<LSMComponentFileReferences>();
-        ArrayList<ComparableFileName> allRTreeFiles = new ArrayList<ComparableFileName>();
-        ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<ComparableFileName>();
-        ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<ComparableFileName>();
-
-        // Gather files from all IODeviceHandles.
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            cleanupAndGetValidFilesInternal(dev, bloomFilterFilter, null, allBloomFilterFiles);
-            HashSet<String> bloomFilterFilesSet = new HashSet<String>();
-            for (ComparableFileName cmpFileName : allBloomFilterFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                bloomFilterFilesSet.add(cmpFileName.fileName.substring(0, index));
-            }
-
-            // List of valid BTree files that may or may not have a bloom filter buddy. Will check for buddies below.
-            ArrayList<ComparableFileName> tmpAllBTreeFiles = new ArrayList<ComparableFileName>();
-            cleanupAndGetValidFilesInternal(dev, btreeFilter, btreeFactory, tmpAllBTreeFiles);
-            // Look for buddy bloom filters for all valid BTrees. 
-            // If no buddy is found, delete the file, otherwise add the BTree to allBTreeFiles. 
-            HashSet<String> btreeFilesSet = new HashSet<String>();
-            for (ComparableFileName cmpFileName : tmpAllBTreeFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                String file = cmpFileName.fileName.substring(0, index);
-                if (bloomFilterFilesSet.contains(file)) {
-                    allBTreeFiles.add(cmpFileName);
-                    btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
-                } else {
-                    // Couldn't find the corresponding bloom filter file; thus, delete
-                    // the BTree file.
-                    File invalidBTreeFile = new File(cmpFileName.fullPath);
-                    invalidBTreeFile.delete();
-                }
-            }
-
-            // List of valid RTree files that may or may not have a BTree buddy. Will check for buddies below.
-            ArrayList<ComparableFileName> tmpAllRTreeFiles = new ArrayList<ComparableFileName>();
-            cleanupAndGetValidFilesInternal(dev, rtreeFilter, rtreeFactory, tmpAllRTreeFiles);
-            // Look for buddy BTrees for all valid RTrees. 
-            // If no buddy is found, delete the file, otherwise add the RTree to allRTreeFiles. 
-            for (ComparableFileName cmpFileName : tmpAllRTreeFiles) {
-                int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
-                String file = cmpFileName.fileName.substring(0, index);
-                if (btreeFilesSet.contains(file)) {
-                    allRTreeFiles.add(cmpFileName);
-                } else {
-                    // Couldn't find the corresponding BTree file; thus, delete
-                    // the RTree file.
-                    File invalidRTreeFile = new File(cmpFileName.fullPath);
-                    invalidRTreeFile.delete();
-                }
-            }
-        }
-        // Sanity check.
-        if (allRTreeFiles.size() != allBTreeFiles.size() || allBTreeFiles.size() != allBloomFilterFiles.size()) {
-            throw new HyracksDataException(
-                    "Unequal number of valid RTree, BTree, and Bloom Filter files found. Aborting cleanup.");
-        }
-
-        // Trivial cases.
-        if (allRTreeFiles.isEmpty() || allBTreeFiles.isEmpty() || allBloomFilterFiles.isEmpty()) {
-            return validFiles;
-        }
-
-        if (allRTreeFiles.size() == 1 && allBTreeFiles.size() == 1 && allBloomFilterFiles.size() == 1) {
-            validFiles.add(new LSMComponentFileReferences(allRTreeFiles.get(0).fileRef, allBTreeFiles.get(0).fileRef,
-                    allBloomFilterFiles.get(0).fileRef));
-            return validFiles;
-        }
-
-        // Sorts files names from earliest to latest timestamp.
-        Collections.sort(allRTreeFiles);
-        Collections.sort(allBTreeFiles);
-        Collections.sort(allBloomFilterFiles);
-
-        List<ComparableFileName> validComparableRTreeFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastRTree = allRTreeFiles.get(0);
-        validComparableRTreeFiles.add(lastRTree);
-
-        List<ComparableFileName> validComparableBTreeFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastBTree = allBTreeFiles.get(0);
-        validComparableBTreeFiles.add(lastBTree);
-
-        List<ComparableFileName> validComparableBloomFilterFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName lastBloomFilter = allBloomFilterFiles.get(0);
-        validComparableBloomFilterFiles.add(lastBloomFilter);
-
-        for (int i = 1; i < allRTreeFiles.size(); i++) {
-            ComparableFileName currentRTree = allRTreeFiles.get(i);
-            ComparableFileName currentBTree = allBTreeFiles.get(i);
-            ComparableFileName currentBloomFilter = allBloomFilterFiles.get(i);
-            // Current start timestamp is greater than last stop timestamp.
-            if (currentRTree.interval[0].compareTo(lastRTree.interval[1]) > 0
-                    && currentBTree.interval[0].compareTo(lastBTree.interval[1]) > 0
-                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[1]) > 0) {
-                validComparableRTreeFiles.add(currentRTree);
-                validComparableBTreeFiles.add(currentBTree);
-                validComparableBloomFilterFiles.add(currentBloomFilter);
-                lastRTree = currentRTree;
-                lastBTree = currentBTree;
-                lastBloomFilter = currentBloomFilter;
-            } else if (currentRTree.interval[0].compareTo(lastRTree.interval[0]) >= 0
-                    && currentRTree.interval[1].compareTo(lastRTree.interval[1]) <= 0
-                    && currentBTree.interval[0].compareTo(lastBTree.interval[0]) >= 0
-                    && currentBTree.interval[1].compareTo(lastBTree.interval[1]) <= 0
-                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[0]) >= 0
-                    && currentBloomFilter.interval[1].compareTo(lastBloomFilter.interval[1]) <= 0) {
-                // Invalid files are completely contained in last interval.
-                File invalidRTreeFile = new File(currentRTree.fullPath);
-                invalidRTreeFile.delete();
-                File invalidBTreeFile = new File(currentBTree.fullPath);
-                invalidBTreeFile.delete();
-                File invalidBloomFilterFile = new File(currentBloomFilter.fullPath);
-                invalidBloomFilterFile.delete();
-            } else {
-                // This scenario should not be possible.
-                throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
-            }
-        }
-
-        // Sort valid files in reverse lexicographical order, such that newer
-        // files come first.
-        Collections.sort(validComparableRTreeFiles, recencyCmp);
-        Collections.sort(validComparableBTreeFiles, recencyCmp);
-        Collections.sort(validComparableBloomFilterFiles, recencyCmp);
-
-        Iterator<ComparableFileName> rtreeFileIter = validComparableRTreeFiles.iterator();
-        Iterator<ComparableFileName> btreeFileIter = validComparableBTreeFiles.iterator();
-        Iterator<ComparableFileName> bloomFilterFileIter = validComparableBloomFilterFiles.iterator();
-        while (rtreeFileIter.hasNext() && btreeFileIter.hasNext()) {
-            ComparableFileName cmpRTreeFileName = rtreeFileIter.next();
-            ComparableFileName cmpBTreeFileName = btreeFileIter.next();
-            ComparableFileName cmpBloomFilterFileName = bloomFilterFileIter.next();
-            validFiles.add(new LSMComponentFileReferences(cmpRTreeFileName.fileRef, cmpBTreeFileName.fileRef,
-                    cmpBloomFilterFileName.fileRef));
-        }
-
-        return validFiles;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
deleted file mode 100644
index 7b7f2bc..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-
-public class LSMRTreeFlushOperation implements ILSMIOOperation {
-
-    private final ILSMIndexAccessorInternal accessor;
-    private final ILSMComponent flushingComponent;
-    private final FileReference rtreeFlushTarget;
-    private final FileReference btreeFlushTarget;
-    private final FileReference bloomFilterFlushTarget;
-    private final ILSMIOOperationCallback callback;
-
-    public LSMRTreeFlushOperation(ILSMIndexAccessorInternal accessor, ILSMComponent flushingComponent,
-            FileReference rtreeFlushTarget, FileReference btreeFlushTarget, FileReference bloomFilterFlushTarget,
-            ILSMIOOperationCallback callback) {
-        this.accessor = accessor;
-        this.flushingComponent = flushingComponent;
-        this.rtreeFlushTarget = rtreeFlushTarget;
-        this.btreeFlushTarget = btreeFlushTarget;
-        this.bloomFilterFlushTarget = bloomFilterFlushTarget;
-        this.callback = callback;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getReadDevices() {
-        return Collections.emptySet();
-    }
-
-    @Override
-    public Set<IODeviceHandle> getWriteDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        devs.add(rtreeFlushTarget.getDeviceHandle());
-        if (btreeFlushTarget != null) {
-            devs.add(btreeFlushTarget.getDeviceHandle());
-            devs.add(bloomFilterFlushTarget.getDeviceHandle());
-        }
-        return devs;
-    }
-
-    @Override
-    public void perform() throws HyracksDataException, IndexException {
-        accessor.flush(this);
-    }
-
-    @Override
-    public ILSMIOOperationCallback getCallback() {
-        return callback;
-    }
-
-    public FileReference getRTreeFlushTarget() {
-        return rtreeFlushTarget;
-    }
-
-    public FileReference getBTreeFlushTarget() {
-        return btreeFlushTarget;
-    }
-
-    public FileReference getBloomFilterFlushTarget() {
-        return bloomFilterFlushTarget;
-    }
-
-    public ILSMComponent getFlushingComponent() {
-        return flushingComponent;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeImmutableComponent.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeImmutableComponent.java
deleted file mode 100644
index 8d20c14..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeImmutableComponent.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractImmutableLSMComponent;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-
-public class LSMRTreeImmutableComponent extends AbstractImmutableLSMComponent {
-    private final RTree rtree;
-    private final BTree btree;
-    private final BloomFilter bloomFilter;
-
-    public LSMRTreeImmutableComponent(RTree rtree, BTree btree, BloomFilter bloomFilter) {
-        this.rtree = rtree;
-        this.btree = btree;
-        this.bloomFilter = bloomFilter;
-    }
-
-    @Override
-    public void destroy() throws HyracksDataException {
-        rtree.deactivate();
-        rtree.destroy();
-        if (btree != null) {
-            btree.deactivate();
-            btree.destroy();
-            bloomFilter.deactivate();
-            bloomFilter.destroy();
-        }
-    }
-
-    public RTree getRTree() {
-        return rtree;
-    }
-
-    public BTree getBTree() {
-        return btree;
-    }
-
-    public BloomFilter getBloomFilter() {
-        return bloomFilter;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
deleted file mode 100644
index 0e05a93..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-
-public class LSMRTreeMergeOperation implements ILSMIOOperation {
-    private final ILSMIndexAccessorInternal accessor;
-    private final List<ILSMComponent> mergingComponents;
-    private final ITreeIndexCursor cursor;
-    private final FileReference rtreeMergeTarget;
-    private final FileReference btreeMergeTarget;
-    private final FileReference bloomFilterMergeTarget;
-    private final ILSMIOOperationCallback callback;
-
-    public LSMRTreeMergeOperation(ILSMIndexAccessorInternal accessor, List<ILSMComponent> mergingComponents,
-            ITreeIndexCursor cursor, FileReference rtreeMergeTarget, FileReference btreeMergeTarget,
-            FileReference bloomFilterMergeTarget, ILSMIOOperationCallback callback) {
-        this.accessor = accessor;
-        this.mergingComponents = mergingComponents;
-        this.cursor = cursor;
-        this.rtreeMergeTarget = rtreeMergeTarget;
-        this.btreeMergeTarget = btreeMergeTarget;
-        this.bloomFilterMergeTarget = bloomFilterMergeTarget;
-        this.callback = callback;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getReadDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        for (ILSMComponent o : mergingComponents) {
-            LSMRTreeImmutableComponent component = (LSMRTreeImmutableComponent) o;
-            devs.add(component.getRTree().getFileReference().getDeviceHandle());
-            if (component.getBTree() != null) {
-                devs.add(component.getBTree().getFileReference().getDeviceHandle());
-                devs.add(component.getBloomFilter().getFileReference().getDeviceHandle());
-            }
-        }
-        return devs;
-    }
-
-    @Override
-    public Set<IODeviceHandle> getWriteDevices() {
-        Set<IODeviceHandle> devs = new HashSet<IODeviceHandle>();
-        devs.add(rtreeMergeTarget.getDeviceHandle());
-        if (btreeMergeTarget != null) {
-            devs.add(btreeMergeTarget.getDeviceHandle());
-            devs.add(bloomFilterMergeTarget.getDeviceHandle());
-        }
-        return devs;
-    }
-
-    @Override
-    public void perform() throws HyracksDataException, IndexException {
-        accessor.merge(this);
-    }
-
-    @Override
-    public ILSMIOOperationCallback getCallback() {
-        return callback;
-    }
-
-    public FileReference getRTreeMergeTarget() {
-        return rtreeMergeTarget;
-    }
-
-    public FileReference getBTreeMergeTarget() {
-        return btreeMergeTarget;
-    }
-
-    public FileReference getBloomFilterMergeTarget() {
-        return bloomFilterMergeTarget;
-    }
-
-    public ITreeIndexCursor getCursor() {
-        return cursor;
-    }
-
-    public List<ILSMComponent> getMergingComponents() {
-        return mergingComponents;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMutableComponent.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMutableComponent.java
deleted file mode 100644
index 80f76a1..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMutableComponent.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractMutableLSMComponent;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-
-public class LSMRTreeMutableComponent extends AbstractMutableLSMComponent {
-
-    private final RTree rtree;
-    private final BTree btree;
-    private final IInMemoryFreePageManager mfpm;
-
-    public LSMRTreeMutableComponent(RTree rtree, BTree btree, IInMemoryFreePageManager mfpm) {
-        this.rtree = rtree;
-        this.btree = btree;
-        this.mfpm = mfpm;
-    }
-
-    public RTree getRTree() {
-        return rtree;
-    }
-
-    public BTree getBTree() {
-        return btree;
-    }
-
-    @Override
-    protected boolean isFull() {
-        return mfpm.isFull();
-    }
-
-    @Override
-    protected void reset() throws HyracksDataException {
-        rtree.clear();
-        if (btree != null) {
-            btree.clear();
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
deleted file mode 100644
index b8805d1..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
-
-public final class LSMRTreeOpContext implements ILSMIndexOperationContext {
-
-    public RTreeOpContext rtreeOpContext;
-    public BTreeOpContext btreeOpContext;
-    public final RTree.RTreeAccessor memRTreeAccessor;
-    public final BTree.BTreeAccessor memBTreeAccessor;
-    private IndexOperation op;
-    public final List<ILSMComponent> componentHolder;
-    public final IModificationOperationCallback modificationCallback;
-    public final ISearchOperationCallback searchCallback;
-
-    public LSMRTreeOpContext(RTree.RTreeAccessor memRtreeAccessor, IRTreeLeafFrame rtreeLeafFrame,
-            IRTreeInteriorFrame rtreeInteriorFrame, ITreeIndexMetaDataFrame rtreeMetaFrame, int rTreeHeightHint,
-            BTree.BTreeAccessor memBtreeAccessor, ITreeIndexFrameFactory btreeLeafFrameFactory,
-            ITreeIndexFrameFactory btreeInteriorFrameFactory, ITreeIndexMetaDataFrame btreeMetaFrame,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IModificationOperationCallback modificationCallback, ISearchOperationCallback searchCallback) {
-        this.memRTreeAccessor = memRtreeAccessor;
-        this.memBTreeAccessor = memBtreeAccessor;
-        this.componentHolder = new LinkedList<ILSMComponent>();
-        this.modificationCallback = modificationCallback;
-        this.searchCallback = searchCallback;
-        this.rtreeOpContext = new RTreeOpContext(rtreeLeafFrame, rtreeInteriorFrame, rtreeMetaFrame, rtreeCmpFactories,
-                rTreeHeightHint, NoOpOperationCallback.INSTANCE);
-        this.btreeOpContext = new BTreeOpContext(memBtreeAccessor, btreeLeafFrameFactory, btreeInteriorFrameFactory,
-                btreeMetaFrame, btreeCmpFactories, NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-    }
-
-    public void setOperation(IndexOperation newOp) {
-        reset();
-        if (newOp == IndexOperation.INSERT) {
-            rtreeOpContext.setOperation(newOp);
-        } else if (newOp == IndexOperation.DELETE) {
-            btreeOpContext.setOperation(IndexOperation.INSERT);
-        }
-        this.op = newOp;
-    }
-
-    @Override
-    public void reset() {
-        componentHolder.clear();
-    }
-
-    @Override
-    public IndexOperation getOperation() {
-        return op;
-    }
-
-    public MultiComparator getBTreeMultiComparator() {
-        return btreeOpContext.cmp;
-    }
-
-    @Override
-    public List<ILSMComponent> getComponentHolder() {
-        return componentHolder;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return searchCallback;
-    }
-
-    @Override
-    public IModificationOperationCallback getModificationCallback() {
-        return modificationCallback;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
deleted file mode 100644
index 966ed8d..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-
-public class LSMRTreeSearchCursor extends LSMRTreeAbstractCursor {
-
-    private int currentCursor;
-
-    public LSMRTreeSearchCursor(ILSMIndexOperationContext opCtx) {
-        super(opCtx);
-        currentCursor = 0;
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        super.close();
-        currentCursor = 0;
-    }
-
-    @Override
-    public void reset() throws HyracksDataException {
-        if (!open) {
-            return;
-        }
-
-        currentCursor = 0;
-        foundNext = false;
-        try {
-            for (int i = 0; i < numberOfTrees; i++) {
-                rtreeCursors[i].close();
-                btreeCursors[i].close();
-            }
-            rtreeCursors = null;
-            btreeCursors = null;
-        } finally {
-            lsmHarness.endSearch(opCtx);
-        }
-    }
-
-    private void searchNextCursor() throws HyracksDataException {
-        if (currentCursor < numberOfTrees) {
-            rtreeCursors[currentCursor].reset();
-            try {
-                rTreeAccessors[currentCursor].search(rtreeCursors[currentCursor], rtreeSearchPredicate);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        if (foundNext) {
-            return true;
-        }
-        while (currentCursor < numberOfTrees) {
-            while (rtreeCursors[currentCursor].hasNext()) {
-                rtreeCursors[currentCursor].next();
-                ITupleReference currentTuple = rtreeCursors[currentCursor].getTuple();
-
-                boolean killerTupleFound = false;
-                for (int i = 0; i <= currentCursor; i++) {
-                    try {
-                        btreeCursors[i].reset();
-                        btreeRangePredicate.setHighKey(currentTuple, true);
-                        btreeRangePredicate.setLowKey(currentTuple, true);
-                        bTreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
-                    } catch (IndexException e) {
-                        throw new HyracksDataException(e);
-                    }
-                    try {
-                        if (btreeCursors[i].hasNext()) {
-                            killerTupleFound = true;
-                            break;
-                        }
-                    } finally {
-                        btreeCursors[i].close();
-                    }
-                }
-                if (!killerTupleFound) {
-                    frameTuple = currentTuple;
-                    foundNext = true;
-                    return true;
-                }
-            }
-            rtreeCursors[currentCursor].close();
-            currentCursor++;
-            searchNextCursor();
-        }
-        return false;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        foundNext = false;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        super.open(initialState, searchPred);
-        searchNextCursor();
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
deleted file mode 100644
index 02a1876..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-
-public class LSMRTreeSortedCursor extends LSMRTreeAbstractCursor {
-
-    private ILinearizeComparator linearizeCmp;
-    private boolean[] depletedRtreeCursors;
-    private int foundIn = -1;
-
-    public LSMRTreeSortedCursor(ILSMIndexOperationContext opCtx, ILinearizeComparatorFactory linearizer)
-            throws HyracksDataException {
-        super(opCtx);
-        this.linearizeCmp = linearizer.createBinaryComparator();
-        reset();
-    }
-
-    @Override
-    public void reset() throws HyracksDataException {
-        depletedRtreeCursors = new boolean[numberOfTrees];
-        foundNext = false;
-        try {
-            for (int i = 0; i < numberOfTrees; i++) {
-                rtreeCursors[i].reset();
-                try {
-                    rTreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
-                if (rtreeCursors[i].hasNext()) {
-                    rtreeCursors[i].next();
-                } else {
-                    depletedRtreeCursors[i] = true;
-                }
-            }
-        } finally {
-            if (open) {
-                lsmHarness.endSearch(opCtx);
-            }
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        while (!foundNext) {
-            frameTuple = null;
-
-            if (foundIn != -1) {
-                if (rtreeCursors[foundIn].hasNext()) {
-                    rtreeCursors[foundIn].next();
-                } else {
-                    depletedRtreeCursors[foundIn] = true;
-                }
-            }
-
-            foundIn = -1;
-            for (int i = 0; i < numberOfTrees; i++) {
-                if (depletedRtreeCursors[i])
-                    continue;
-
-                if (frameTuple == null) {
-                    frameTuple = rtreeCursors[i].getTuple();
-                    foundIn = i;
-                    continue;
-                }
-
-                if (linearizeCmp.compare(frameTuple.getFieldData(0), frameTuple.getFieldStart(0),
-                        frameTuple.getFieldLength(0) * linearizeCmp.getDimensions(), rtreeCursors[i].getTuple()
-                                .getFieldData(0), rtreeCursors[i].getTuple().getFieldStart(0), rtreeCursors[i]
-                                .getTuple().getFieldLength(0) * linearizeCmp.getDimensions()) <= 0) {
-                    frameTuple = rtreeCursors[i].getTuple();
-                    foundIn = i;
-                }
-            }
-
-            if (foundIn == -1)
-                return false;
-
-            boolean killed = false;
-            for (int i = 0; i < foundIn; i++) {
-                try {
-                    btreeCursors[i].reset();
-                    btreeRangePredicate.setHighKey(frameTuple, true);
-                    btreeRangePredicate.setLowKey(frameTuple, true);
-                    bTreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
-                try {
-                    if (btreeCursors[i].hasNext()) {
-                        killed = true;
-                        break;
-                    }
-                } finally {
-                    btreeCursors[i].close();
-                }
-            }
-            if (!killed) {
-                foundNext = true;
-            }
-        }
-
-        return true;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        foundNext = false;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        super.open(initialState, searchPred);
-
-        depletedRtreeCursors = new boolean[numberOfTrees];
-        foundNext = false;
-        for (int i = 0; i < numberOfTrees; i++) {
-            rtreeCursors[i].reset();
-            try {
-                rTreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-            if (rtreeCursors[i].hasNext()) {
-                rtreeCursors[i].next();
-            } else {
-                depletedRtreeCursors[i] = true;
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
deleted file mode 100644
index 478d076..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
+++ /dev/null
@@ -1,434 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.util.List;
-import java.util.ListIterator;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponentFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMRTreeWithAntiMatterTuples extends AbstractLSMRTree {
-
-    private TreeTupleSorter bTreeTupleSorter;
-
-    // On-disk components.
-    // For creating RTree's used in bulk load. Different from diskRTreeFactory
-    // because it should have a different tuple writer in it's leaf frames.
-    private final ILSMComponentFactory bulkLoaComponentFactory;
-
-    public LSMRTreeWithAntiMatterTuples(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, ITreeIndexFrameFactory rtreeInteriorFrameFactory,
-            ITreeIndexFrameFactory rtreeLeafFrameFactory, ITreeIndexFrameFactory btreeInteriorFrameFactory,
-            ITreeIndexFrameFactory btreeLeafFrameFactory, ILSMIndexFileManager fileManager,
-            TreeIndexFactory<RTree> diskRTreeFactory, TreeIndexFactory<RTree> bulkLoadRTreeFactory,
-            IFileMapProvider diskFileMapProvider, int fieldCount, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, ILinearizeComparatorFactory linearizer,
-            int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray, ILSMMergePolicy mergePolicy,
-            ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
-        super(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
-                btreeInteriorFrameFactory, btreeLeafFrameFactory, fileManager, diskRTreeFactory,
-                new LSMRTreeWithAntiMatterTuplesComponentFactory(diskRTreeFactory), diskFileMapProvider, fieldCount,
-                rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields, linearizerArray, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        bulkLoaComponentFactory = new LSMRTreeWithAntiMatterTuplesComponentFactory(bulkLoadRTreeFactory);
-        this.bTreeTupleSorter = null;
-    }
-
-    @Override
-    public synchronized void activate() throws HyracksDataException {
-        super.activate();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        immutableComponents.clear();
-        List<LSMComponentFileReferences> validFileReferences;
-        try {
-            validFileReferences = fileManager.cleanupAndGetValidFiles();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-        for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
-            LSMRTreeImmutableComponent component;
-            try {
-                component = createDiskComponent(componentFactory,
-                        lsmComonentFileReference.getInsertIndexFileReference(), null, null, false);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-            immutableComponents.add(component);
-        }
-        isActivated = true;
-    }
-
-    @Override
-    public synchronized void deactivate(boolean flushOnExit) throws HyracksDataException {
-        super.deactivate(flushOnExit);
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            RTree rtree = (RTree) ((LSMRTreeImmutableComponent) c).getRTree();
-            rtree.deactivate();
-        }
-        isActivated = false;
-    }
-
-    @Override
-    public synchronized void deactivate() throws HyracksDataException {
-        deactivate(true);
-    }
-
-    @Override
-    public synchronized void destroy() throws HyracksDataException {
-        super.destroy();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            RTree rtree = (RTree) ((LSMRTreeImmutableComponent) c).getRTree();
-            rtree.destroy();
-        }
-        fileManager.deleteDirs();
-    }
-
-    @Override
-    public synchronized void clear() throws HyracksDataException {
-        super.clear();
-        List<ILSMComponent> immutableComponents = componentsRef.get();
-        for (ILSMComponent c : immutableComponents) {
-            RTree rtree = (RTree) ((LSMRTreeImmutableComponent) c).getRTree();
-            rtree.deactivate();
-            rtree.destroy();
-        }
-        immutableComponents.clear();
-    }
-
-    @Override
-    public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
-        LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
-        List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
-        boolean includeMutableComponent = operationalComponents.get(0) == mutableComponent;
-        LSMRTreeWithAntiMatterTuplesSearchCursor lsmTreeCursor = (LSMRTreeWithAntiMatterTuplesSearchCursor) cursor;
-        int numDiskRComponents = operationalComponents.size();
-
-        LSMRTreeCursorInitialState initialState;
-        ITreeIndexAccessor[] bTreeAccessors = null;
-        if (includeMutableComponent) {
-            // Only in-memory BTree
-            bTreeAccessors = new ITreeIndexAccessor[1];
-            bTreeAccessors[0] = ctx.memBTreeAccessor;
-        }
-
-        initialState = new LSMRTreeCursorInitialState(numDiskRComponents, rtreeLeafFrameFactory,
-                rtreeInteriorFrameFactory, btreeLeafFrameFactory, ctx.getBTreeMultiComparator(), null, bTreeAccessors,
-                includeMutableComponent, lsmHarness, comparatorFields, linearizerArray, ctx.searchCallback,
-                operationalComponents);
-
-        lsmTreeCursor.open(initialState, pred);
-
-        ListIterator<ILSMComponent> diskComponentsIter = operationalComponents.listIterator();
-        int diskComponentIx = 0;
-        if (includeMutableComponent) {
-            // Open cursor of in-memory RTree
-            ctx.memRTreeAccessor.search(lsmTreeCursor.getMemRTreeCursor(), pred);
-            diskComponentIx++;
-            diskComponentsIter.next();
-        }
-
-        // Open cursors of on-disk RTrees.
-        ITreeIndexAccessor[] diskRTreeAccessors = new ITreeIndexAccessor[numDiskRComponents];
-        while (diskComponentsIter.hasNext()) {
-            RTree diskRTree = (RTree) ((LSMRTreeImmutableComponent) diskComponentsIter.next()).getRTree();
-            diskRTreeAccessors[diskComponentIx] = diskRTree.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
-            diskRTreeAccessors[diskComponentIx].search(lsmTreeCursor.getCursor(diskComponentIx), pred);
-            diskComponentIx++;
-        }
-        lsmTreeCursor.initPriorityQueue();
-    }
-
-    @Override
-    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException {
-        LSMRTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE);
-        LSMComponentFileReferences relFlushFileRefs = fileManager.getRelFlushFileReference();
-        ILSMComponent flushingComponent = ctx.getComponentHolder().get(0);
-        opCtx.setOperation(IndexOperation.FLUSH);
-        opCtx.getComponentHolder().add(flushingComponent);
-        ILSMIndexAccessorInternal accessor = new LSMRTreeWithAntiMatterTuplesAccessor(lsmHarness, opCtx);
-        ioScheduler.scheduleOperation(new LSMRTreeFlushOperation(accessor, flushingComponent, relFlushFileRefs
-                .getInsertIndexFileReference(), null, null, callback));
-    }
-
-    @Override
-    public ILSMComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
-        // Renaming order is critical because we use assume ordering when we
-        // read the file names when we open the tree.
-        // The RTree should be renamed before the BTree.
-        LSMRTreeMutableComponent flushingComponent = (LSMRTreeMutableComponent) flushOp.getFlushingComponent();
-        ITreeIndexAccessor memRTreeAccessor = flushingComponent.getRTree().createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        RTreeSearchCursor rtreeScanCursor = (RTreeSearchCursor) memRTreeAccessor.createSearchCursor();
-        SearchPredicate rtreeNullPredicate = new SearchPredicate(null, null);
-        memRTreeAccessor.search(rtreeScanCursor, rtreeNullPredicate);
-        LSMRTreeImmutableComponent component = createDiskComponent(componentFactory, flushOp.getRTreeFlushTarget(),
-                null, null, true);
-        RTree diskRTree = component.getRTree();
-
-        // scan the memory BTree
-        ITreeIndexAccessor memBTreeAccessor = flushingComponent.getBTree().createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        BTreeRangeSearchCursor btreeScanCursor = (BTreeRangeSearchCursor) memBTreeAccessor.createSearchCursor();
-        RangePredicate btreeNullPredicate = new RangePredicate(null, null, true, true, null, null);
-        memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
-
-        // Since the LSM-RTree is used as a secondary assumption, the
-        // primary key will be the last comparator in the BTree comparators
-        if (rTreeTupleSorter == null) {
-            rTreeTupleSorter = new TreeTupleSorter(flushingComponent.getRTree().getFileId(), linearizerArray,
-                    rtreeLeafFrameFactory.createFrame(), rtreeLeafFrameFactory.createFrame(), flushingComponent
-                            .getRTree().getBufferCache(), comparatorFields);
-
-            bTreeTupleSorter = new TreeTupleSorter(flushingComponent.getBTree().getFileId(), linearizerArray,
-                    btreeLeafFrameFactory.createFrame(), btreeLeafFrameFactory.createFrame(), flushingComponent
-                            .getBTree().getBufferCache(), comparatorFields);
-        } else {
-            rTreeTupleSorter.reset();
-            bTreeTupleSorter.reset();
-        }
-        // BulkLoad the tuples from the in-memory tree into the new disk
-        // RTree.
-
-        boolean isEmpty = true;
-        try {
-            while (rtreeScanCursor.hasNext()) {
-                isEmpty = false;
-                rtreeScanCursor.next();
-                rTreeTupleSorter.insertTupleEntry(rtreeScanCursor.getPageId(), rtreeScanCursor.getTupleOffset());
-            }
-        } finally {
-            rtreeScanCursor.close();
-        }
-        if (!isEmpty) {
-            rTreeTupleSorter.sort();
-        }
-
-        isEmpty = true;
-        try {
-            while (btreeScanCursor.hasNext()) {
-                isEmpty = false;
-                btreeScanCursor.next();
-                bTreeTupleSorter.insertTupleEntry(btreeScanCursor.getPageId(), btreeScanCursor.getTupleOffset());
-            }
-        } finally {
-            btreeScanCursor.close();
-        }
-        if (!isEmpty) {
-            bTreeTupleSorter.sort();
-        }
-
-        IIndexBulkLoader rTreeBulkloader = diskRTree.createBulkLoader(1.0f, false, 0L);
-        LSMRTreeWithAntiMatterTuplesFlushCursor cursor = new LSMRTreeWithAntiMatterTuplesFlushCursor(rTreeTupleSorter,
-                bTreeTupleSorter, comparatorFields, linearizerArray);
-        cursor.open(null, null);
-
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-                ITupleReference frameTuple = cursor.getTuple();
-
-                rTreeBulkloader.add(frameTuple);
-            }
-        } finally {
-            cursor.close();
-        }
-
-        rTreeBulkloader.end();
-        return component;
-    }
-
-    @Override
-    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
-        List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
-        LSMRTreeOpContext rctx = createOpContext(NoOpOperationCallback.INSTANCE);
-        rctx.getComponentHolder().addAll(mergingComponents);
-        ITreeIndexCursor cursor = new LSMRTreeWithAntiMatterTuplesSearchCursor(ctx);
-        ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
-        search(rctx, cursor, (SearchPredicate) rtreeSearchPred);
-        rctx.setOperation(IndexOperation.MERGE);
-        LSMComponentFileReferences relMergeFileRefs = getMergeTargetFileName(mergingComponents);
-        ILSMIndexAccessorInternal accessor = new LSMRTreeWithAntiMatterTuplesAccessor(lsmHarness, rctx);
-        ioScheduler.scheduleOperation(new LSMRTreeMergeOperation(accessor, mergingComponents, cursor, relMergeFileRefs
-                .getInsertIndexFileReference(), null, null, callback));
-    }
-
-    @Override
-    public ILSMComponent merge(List<ILSMComponent> mergedComponents, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
-        LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
-        ITreeIndexCursor cursor = mergeOp.getCursor();
-        mergedComponents.addAll(mergeOp.getMergingComponents());
-
-        // Nothing to merge.
-        if (mergedComponents.size() <= 1) {
-            cursor.close();
-            return null;
-        }
-
-        // Bulk load the tuples from all on-disk RTrees into the new RTree.
-        LSMRTreeImmutableComponent component = createDiskComponent(componentFactory, mergeOp.getRTreeMergeTarget(),
-                null, null, true);
-        RTree mergedRTree = component.getRTree();
-        IIndexBulkLoader bulkloader = mergedRTree.createBulkLoader(1.0f, false, 0L);
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-                ITupleReference frameTuple = cursor.getTuple();
-                bulkloader.add(frameTuple);
-            }
-        } finally {
-            cursor.close();
-        }
-        bulkloader.end();
-        return component;
-    }
-
-    @Override
-    public ILSMIndexAccessorInternal createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new LSMRTreeWithAntiMatterTuplesAccessor(lsmHarness, createOpContext(modificationCallback));
-    }
-
-    public class LSMRTreeWithAntiMatterTuplesAccessor extends LSMTreeIndexAccessor {
-        public LSMRTreeWithAntiMatterTuplesAccessor(ILSMHarness lsmHarness, ILSMIndexOperationContext ctx) {
-            super(lsmHarness, ctx);
-        }
-
-        @Override
-        public ITreeIndexCursor createSearchCursor() {
-            return new LSMRTreeWithAntiMatterTuplesSearchCursor(ctx);
-        }
-
-        public MultiComparator getMultiComparator() {
-            LSMRTreeOpContext concreteCtx = (LSMRTreeOpContext) ctx;
-            return concreteCtx.rtreeOpContext.cmp;
-        }
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint)
-            throws TreeIndexException {
-        return new LSMRTreeWithAntiMatterTuplesBulkLoader(fillLevel, verifyInput, numElementsHint);
-    }
-
-    private ILSMComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
-        LSMComponentFileReferences relFlushFileRefs = fileManager.getRelFlushFileReference();
-        return createDiskComponent(bulkLoaComponentFactory, relFlushFileRefs.getInsertIndexFileReference(), null, null,
-                true);
-    }
-
-    public class LSMRTreeWithAntiMatterTuplesBulkLoader implements IIndexBulkLoader {
-        private final ILSMComponent component;
-        private final IIndexBulkLoader bulkLoader;
-
-        public LSMRTreeWithAntiMatterTuplesBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-                throws TreeIndexException {
-            // Note that by using a flush target file name, we state that the
-            // new bulk loaded tree is "newer" than any other merged tree.
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException e) {
-                throw new TreeIndexException(e);
-            } catch (IndexException e) {
-                throw new TreeIndexException(e);
-            }
-            bulkLoader = ((LSMRTreeImmutableComponent) component).getRTree().createBulkLoader(fillFactor, verifyInput,
-                    numElementsHint);
-        }
-
-        @Override
-        public void add(ITupleReference tuple) throws HyracksDataException, IndexException {
-            try {
-                bulkLoader.add(tuple);
-            } catch (IndexException e) {
-                handleException();
-                throw e;
-            } catch (HyracksDataException e) {
-                handleException();
-                throw e;
-            } catch (RuntimeException e) {
-                handleException();
-                throw e;
-            }
-        }
-
-        @Override
-        public void end() throws HyracksDataException, IndexException {
-            bulkLoader.end();
-            lsmHarness.addBulkLoadedComponent(component);
-        }
-
-        protected void handleException() throws HyracksDataException {
-            ((LSMRTreeImmutableComponent) component).getRTree().deactivate();
-            ((LSMRTreeImmutableComponent) component).getRTree().destroy();
-        }
-
-    }
-
-    @Override
-    public void markAsValid(ILSMComponent lsmComponent) throws HyracksDataException {
-        RTree rtree = ((LSMRTreeImmutableComponent) lsmComponent).getRTree();
-        forceFlushDirtyPages(rtree);
-        markAsValidInternal(rtree);
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesComponentFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesComponentFactory.java
deleted file mode 100644
index 0149800..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesComponentFactory.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMComponentFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class LSMRTreeWithAntiMatterTuplesComponentFactory implements ILSMComponentFactory {
-    private final TreeIndexFactory<RTree> rtreeFactory;
-
-    public LSMRTreeWithAntiMatterTuplesComponentFactory(TreeIndexFactory<RTree> rtreeFactory) {
-        this.rtreeFactory = rtreeFactory;
-    }
-
-    @Override
-    public ILSMComponent createLSMComponentInstance(LSMComponentFileReferences cfr) throws IndexException {
-        return new LSMRTreeImmutableComponent(rtreeFactory.createIndexInstance(cfr.getInsertIndexFileReference()),
-                null, null);
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return rtreeFactory.getBufferCache();
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFileManager.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFileManager.java
deleted file mode 100644
index 10b982f..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFileManager.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMRTreeWithAntiMatterTuplesFileManager extends AbstractLSMIndexFileManager {
-
-    private final TreeIndexFactory<? extends ITreeIndex> rtreeFactory;
-
-    public LSMRTreeWithAntiMatterTuplesFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider,
-            FileReference file, TreeIndexFactory<? extends ITreeIndex> rtreeFactory, int startIODeviceIndex) {
-        super(ioManager, fileMapProvider, file, null, startIODeviceIndex);
-        this.rtreeFactory = rtreeFactory;
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelFlushFileReference() {
-        Date date = new Date();
-        String ts = formatter.format(date);
-        // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseDir + ts + SPLIT_STRING + ts), null, null);
-    }
-
-    @Override
-    public LSMComponentFileReferences getRelMergeFileReference(String firstFileName, String lastFileName)
-            throws HyracksDataException {
-        String[] firstTimestampRange = firstFileName.split(SPLIT_STRING);
-        String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
-        // Get the range of timestamps by taking the earliest and the latest timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseDir + firstTimestampRange[0] + SPLIT_STRING
-                + lastTimestampRange[1]), null, null);
-    }
-
-    private static FilenameFilter fileNameFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-            return !name.startsWith(".");
-        }
-    };
-
-    @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
-        List<LSMComponentFileReferences> validFiles = new ArrayList<LSMComponentFileReferences>();
-        ArrayList<ComparableFileName> allFiles = new ArrayList<ComparableFileName>();
-
-        // Gather files from all IODeviceHandles and delete invalid files
-        // There are two types of invalid files:
-        // (1) The isValid flag is not set
-        // (2) The file's interval is contained by some other file
-        // Here, we only filter out (1).
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            cleanupAndGetValidFilesInternal(dev, fileNameFilter, rtreeFactory, allFiles);
-        }
-
-        if (allFiles.isEmpty()) {
-            return validFiles;
-        }
-
-        if (allFiles.size() == 1) {
-            validFiles.add(new LSMComponentFileReferences(allFiles.get(0).fileRef, null, null));
-            return validFiles;
-        }
-
-        // Sorts files names from earliest to latest timestamp.
-        Collections.sort(allFiles);
-
-        List<ComparableFileName> validComparableFiles = new ArrayList<ComparableFileName>();
-        ComparableFileName last = allFiles.get(0);
-        validComparableFiles.add(last);
-        for (int i = 1; i < allFiles.size(); i++) {
-            ComparableFileName current = allFiles.get(i);
-            // The current start timestamp is greater than last stop timestamp so current is valid.
-            if (current.interval[0].compareTo(last.interval[1]) > 0) {
-                validComparableFiles.add(current);
-                last = current;
-            } else if (current.interval[0].compareTo(last.interval[0]) >= 0
-                    && current.interval[1].compareTo(last.interval[1]) <= 0) {
-                // The current file is completely contained in the interval of the 
-                // last file. Thus the last file must contain at least as much information 
-                // as the current file, so delete the current file.
-                current.fileRef.delete();
-            } else {
-                // This scenario should not be possible since timestamps are monotonically increasing.
-                throw new HyracksDataException("Found LSM files with overlapping timestamp intervals, "
-                        + "but the intervals were not contained by another file.");
-            }
-        }
-
-        // Sort valid files in reverse lexicographical order, such that newer files come first.
-        Collections.sort(validComparableFiles, recencyCmp);
-        for (ComparableFileName cmpFileName : validComparableFiles) {
-            validFiles.add(new LSMComponentFileReferences(cmpFileName.fileRef, null, null));
-        }
-
-        return validFiles;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFlushCursor.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFlushCursor.java
deleted file mode 100644
index 22e6929..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFlushCursor.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class LSMRTreeWithAntiMatterTuplesFlushCursor implements ITreeIndexCursor {
-    private final TreeTupleSorter rTreeTupleSorter;
-    private final TreeTupleSorter bTreeTupleSorter;
-    private final int[] comparatorFields;
-    private final MultiComparator cmp;
-    private ITupleReference frameTuple;
-    private ITupleReference leftOverTuple;
-    private ITupleReference rtreeTuple;
-    private ITupleReference btreeTuple;
-    private boolean foundNext = false;
-
-    public LSMRTreeWithAntiMatterTuplesFlushCursor(TreeTupleSorter rTreeTupleSorter, TreeTupleSorter bTreeTupleSorter,
-            int[] comparatorFields, IBinaryComparatorFactory[] comparatorFactories) {
-        this.rTreeTupleSorter = rTreeTupleSorter;
-        this.bTreeTupleSorter = bTreeTupleSorter;
-        this.comparatorFields = comparatorFields;
-        cmp = MultiComparator.create(comparatorFactories);
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException {
-        if (foundNext) {
-            return true;
-        }
-        while (true) {
-            if (leftOverTuple != null && leftOverTuple == rtreeTuple) {
-                if (bTreeTupleSorter.hasNext()) {
-                    bTreeTupleSorter.next();
-                    btreeTuple = bTreeTupleSorter.getTuple();
-                } else {
-                    frameTuple = rtreeTuple;
-                    foundNext = true;
-                    leftOverTuple = null;
-                    return true;
-                }
-            } else if (leftOverTuple != null && leftOverTuple == btreeTuple) {
-                if (rTreeTupleSorter.hasNext()) {
-                    rTreeTupleSorter.next();
-                    rtreeTuple = rTreeTupleSorter.getTuple();
-                } else {
-                    frameTuple = btreeTuple;
-                    foundNext = true;
-                    leftOverTuple = null;
-                    return true;
-                }
-            } else {
-                if (rTreeTupleSorter.hasNext() && bTreeTupleSorter.hasNext()) {
-                    rTreeTupleSorter.next();
-                    bTreeTupleSorter.next();
-                    rtreeTuple = rTreeTupleSorter.getTuple();
-                    btreeTuple = bTreeTupleSorter.getTuple();
-                } else if (rTreeTupleSorter.hasNext()) {
-                    rTreeTupleSorter.next();
-                    rtreeTuple = rTreeTupleSorter.getTuple();
-                    frameTuple = rtreeTuple;
-                    leftOverTuple = null;
-                    foundNext = true;
-                    return true;
-                } else if (bTreeTupleSorter.hasNext()) {
-                    bTreeTupleSorter.next();
-                    btreeTuple = bTreeTupleSorter.getTuple();
-                    frameTuple = btreeTuple;
-                    leftOverTuple = null;
-                    foundNext = true;
-                    return true;
-                } else {
-                    return false;
-                }
-            }
-
-            int c = cmp.selectiveFieldCompare(rtreeTuple, btreeTuple, comparatorFields);
-            if (c == 0) {
-                leftOverTuple = null;
-                continue;
-            } else if (c < 0) {
-                frameTuple = rtreeTuple;
-                leftOverTuple = btreeTuple;
-                foundNext = true;
-                return true;
-            } else {
-                frameTuple = btreeTuple;
-                leftOverTuple = rtreeTuple;
-                foundNext = true;
-                return true;
-            }
-        }
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        foundNext = false;
-
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-    }
-
-    @Override
-    public void reset() throws HyracksDataException {
-
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return frameTuple;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return null;
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        // TODO Auto-generated method stub
-        return false;
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
deleted file mode 100644
index 47d00c0..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMIndexSearchCursor;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
-
-public class LSMRTreeWithAntiMatterTuplesSearchCursor extends LSMIndexSearchCursor {
-
-    private RTreeSearchCursor memRTreeCursor;
-    private BTreeRangeSearchCursor memBTreeCursor;
-    private RangePredicate btreeRangePredicate;
-    private ITreeIndexAccessor memBTreeAccessor;
-    private boolean foundNext;
-    private ITupleReference frameTuple;
-    private int[] comparatorFields;
-    private MultiComparator btreeCmp;
-
-    public LSMRTreeWithAntiMatterTuplesSearchCursor(ILSMIndexOperationContext opCtx) {
-        super(opCtx);
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;
-        cmp = lsmInitialState.getHilbertCmp();
-        btreeCmp = lsmInitialState.getBTreeCmp();
-        int numDiskRTrees = lsmInitialState.getNumberOfTrees();
-        rangeCursors = new RTreeSearchCursor[numDiskRTrees];
-        for (int i = 0; i < numDiskRTrees; i++) {
-            rangeCursors[i] = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState
-                    .getRTreeInteriorFrameFactory().createFrame(), (IRTreeLeafFrame) lsmInitialState
-                    .getRTreeLeafFrameFactory().createFrame());
-        }
-        includeMemComponent = lsmInitialState.getIncludeMemComponent();
-        operationalComponents = lsmInitialState.getOperationalComponents();
-        if (includeMemComponent) {
-            memRTreeCursor = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState.getRTreeInteriorFrameFactory()
-                    .createFrame(), (IRTreeLeafFrame) lsmInitialState.getRTreeLeafFrameFactory().createFrame());
-            memBTreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory()
-                    .createFrame(), false);
-            memBTreeAccessor = lsmInitialState.getBTreeAccessors()[0];
-            btreeRangePredicate = new RangePredicate(null, null, true, true, btreeCmp, btreeCmp);
-        }
-        lsmHarness = lsmInitialState.getLSMHarness();
-        comparatorFields = lsmInitialState.getComparatorFields();
-        setPriorityQueueComparator();
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
-        if (includeMemComponent) {
-            if (foundNext) {
-                return true;
-            }
-            while (memRTreeCursor.hasNext()) {
-                memRTreeCursor.next();
-                ITupleReference memRTreeTuple = memRTreeCursor.getTuple();
-                if (searchMemBTree(memRTreeTuple)) {
-                    foundNext = true;
-                    frameTuple = memRTreeTuple;
-                    return true;
-                }
-            }
-            while (super.hasNext()) {
-                super.next();
-                ITupleReference diskRTreeTuple = super.getTuple();
-                if (searchMemBTree(diskRTreeTuple)) {
-                    foundNext = true;
-                    frameTuple = diskRTreeTuple;
-                    return true;
-                }
-            }
-        } else {
-            return super.hasNext();
-        }
-
-        return false;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        if (includeMemComponent) {
-            foundNext = false;
-        } else {
-            super.next();
-        }
-
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        if (includeMemComponent) {
-            return frameTuple;
-        } else {
-            return super.getTuple();
-        }
-
-    }
-
-    @Override
-    public void reset() throws HyracksDataException, IndexException {
-        if (includeMemComponent) {
-            memRTreeCursor.reset();
-            memBTreeCursor.reset();
-        }
-        super.reset();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (includeMemComponent) {
-            memRTreeCursor.close();
-            memBTreeCursor.close();
-        }
-        super.close();
-    }
-
-    public ITreeIndexCursor getMemRTreeCursor() {
-        return memRTreeCursor;
-    }
-
-    @Override
-    protected int compare(MultiComparator cmp, ITupleReference tupleA, ITupleReference tupleB) {
-        return cmp.selectiveFieldCompare(tupleA, tupleB, comparatorFields);
-    }
-
-    private boolean searchMemBTree(ITupleReference tuple) throws HyracksDataException {
-        try {
-            btreeRangePredicate.setHighKey(tuple, true);
-            btreeRangePredicate.setLowKey(tuple, true);
-            memBTreeAccessor.search(memBTreeCursor, btreeRangePredicate);
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
-        try {
-            if (memBTreeCursor.hasNext()) {
-                return false;
-            } else {
-                return true;
-            }
-        } finally {
-            memBTreeCursor.close();
-        }
-    }
-
-    @Override
-    protected void setPriorityQueueComparator() {
-        if (pqCmp == null || cmp != pqCmp.getMultiComparator()) {
-            pqCmp = new PriorityQueueHilbertComparator(cmp, comparatorFields);
-        }
-    }
-
-    public class PriorityQueueHilbertComparator extends PriorityQueueComparator {
-
-        private final int[] comparatorFields;
-
-        public PriorityQueueHilbertComparator(MultiComparator cmp, int[] comparatorFields) {
-            super(cmp);
-            this.comparatorFields = comparatorFields;
-        }
-
-        @Override
-        public int compare(PriorityQueueElement elementA, PriorityQueueElement elementB) {
-            int result = cmp.selectiveFieldCompare(elementA.getTuple(), elementB.getTuple(), comparatorFields);
-            if (result != 0) {
-                return result;
-            }
-            if (elementA.getCursorIndex() > elementB.getCursorIndex()) {
-                return 1;
-            } else {
-                return -1;
-            }
-        }
-    }
-
-    @Override
-    protected void checkPriorityQueue() throws HyracksDataException, IndexException {
-        while (!outputPriorityQueue.isEmpty() || needPush == true) {
-            if (!outputPriorityQueue.isEmpty()) {
-                PriorityQueueElement checkElement = outputPriorityQueue.peek();
-                // If there is no previous tuple or the previous tuple can be ignored
-                if (outputElement == null) {
-                    if (isDeleted(checkElement)) {
-                        // If the key has been deleted then pop it and set needPush to true.
-                        // We cannot push immediately because the tuple may be
-                        // modified if hasNext() is called
-                        outputElement = outputPriorityQueue.poll();
-                        needPush = true;
-                    } else {
-                        break;
-                    }
-                } else {
-                    // Compare the previous tuple and the head tuple in the PQ
-                    if (compare(cmp, outputElement.getTuple(), checkElement.getTuple()) == 0) {
-                        // If the previous tuple and the head tuple are
-                        // identical
-                        // then pop the head tuple and push the next tuple from
-                        // the tree of head tuple
-
-                        // the head element of PQ is useless now
-                        PriorityQueueElement e = outputPriorityQueue.poll();
-                        pushIntoPriorityQueue(e);
-                    } else {
-                        // If the previous tuple and the head tuple are different
-                        // the info of previous tuple is useless
-                        if (needPush == true) {
-                            pushIntoPriorityQueue(outputElement);
-                            needPush = false;
-                        }
-                        outputElement = null;
-                    }
-                }
-            } else {
-                // the priority queue is empty and needPush
-                pushIntoPriorityQueue(outputElement);
-                needPush = false;
-                outputElement = null;
-            }
-        }
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/RTreeFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/RTreeFactory.java
deleted file mode 100644
index 71e228b..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/RTreeFactory.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class RTreeFactory extends TreeIndexFactory<RTree> {
-
-    public RTreeFactory(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IFreePageManagerFactory freePageManagerFactory, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount) {
-        super(bufferCache, fileMapProvider, freePageManagerFactory, interiorFrameFactory, leafFrameFactory,
-                cmpFactories, fieldCount);
-    }
-
-    @Override
-    public RTree createIndexInstance(FileReference file) throws IndexException {
-        return new RTree(bufferCache, fileMapProvider, freePageManagerFactory.createFreePageManager(),
-                interiorFrameFactory, leafFrameFactory, cmpFactories, fieldCount, file);
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/TreeTupleSorter.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/TreeTupleSorter.java
deleted file mode 100644
index 294c2b8..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/TreeTupleSorter.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-
-public class TreeTupleSorter implements ITreeIndexCursor {
-    private final static int INITIAL_SIZE = 1000000;
-    private int numTuples;
-    private int currentTupleIndex;
-    private int[] tPointers;
-    private IBufferCache bufferCache;
-    private final ITreeIndexFrame leafFrame1;
-    private final ITreeIndexFrame leafFrame2;
-    private ITreeIndexTupleReference frameTuple1;
-    private ITreeIndexTupleReference frameTuple2;
-    private final int fileId;
-    private final static int ARRAY_GROWTH = 1000000; // Must be at least of size 2
-    private final int[] comparatorFields;
-    private final MultiComparator cmp;
-
-    public TreeTupleSorter(int fileId, IBinaryComparatorFactory[] comparatorFactories, ITreeIndexFrame leafFrame1,
-            ITreeIndexFrame leafFrame2, IBufferCache bufferCache, int[] comparatorFields) {
-        this.fileId = fileId;
-        this.leafFrame1 = leafFrame1;
-        this.leafFrame2 = leafFrame2;
-        this.bufferCache = bufferCache;
-        this.comparatorFields = comparatorFields;
-        tPointers = new int[INITIAL_SIZE * 2];
-        frameTuple1 = leafFrame1.createTupleReference();
-        frameTuple2 = leafFrame2.createTupleReference();
-        currentTupleIndex = 0;
-        cmp = MultiComparator.create(comparatorFactories);
-    }
-
-    public void reset() {
-        numTuples = 0;
-        currentTupleIndex = 0;
-    }
-
-    public boolean hasNext() throws HyracksDataException {
-        if (numTuples <= currentTupleIndex) {
-            return false;
-        }
-        // We don't latch pages since this code is only used by flush () before
-        // bulk-loading the r-tree to disk and flush is not concurrent.
-        //
-        ICachedPage node1 = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, tPointers[currentTupleIndex * 2]),
-                false);
-        try {
-            leafFrame1.setPage(node1);
-            frameTuple1.resetByTupleOffset(leafFrame1.getBuffer(), tPointers[currentTupleIndex * 2 + 1]);
-        } finally {
-            bufferCache.unpin(node1);
-        }
-        return true;
-    }
-
-    public void next() {
-        currentTupleIndex++;
-    }
-
-    public ITupleReference getTuple() {
-        return frameTuple1;
-    }
-
-    public void insertTupleEntry(int pageId, int tupleOffset) {
-        if (numTuples * 2 == tPointers.length) {
-            int[] newData = new int[tPointers.length + ARRAY_GROWTH];
-            System.arraycopy(tPointers, 0, newData, 0, tPointers.length);
-            tPointers = newData;
-        }
-
-        tPointers[numTuples * 2] = pageId;
-        tPointers[numTuples * 2 + 1] = tupleOffset;
-        numTuples++;
-    }
-
-    public void sort() throws HyracksDataException {
-        sort(tPointers, 0, numTuples);
-    }
-
-    private void sort(int[] tPointers, int offset, int length) throws HyracksDataException {
-        int m = offset + (length >> 1);
-        int mi = tPointers[m * 2];
-        int mj = tPointers[m * 2 + 1];
-
-        int a = offset;
-        int b = a;
-        int c = offset + length - 1;
-        int d = c;
-        while (true) {
-            while (b <= c) {
-                int cmp = compare(tPointers, b, mi, mj);
-                if (cmp > 0) {
-                    break;
-                }
-                if (cmp == 0) {
-                    swap(tPointers, a++, b);
-                }
-                ++b;
-            }
-            while (c >= b) {
-                int cmp = compare(tPointers, c, mi, mj);
-                if (cmp < 0) {
-                    break;
-                }
-                if (cmp == 0) {
-                    swap(tPointers, c, d--);
-                }
-                --c;
-            }
-            if (b > c)
-                break;
-            swap(tPointers, b++, c--);
-        }
-
-        int s;
-        int n = offset + length;
-        s = Math.min(a - offset, b - a);
-        vecswap(tPointers, offset, b - s, s);
-        s = Math.min(d - c, n - d - 1);
-        vecswap(tPointers, b, n - s, s);
-
-        if ((s = b - a) > 1) {
-            sort(tPointers, offset, s);
-        }
-        if ((s = d - c) > 1) {
-            sort(tPointers, n - s, s);
-        }
-    }
-
-    private void swap(int x[], int a, int b) {
-        for (int i = 0; i < 2; ++i) {
-            int t = x[a * 2 + i];
-            x[a * 2 + i] = x[b * 2 + i];
-            x[b * 2 + i] = t;
-        }
-    }
-
-    private void vecswap(int x[], int a, int b, int n) {
-        for (int i = 0; i < n; i++, a++, b++) {
-            swap(x, a, b);
-        }
-    }
-
-    private int compare(int[] tPointers, int tp1, int tp2i, int tp2j) throws HyracksDataException {
-        int i1 = tPointers[tp1 * 2];
-        int j1 = tPointers[tp1 * 2 + 1];
-
-        int i2 = tp2i;
-        int j2 = tp2j;
-
-        ICachedPage node1 = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, i1), false);
-        leafFrame1.setPage(node1);
-        ICachedPage node2 = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, i2), false);
-        leafFrame2.setPage(node2);
-
-        try {
-            frameTuple1.resetByTupleOffset(leafFrame1.getBuffer(), j1);
-            frameTuple2.resetByTupleOffset(leafFrame2.getBuffer(), j2);
-
-            return cmp.selectiveFieldCompare(frameTuple1, frameTuple2, comparatorFields);
-
-        } finally {
-            bufferCache.unpin(node1);
-            bufferCache.unpin(node2);
-        }
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        // do nothing
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        // do nothing
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return null;
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        // do nothing
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        // do nothing
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return false;
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeCopyTupleWriter.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeCopyTupleWriter.java
deleted file mode 100644
index 1852b51..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeCopyTupleWriter.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-
-public class LSMRTreeCopyTupleWriter extends LSMRTreeTupleWriter {
-    public LSMRTreeCopyTupleWriter(ITypeTraits[] typeTraits) {
-        // Third parameter is never used locally, just give false.
-        super(typeTraits, false);
-    }
-
-    @Override
-    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
-        int tupleSize = bytesRequired(tuple);
-        byte[] buf = tuple.getFieldData(0);
-        int tupleStartOff = ((LSMRTreeTupleReference) tuple).getTupleStart();
-        System.arraycopy(buf, tupleStartOff, targetBuf, targetOff, tupleSize);
-        return tupleSize;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeCopyTupleWriterFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeCopyTupleWriterFactory.java
deleted file mode 100644
index 39a8e4d..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeCopyTupleWriterFactory.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-
-public class LSMRTreeCopyTupleWriterFactory extends TypeAwareTupleWriterFactory {
-    private static final long serialVersionUID = 1L;
-    private final ITypeTraits[] typeTraits;
-
-    public LSMRTreeCopyTupleWriterFactory(ITypeTraits[] typeTraits) {
-        super(typeTraits);
-        this.typeTraits = typeTraits;
-    }
-
-    @Override
-    public ITreeIndexTupleWriter createTupleWriter() {
-        return new LSMRTreeCopyTupleWriter(typeTraits);
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleReference.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleReference.java
deleted file mode 100644
index 70072e1..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleReference.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMTreeTupleReference;
-
-public class LSMRTreeTupleReference extends TypeAwareTupleReference implements ILSMTreeTupleReference {
-
-    public LSMRTreeTupleReference(ITypeTraits[] typeTraits) {
-        super(typeTraits);
-    }
-
-    @Override
-    protected int getNullFlagsBytes() {
-        // +1.0 is for matter/antimatter bit.
-        return (int) Math.ceil((fieldCount + 1.0) / 8.0);
-    }
-
-    @Override
-    public boolean isAntimatter() {
-        // Check if the leftmost bit is 0 or 1.
-        final byte mask = (byte) (1 << 7);
-        if ((buf.array()[tupleStartOff] & mask) != 0) {
-            return true;
-        }
-        return false;
-    }
-
-    public int getTupleStart() {
-        return tupleStartOff;
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleWriter.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleWriter.java
deleted file mode 100644
index 932a307..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleWriter.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-
-public class LSMRTreeTupleWriter extends RTreeTypeAwareTupleWriter {
-    private final boolean isAntimatter;
-
-    public LSMRTreeTupleWriter(ITypeTraits[] typeTraits, boolean isAntimatter) {
-        super(typeTraits);
-        this.isAntimatter = isAntimatter;
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return new LSMRTreeTupleReference(typeTraits);
-    }
-
-    @Override
-    public int bytesRequired(ITupleReference tuple) {
-        return super.bytesRequired(tuple);
-    }
-
-    @Override
-    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
-        int bytesWritten = super.writeTuple(tuple, targetBuf, targetOff);
-        if (isAntimatter) {
-            setAntimatterBit(targetBuf, targetOff);
-        }
-        return bytesWritten;
-    }
-
-    @Override
-    protected int getNullFlagsBytes(int numFields) {
-        // +1.0 is for matter/antimatter bit.
-        return (int) Math.ceil(((double) numFields + 1.0) / 8.0);
-    }
-
-    @Override
-    protected int getNullFlagsBytes(ITupleReference tuple) {
-        // +1.0 is for matter/antimatter bit.
-        return (int) Math.ceil(((double) tuple.getFieldCount() + 1.0) / 8.0);
-    }
-
-    protected void setAntimatterBit(byte[] targetBuf, int targetOff) {
-        // Set leftmost bit to 1.
-        targetBuf[targetOff] = (byte) (targetBuf[targetOff] | (1 << 7));
-    }
-
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleWriterFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleWriterFactory.java
deleted file mode 100644
index 493d368..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMRTreeTupleWriterFactory.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-
-public class LSMRTreeTupleWriterFactory extends TypeAwareTupleWriterFactory {
-
-    private static final long serialVersionUID = 1L;
-    private final ITypeTraits[] typeTraits;
-    private final boolean isDelete;
-
-    public LSMRTreeTupleWriterFactory(ITypeTraits[] typeTraits, boolean isDelete) {
-        super(typeTraits);
-        this.typeTraits = typeTraits;
-        this.isDelete = isDelete;
-    }
-
-    @Override
-    public ITreeIndexTupleWriter createTupleWriter() {
-        return new LSMRTreeTupleWriter(typeTraits, isDelete);
-    }
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMTypeAwareTupleWriterFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMTypeAwareTupleWriterFactory.java
deleted file mode 100644
index 876df56..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/tuples/LSMTypeAwareTupleWriterFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-
-public class LSMTypeAwareTupleWriterFactory extends TypeAwareTupleWriterFactory {
-
-	private static final long serialVersionUID = 1L;
-	private ITypeTraits[] typeTraits;
-	private final boolean isDelete;
-	
-	public LSMTypeAwareTupleWriterFactory(ITypeTraits[] typeTraits, boolean isDelete) {
-		super(typeTraits);
-		this.typeTraits = typeTraits;
-		this.isDelete = isDelete;
-	}
-
-	@Override
-	public ITreeIndexTupleWriter createTupleWriter() {
-	    if (isDelete) {
-	        return new TypeAwareTupleWriter(typeTraits);
-	    } else {
-	        return new RTreeTypeAwareTupleWriter(typeTraits);
-	    }
-	}
-
-}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
deleted file mode 100644
index 6c9fce6..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.utils;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManagerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeWithAntiMatterTuples;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeWithAntiMatterTuplesFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.RTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples.LSMRTreeCopyTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples.LSMRTreeTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples.LSMTypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.linearize.HilbertDoubleComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.linearize.ZCurveDoubleComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.linearize.ZCurveIntComparatorFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class LSMRTreeUtils {
-    public static LSMRTree createLSMTree(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory) throws TreeIndexException {
-        return createLSMTree(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache, diskFileMapProvider,
-                typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory, 0);
-    }
-
-    public static LSMRTree createLSMTree(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizeCmpFactory, int startIODeviceIndex) throws TreeIndexException {
-        LSMTypeAwareTupleWriterFactory rtreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
-        LSMTypeAwareTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
-
-        ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(rtreeTupleWriterFactory,
-                valueProviderFactories, rtreePolicyType);
-        ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeTupleWriterFactory,
-                valueProviderFactories, rtreePolicyType);
-
-        ITreeIndexFrameFactory btreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(btreeTupleWriterFactory);
-        ITreeIndexFrameFactory btreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(btreeTupleWriterFactory);
-
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
-                metaFrameFactory);
-
-        TreeIndexFactory<RTree> diskRTreeFactory = new RTreeFactory(diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories,
-                typeTraits.length);
-        TreeIndexFactory<BTree> diskBTreeFactory = new BTreeFactory(diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories,
-                typeTraits.length);
-
-        int[] comparatorFields = { 0 };
-        IBinaryComparatorFactory[] linearizerArray = { linearizeCmpFactory };
-
-        int[] bloomFilterKeyFields = new int[btreeCmpFactories.length];
-        for (int i = 0; i < btreeCmpFactories.length; i++) {
-            bloomFilterKeyFields[i] = i;
-        }
-        BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
-                bloomFilterKeyFields);
-
-        ILSMIndexFileManager fileNameManager = new LSMRTreeFileManager(ioManager, diskFileMapProvider, file,
-                diskRTreeFactory, diskBTreeFactory, startIODeviceIndex);
-        LSMRTree lsmTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
-                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
-                diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, diskFileMapProvider, typeTraits.length,
-                rtreeCmpFactories, btreeCmpFactories, linearizeCmpFactory, comparatorFields, linearizerArray,
-                mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        return lsmTree;
-    }
-
-    public static LSMRTreeWithAntiMatterTuples createLSMTreeWithAntiMatterTuples(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizerCmpFactory) throws TreeIndexException {
-        return createLSMTreeWithAntiMatterTuples(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache,
-                diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
-                rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider,
-                linearizerCmpFactory, 0);
-    }
-
-    public static LSMRTreeWithAntiMatterTuples createLSMTreeWithAntiMatterTuples(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
-            ILinearizeComparatorFactory linearizerCmpFactory, int startIODeviceIndex) throws TreeIndexException {
-
-        LSMRTreeTupleWriterFactory rtreeTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits, false);
-        LSMRTreeTupleWriterFactory btreeTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits, true);
-
-        LSMRTreeCopyTupleWriterFactory copyTupleWriterFactory = new LSMRTreeCopyTupleWriterFactory(typeTraits);
-
-        ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(rtreeTupleWriterFactory,
-                valueProviderFactories, rtreePolicyType);
-        ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeTupleWriterFactory,
-                valueProviderFactories, rtreePolicyType);
-
-        ITreeIndexFrameFactory btreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(btreeTupleWriterFactory);
-        ITreeIndexFrameFactory btreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(btreeTupleWriterFactory);
-
-        ITreeIndexFrameFactory copyTupleLeafFrameFactory = new RTreeNSMLeafFrameFactory(copyTupleWriterFactory,
-                valueProviderFactories, rtreePolicyType);
-
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
-                metaFrameFactory);
-
-        TreeIndexFactory<RTree> diskRTreeFactory = new RTreeFactory(diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, rtreeInteriorFrameFactory, copyTupleLeafFrameFactory, rtreeCmpFactories,
-                typeTraits.length);
-
-        TreeIndexFactory<RTree> bulkLoadRTreeFactory = new RTreeFactory(diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories,
-                typeTraits.length);
-
-        // The first field is for the sorted curve (e.g. Hilbert curve), and the
-        // second field is for the primary key.
-        int[] comparatorFields = { 0, btreeCmpFactories.length - 1 };
-        IBinaryComparatorFactory[] linearizerArray = { linearizerCmpFactory,
-                btreeCmpFactories[btreeCmpFactories.length - 1] };
-
-        ILSMIndexFileManager fileNameManager = new LSMRTreeWithAntiMatterTuplesFileManager(ioManager,
-                diskFileMapProvider, file, diskRTreeFactory, startIODeviceIndex);
-        LSMRTreeWithAntiMatterTuples lsmTree = new LSMRTreeWithAntiMatterTuples(memBufferCache, memFreePageManager,
-                rtreeInteriorFrameFactory, rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory,
-                fileNameManager, diskRTreeFactory, bulkLoadRTreeFactory, diskFileMapProvider, typeTraits.length,
-                rtreeCmpFactories, btreeCmpFactories, linearizerCmpFactory, comparatorFields, linearizerArray,
-                mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        return lsmTree;
-    }
-
-    public static ILinearizeComparatorFactory proposeBestLinearizer(ITypeTraits[] typeTraits, int numKeyFields)
-            throws TreeIndexException {
-        for (int i = 0; i < numKeyFields; i++) {
-            if (!(typeTraits[i].getClass().equals(typeTraits[0].getClass()))) {
-                throw new TreeIndexException("Cannot propose linearizer if dimensions have different types");
-            }
-        }
-
-        if (numKeyFields / 2 == 2 && (typeTraits[0].getClass() == DoublePointable.TYPE_TRAITS.getClass())) {
-            return new HilbertDoubleComparatorFactory(2);
-        } else if (typeTraits[0].getClass() == DoublePointable.TYPE_TRAITS.getClass()) {
-            return new ZCurveDoubleComparatorFactory(numKeyFields / 2);
-        } else if (typeTraits[0].getClass() == IntegerPointable.TYPE_TRAITS.getClass()) {
-            return new ZCurveIntComparatorFactory(numKeyFields / 2);
-        }
-
-        throw new TreeIndexException("Cannot propose linearizer");
-    }
-}
diff --git a/hyracks-storage-am-rtree/pom.xml b/hyracks-storage-am-rtree/pom.xml
deleted file mode 100644
index 4c49a95..0000000
--- a/hyracks-storage-am-rtree/pom.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-rtree</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>  	  		
-  </dependencies>
-</project>
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
deleted file mode 100644
index 59c047c..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.api;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
-
-public interface IRTreeInteriorFrame extends IRTreeFrame {
-
-    public int findBestChild(ITupleReference tuple, MultiComparator cmp);
-
-    public boolean checkIfEnlarementIsNeeded(ITupleReference tuple, MultiComparator cmp);
-
-    public int getChildPageId(int tupleIndex);
-
-    public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
-
-    public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
-
-    public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp);
-
-    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException;
-
-    public void enlarge(ITupleReference tuple, MultiComparator cmp);
-
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
deleted file mode 100644
index 858a40d..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.api;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface IRTreeLeafFrame extends IRTreeFrame {
-
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp);
-
-    public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
-
-    public ITupleReference getBeforeTuple(ITupleReference tuple, int targetTupleIndex, MultiComparator cmp);
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreePolicy.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreePolicy.java
deleted file mode 100644
index a0cc5e8..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreePolicy.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.api;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface IRTreePolicy {
-    public void split(ITreeIndexFrame leftFrame, ByteBuffer buf, ITreeIndexFrame rightFrame, ISlotManager slotManager,
-            ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey);
-
-    public int findBestChildPosition(ITreeIndexFrame frame, ITupleReference tuple, ITreeIndexTupleReference frameTuple,
-            MultiComparator cmp);
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelper.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelper.java
deleted file mode 100644
index 5c3b314..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelper.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-public class RTreeDataflowHelper extends TreeIndexDataflowHelper {
-
-    private final IPrimitiveValueProviderFactory[] valueProviderFactories;
-    private final RTreePolicyType rtreePolicyType;
-
-    public RTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType) {
-        super(opDesc, ctx, partition);
-        this.valueProviderFactories = valueProviderFactories;
-        this.rtreePolicyType = rtreePolicyType;
-    }
-
-    @Override
-    public ITreeIndex createIndexInstance() throws HyracksDataException {
-        AbstractTreeIndexOperatorDescriptor treeOpDesc = (AbstractTreeIndexOperatorDescriptor) opDesc;
-        return RTreeUtils.createRTree(treeOpDesc.getStorageManager().getBufferCache(ctx), treeOpDesc
-                .getStorageManager().getFileMapProvider(ctx), treeOpDesc.getTreeIndexTypeTraits(),
-                valueProviderFactories, treeOpDesc.getTreeIndexComparatorFactories(), rtreePolicyType, file);
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelperFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelperFactory.java
deleted file mode 100644
index 06af8ee..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelperFactory.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class RTreeDataflowHelperFactory implements IIndexDataflowHelperFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final IPrimitiveValueProviderFactory[] valueProviderFactories;
-    private final RTreePolicyType rtreePolicyType;
-
-    public RTreeDataflowHelperFactory(IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType) {
-        this.valueProviderFactories = valueProviderFactories;
-        this.rtreePolicyType = rtreePolicyType;
-    }
-
-    @Override
-    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        return new RTreeDataflowHelper(opDesc, ctx, partition, valueProviderFactories, rtreePolicyType);
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
deleted file mode 100644
index d718c69..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.NoOpLocalResourceFactoryProvider;
-
-public class RTreeSearchOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    protected int[] keyFields; // fields in input tuple to be used as keys
-
-    public RTreeSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] keyFields,
-            IIndexDataflowHelperFactory dataflowHelperFactory, boolean retainInput,
-            ISearchOperationCallbackFactory searchOpCallbackFactory) {
-        super(spec, 1, 1, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, null, dataflowHelperFactory, null, retainInput,
-                NoOpLocalResourceFactoryProvider.INSTANCE, searchOpCallbackFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-
-        this.keyFields = keyFields;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new RTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, keyFields);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
deleted file mode 100644
index de4961b..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-public class RTreeSearchOperatorNodePushable extends IndexSearchOperatorNodePushable {
-    protected PermutingFrameTupleReference searchKey;
-    protected MultiComparator cmp;
-
-    public RTreeSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, IRecordDescriptorProvider recordDescProvider, int[] keyFields) {
-        super(opDesc, ctx, partition, recordDescProvider);
-        if (keyFields != null && keyFields.length > 0) {
-            searchKey = new PermutingFrameTupleReference();
-            searchKey.setFieldPermutation(keyFields);
-        }
-    }
-
-    @Override
-    protected ISearchPredicate createSearchPredicate() {
-        ITreeIndex treeIndex = (ITreeIndex) index;
-        cmp = RTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), searchKey);
-        return new SearchPredicate(searchKey, cmp);
-    }
-
-    @Override
-    protected void resetSearchPredicate(int tupleIndex) {
-        if (searchKey != null) {
-            searchKey.reset(accessor, tupleIndex);
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RStarTreePolicy.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
deleted file mode 100644
index aafecd5..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
+++ /dev/null
@@ -1,354 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreePolicy;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-
-public class RStarTreePolicy implements IRTreePolicy {
-
-    private TupleEntryArrayList tupleEntries1;
-    private TupleEntryArrayList tupleEntries2;
-    private Rectangle[] rec;
-
-    private static final int nearMinimumOverlapFactor = 32;
-    private static final double splitFactor = 0.4;
-    private static final int numTuplesEntries = 100;
-
-    private final ITreeIndexTupleWriter tupleWriter;
-    private final IPrimitiveValueProvider[] keyValueProviders;
-    private ITreeIndexTupleReference cmpFrameTuple;
-    private final int totalFreeSpaceOff;
-
-    public RStarTreePolicy(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders,
-            ITreeIndexTupleReference cmpFrameTuple, int totalFreeSpaceOff) {
-        this.tupleWriter = tupleWriter;
-        this.keyValueProviders = keyValueProviders;
-        this.totalFreeSpaceOff = totalFreeSpaceOff;
-        this.cmpFrameTuple = cmpFrameTuple;
-        tupleEntries1 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
-        tupleEntries2 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
-        rec = new Rectangle[4];
-        for (int i = 0; i < 4; i++) {
-            rec[i] = new Rectangle(keyValueProviders.length / 2);
-        }
-    }
-
-    @Override
-    public void split(ITreeIndexFrame leftFrame, ByteBuffer buf, ITreeIndexFrame rightFrame, ISlotManager slotManager,
-            ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey) {
-        RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
-        RTreeTypeAwareTupleWriter rTreeTupleWriterleftRTreeFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
-        RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
-
-        RTreeNSMFrame leftRTreeFrame = ((RTreeNSMFrame) leftFrame);
-
-        // calculations are based on the R*-tree paper
-        int m = (int) Math.floor((leftRTreeFrame.getTupleCount() + 1) * splitFactor);
-        int splitDistribution = leftRTreeFrame.getTupleCount() - (2 * m) + 2;
-
-        // to calculate the minimum margin in order to pick the split axis
-        double minMargin = Double.MAX_VALUE;
-        int splitAxis = 0, sortOrder = 0;
-
-        int maxFieldPos = keyValueProviders.length / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            for (int k = 0; k < leftRTreeFrame.getTupleCount(); ++k) {
-
-                frameTuple.resetByTupleIndex(leftRTreeFrame, k);
-                double LowerKey = keyValueProviders[i]
-                        .getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
-                double UpperKey = keyValueProviders[j]
-                        .getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
-
-                tupleEntries1.add(k, LowerKey);
-                tupleEntries2.add(k, UpperKey);
-            }
-            double LowerKey = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
-            double UpperKey = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
-
-            tupleEntries1.add(-1, LowerKey);
-            tupleEntries2.add(-1, UpperKey);
-
-            tupleEntries1.sort(EntriesOrder.ASCENDING, leftRTreeFrame.getTupleCount() + 1);
-            tupleEntries2.sort(EntriesOrder.ASCENDING, leftRTreeFrame.getTupleCount() + 1);
-
-            double lowerMargin = 0.0, upperMargin = 0.0;
-            // generate distribution
-            for (int k = 1; k <= splitDistribution; ++k) {
-                int d = m - 1 + k;
-
-                generateDist(leftRTreeFrame, frameTuple, tuple, tupleEntries1, rec[0], 0, d);
-                generateDist(leftRTreeFrame, frameTuple, tuple, tupleEntries2, rec[1], 0, d);
-                generateDist(leftRTreeFrame, frameTuple, tuple, tupleEntries1, rec[2], d,
-                        leftRTreeFrame.getTupleCount() + 1);
-                generateDist(leftRTreeFrame, frameTuple, tuple, tupleEntries2, rec[3], d,
-                        leftRTreeFrame.getTupleCount() + 1);
-
-                // calculate the margin of the distributions
-                lowerMargin += rec[0].margin() + rec[2].margin();
-                upperMargin += rec[1].margin() + rec[3].margin();
-            }
-            double margin = Math.min(lowerMargin, upperMargin);
-
-            // store minimum margin as split axis
-            if (margin < minMargin) {
-                minMargin = margin;
-                splitAxis = i;
-                sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
-            }
-
-            tupleEntries1.clear();
-            tupleEntries2.clear();
-        }
-
-        for (int i = 0; i < leftRTreeFrame.getTupleCount(); ++i) {
-            frameTuple.resetByTupleIndex(leftRTreeFrame, i);
-            double key = keyValueProviders[splitAxis + sortOrder].getValue(
-                    frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
-            tupleEntries1.add(i, key);
-        }
-        double key = keyValueProviders[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
-                tuple.getFieldStart(splitAxis + sortOrder));
-        tupleEntries1.add(-1, key);
-        tupleEntries1.sort(EntriesOrder.ASCENDING, leftRTreeFrame.getTupleCount() + 1);
-
-        double minArea = Double.MAX_VALUE;
-        double minOverlap = Double.MAX_VALUE;
-        int splitPoint = 0;
-        for (int i = 1; i <= splitDistribution; ++i) {
-            int d = m - 1 + i;
-
-            generateDist(leftRTreeFrame, frameTuple, tuple, tupleEntries1, rec[0], 0, d);
-            generateDist(leftRTreeFrame, frameTuple, tuple, tupleEntries1, rec[2], d,
-                    leftRTreeFrame.getTupleCount() + 1);
-
-            double overlap = rec[0].overlappedArea(rec[2]);
-            if (overlap < minOverlap) {
-                splitPoint = d;
-                minOverlap = overlap;
-                minArea = rec[0].area() + rec[2].area();
-            } else if (overlap == minOverlap) {
-                double area = rec[0].area() + rec[2].area();
-                if (area < minArea) {
-                    splitPoint = d;
-                    minArea = area;
-                }
-            }
-        }
-        int startIndex, endIndex;
-        if (splitPoint < (leftRTreeFrame.getTupleCount() + 1) / 2) {
-            startIndex = 0;
-            endIndex = splitPoint;
-        } else {
-            startIndex = splitPoint;
-            endIndex = (leftRTreeFrame.getTupleCount() + 1);
-        }
-        boolean insertedNewTupleInRightFrame = false;
-        int totalBytes = 0, numOfDeletedTuples = 0;
-        for (int i = startIndex; i < endIndex; i++) {
-            if (tupleEntries1.get(i).getTupleIndex() != -1) {
-                frameTuple.resetByTupleIndex(leftRTreeFrame, tupleEntries1.get(i).getTupleIndex());
-                rightFrame.insert(frameTuple, -1);
-                ((UnorderedSlotManager) slotManager).modifySlot(
-                        slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
-                totalBytes += leftRTreeFrame.getTupleSize(frameTuple);
-                numOfDeletedTuples++;
-            } else {
-                insertedNewTupleInRightFrame = true;
-            }
-        }
-
-        ((UnorderedSlotManager) slotManager).deleteEmptySlots();
-
-        // maintain space information
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
-                + (slotManager.getSlotSize() * numOfDeletedTuples));
-
-        // compact both pages
-        rightFrame.compact();
-        leftRTreeFrame.compact();
-
-        // The assumption here is that the new tuple cannot be larger than page
-        // size, thus it must fit in either pages.
-        if (insertedNewTupleInRightFrame) {
-            if (rightFrame.hasSpaceInsert(tuple) == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
-                rightFrame.insert(tuple, -1);
-            } else {
-                leftRTreeFrame.insert(tuple, -1);
-            }
-        } else if (leftRTreeFrame.hasSpaceInsert(tuple) == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
-            leftRTreeFrame.insert(tuple, -1);
-        } else {
-            rightFrame.insert(tuple, -1);
-        }
-
-        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
-
-        splitKey.initData(splitKeySize);
-        leftRTreeFrame.adjustMBR();
-        rTreeTupleWriterleftRTreeFrame.writeTupleFields(leftRTreeFrame.getTuples(), 0,
-                rTreeSplitKey.getLeftPageBuffer(), 0);
-        rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
-
-        ((IRTreeFrame) rightFrame).adjustMBR();
-        rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
-                rTreeSplitKey.getRightPageBuffer(), 0);
-        rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
-
-        tupleEntries1.clear();
-        tupleEntries2.clear();
-    }
-
-    public void generateDist(ITreeIndexFrame leftRTreeFrame, ITreeIndexTupleReference frameTuple,
-            ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end) {
-        int j = 0;
-        while (entries.get(j).getTupleIndex() == -1) {
-            j++;
-        }
-        frameTuple.resetByTupleIndex(leftRTreeFrame, entries.get(j).getTupleIndex());
-        rec.set(frameTuple, keyValueProviders);
-        for (int i = start; i < end; ++i) {
-            if (i != j) {
-                if (entries.get(i).getTupleIndex() != -1) {
-                    frameTuple.resetByTupleIndex(leftRTreeFrame, entries.get(i).getTupleIndex());
-                    rec.enlarge(frameTuple, keyValueProviders);
-                } else {
-                    rec.enlarge(tuple, keyValueProviders);
-                }
-            }
-        }
-    }
-
-    @Override
-    public int findBestChildPosition(ITreeIndexFrame frame, ITupleReference tuple, ITreeIndexTupleReference frameTuple,
-            MultiComparator cmp) {
-        cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-
-        int bestChild = 0;
-        double minEnlargedArea = Double.MAX_VALUE;
-
-        // the children pointers in the node point to leaves
-        if (frame.getLevel() == 1) {
-            // find least overlap enlargement, use minimum enlarged area to
-            // break tie, if tie still exists use minimum area to break it
-            for (int i = 0; i < frame.getTupleCount(); ++i) {
-                frameTuple.resetByTupleIndex(frame, i);
-                double enlargedArea = RTreeComputationUtils.enlargedArea(frameTuple, tuple, cmp, keyValueProviders);
-                tupleEntries1.add(i, enlargedArea);
-                if (enlargedArea < minEnlargedArea) {
-                    minEnlargedArea = enlargedArea;
-                    bestChild = i;
-                }
-            }
-            if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon() || minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
-                minEnlargedArea = Double.MAX_VALUE;
-                int k;
-                if (frame.getTupleCount() > nearMinimumOverlapFactor) {
-                    // sort the entries based on their area enlargement needed
-                    // to include the object
-                    tupleEntries1.sort(EntriesOrder.ASCENDING, frame.getTupleCount());
-                    k = nearMinimumOverlapFactor;
-                } else {
-                    k = frame.getTupleCount();
-                }
-
-                double minOverlap = Double.MAX_VALUE;
-                int id = 0;
-                for (int i = 0; i < k; ++i) {
-                    double difference = 0.0;
-                    for (int j = 0; j < frame.getTupleCount(); ++j) {
-                        frameTuple.resetByTupleIndex(frame, j);
-                        cmpFrameTuple.resetByTupleIndex(frame, tupleEntries1.get(i).getTupleIndex());
-
-                        int c = ((RTreeNSMInteriorFrame) frame).pointerCmp(frameTuple, cmpFrameTuple, cmp);
-                        if (c != 0) {
-                            double intersection = RTreeComputationUtils.overlappedArea(frameTuple, tuple,
-                                    cmpFrameTuple, cmp, keyValueProviders);
-                            if (intersection != 0.0) {
-                                difference += intersection
-                                        - RTreeComputationUtils.overlappedArea(frameTuple, null, cmpFrameTuple, cmp,
-                                                keyValueProviders);
-                            }
-                        } else {
-                            id = j;
-                        }
-                    }
-
-                    double enlargedArea = RTreeComputationUtils.enlargedArea(cmpFrameTuple, tuple, cmp,
-                            keyValueProviders);
-                    if (difference < minOverlap) {
-                        minOverlap = difference;
-                        minEnlargedArea = enlargedArea;
-                        bestChild = id;
-                    } else if (difference == minOverlap) {
-                        if (enlargedArea < minEnlargedArea) {
-                            minEnlargedArea = enlargedArea;
-                            bestChild = id;
-                        } else if (enlargedArea == minEnlargedArea) {
-                            double area = RTreeComputationUtils.area(cmpFrameTuple, cmp, keyValueProviders);
-                            frameTuple.resetByTupleIndex(frame, bestChild);
-                            double minArea = RTreeComputationUtils.area(frameTuple, cmp, keyValueProviders);
-                            if (area < minArea) {
-                                bestChild = id;
-                            }
-                        }
-                    }
-                }
-            }
-        } else { // find minimum enlarged area, use minimum area to break tie
-            for (int i = 0; i < frame.getTupleCount(); i++) {
-                frameTuple.resetByTupleIndex(frame, i);
-                double enlargedArea = RTreeComputationUtils.enlargedArea(frameTuple, tuple, cmp, keyValueProviders);
-                if (enlargedArea < minEnlargedArea) {
-                    minEnlargedArea = enlargedArea;
-                    bestChild = i;
-                } else if (enlargedArea == minEnlargedArea) {
-                    double area = RTreeComputationUtils.area(frameTuple, cmp, keyValueProviders);
-                    frameTuple.resetByTupleIndex(frame, bestChild);
-                    double minArea = RTreeComputationUtils.area(frameTuple, cmp, keyValueProviders);
-                    if (area < minArea) {
-                        bestChild = i;
-                    }
-                }
-            }
-        }
-        tupleEntries1.clear();
-
-        return bestChild;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
deleted file mode 100644
index f0122b3..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public class RTreeComputationUtils {
-
-    public static double enlargedArea(ITupleReference tuple, ITupleReference tupleToBeInserted, MultiComparator cmp,
-            IPrimitiveValueProvider[] keyValueProviders) {
-        double areaBeforeEnlarge = RTreeComputationUtils.area(tuple, cmp, keyValueProviders);
-        double areaAfterEnlarge = 1.0;
-
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            double pHigh, pLow;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i),
-                    tupleToBeInserted.getFieldLength(i));
-            if (c < 0) {
-                pLow = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
-            } else {
-                pLow = keyValueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
-                        tupleToBeInserted.getFieldStart(i));
-            }
-
-            c = cmp.getComparators()[j].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
-                    tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j),
-                    tupleToBeInserted.getFieldLength(j));
-            if (c > 0) {
-                pHigh = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
-            } else {
-                pHigh = keyValueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
-                        tupleToBeInserted.getFieldStart(j));
-            }
-            areaAfterEnlarge *= pHigh - pLow;
-        }
-        return areaAfterEnlarge - areaBeforeEnlarge;
-    }
-
-    public static double overlappedArea(ITupleReference tuple1, ITupleReference tupleToBeInserted,
-            ITupleReference tuple2, MultiComparator cmp, IPrimitiveValueProvider[] keyValueProviders) {
-        double area = 1.0;
-        double f1, f2;
-
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            double pHigh1, pLow1;
-            if (tupleToBeInserted != null) {
-                int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
-                        tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i),
-                        tupleToBeInserted.getFieldStart(i), tupleToBeInserted.getFieldLength(i));
-                if (c < 0) {
-                    pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
-                } else {
-                    pLow1 = keyValueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
-                            tupleToBeInserted.getFieldStart(i));
-                }
-
-                c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
-                        tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j),
-                        tupleToBeInserted.getFieldStart(j), tupleToBeInserted.getFieldLength(j));
-                if (c > 0) {
-                    pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
-                } else {
-                    pHigh1 = keyValueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
-                            tupleToBeInserted.getFieldStart(j));
-                }
-            } else {
-                pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
-                pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
-            }
-
-            double pLow2 = keyValueProviders[i].getValue(tuple2.getFieldData(i), tuple2.getFieldStart(i));
-            double pHigh2 = keyValueProviders[j].getValue(tuple2.getFieldData(j), tuple2.getFieldStart(j));
-
-            if (pLow1 > pHigh2 || pHigh1 < pLow2) {
-                return 0.0;
-            }
-
-            f1 = Math.max(pLow1, pLow2);
-            f2 = Math.min(pHigh1, pHigh2);
-            area *= f2 - f1;
-        }
-        return area;
-    }
-
-    public static double area(ITupleReference tuple, MultiComparator cmp, IPrimitiveValueProvider[] keyValueProviders) {
-        double area = 1.0;
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            area *= keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j))
-                    - keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
-        }
-        return area;
-    }
-
-    public static boolean containsRegion(ITupleReference tuple1, ITupleReference tuple2, MultiComparator cmp,
-            IPrimitiveValueProvider[] keyValueProviders) {
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i]
-                    .compare(tuple1.getFieldData(i), tuple1.getFieldStart(i), tuple1.getFieldLength(i),
-                            tuple2.getFieldData(i), tuple2.getFieldStart(i), tuple2.getFieldLength(i));
-            if (c > 0) {
-                return false;
-            }
-
-            c = cmp.getComparators()[j]
-                    .compare(tuple1.getFieldData(j), tuple1.getFieldStart(j), tuple1.getFieldLength(j),
-                            tuple2.getFieldData(j), tuple2.getFieldStart(j), tuple2.getFieldLength(j));
-            if (c < 0) {
-                return false;
-            }
-        }
-        return true;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
deleted file mode 100644
index eeada0a..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreePolicy;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
-
-public abstract class RTreeNSMFrame extends TreeIndexNSMFrame implements IRTreeFrame {
-    protected static final int pageNsnOff = smFlagOff + 1;
-    protected static final int rightPageOff = pageNsnOff + 8;
-
-    protected ITreeIndexTupleReference[] tuples;
-    protected ITreeIndexTupleReference cmpFrameTuple;
-
-    private static final double doubleEpsilon = computeDoubleEpsilon();
-    protected final IPrimitiveValueProvider[] keyValueProviders;
-
-    protected IRTreePolicy rtreePolicy;
-
-    public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders,
-            RTreePolicyType rtreePolicyType) {
-        super(tupleWriter, new UnorderedSlotManager());
-        this.tuples = new ITreeIndexTupleReference[keyValueProviders.length];
-        for (int i = 0; i < keyValueProviders.length; i++) {
-            this.tuples[i] = tupleWriter.createTupleReference();
-        }
-        cmpFrameTuple = tupleWriter.createTupleReference();
-        this.keyValueProviders = keyValueProviders;
-
-        if (rtreePolicyType == RTreePolicyType.RTREE) {
-            rtreePolicy = new RTreePolicy(tupleWriter, keyValueProviders, cmpFrameTuple, totalFreeSpaceOff);
-        } else {
-            rtreePolicy = new RStarTreePolicy(tupleWriter, keyValueProviders, cmpFrameTuple, totalFreeSpaceOff);
-        }
-    }
-
-    private static double computeDoubleEpsilon() {
-        double doubleEpsilon = 1.0;
-
-        do {
-            doubleEpsilon /= 2.0;
-        } while (1.0 + (doubleEpsilon / 2.0) != 1.0);
-        return doubleEpsilon;
-    }
-
-    public static double doubleEpsilon() {
-        return doubleEpsilon;
-    }
-
-    @Override
-    public void initBuffer(byte level) {
-        super.initBuffer(level);
-        buf.putLong(pageNsnOff, 0);
-        buf.putInt(rightPageOff, -1);
-    }
-
-    public void setTupleCount(int tupleCount) {
-        buf.putInt(tupleCountOff, tupleCount);
-    }
-
-    @Override
-    public void setPageNsn(long pageNsn) {
-        buf.putLong(pageNsnOff, pageNsn);
-    }
-
-    @Override
-    public long getPageNsn() {
-        return buf.getLong(pageNsnOff);
-    }
-
-    @Override
-    protected void resetSpaceParams() {
-        buf.putInt(freeSpaceOff, rightPageOff + 4);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - (rightPageOff + 4));
-    }
-
-    @Override
-    public int getRightPage() {
-        return buf.getInt(rightPageOff);
-    }
-
-    @Override
-    public void setRightPage(int rightPage) {
-        buf.putInt(rightPageOff, rightPage);
-    }
-
-    public ITreeIndexTupleReference[] getTuples() {
-        return tuples;
-    }
-
-    @Override
-    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) {
-        rtreePolicy.split(this, buf, rightFrame, slotManager, frameTuple, tuple, splitKey);
-    }
-
-    abstract public int getTupleSize(ITupleReference tuple);
-
-    public void adjustMBRImpl(ITreeIndexTupleReference[] tuples) {
-        int maxFieldPos = keyValueProviders.length / 2;
-        for (int i = 1; i < getTupleCount(); i++) {
-            frameTuple.resetByTupleIndex(this, i);
-            for (int j = 0; j < maxFieldPos; j++) {
-                int k = maxFieldPos + j;
-                double valA = keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
-                double valB = keyValueProviders[j].getValue(tuples[j].getFieldData(j), tuples[j].getFieldStart(j));
-                if (valA < valB) {
-                    tuples[j].resetByTupleIndex(this, i);
-                }
-                valA = keyValueProviders[k].getValue(frameTuple.getFieldData(k), frameTuple.getFieldStart(k));
-                valB = keyValueProviders[k].getValue(tuples[k].getFieldData(k), tuples[k].getFieldStart(k));
-                if (valA > valB) {
-                    tuples[k].resetByTupleIndex(this, i);
-                }
-            }
-        }
-    }
-
-    @Override
-    public void adjustMBR() {
-        for (int i = 0; i < tuples.length; i++) {
-            tuples[i].setFieldCount(getFieldCount());
-            tuples[i].resetByTupleIndex(this, 0);
-        }
-
-        adjustMBRImpl(tuples);
-    }
-
-    public abstract int getFieldCount();
-
-    @Override
-    public int getPageHeaderSize() {
-        return rightPageOff + 4;
-    }
-
-    @Override
-    public void setMultiComparator(MultiComparator cmp) {
-        // currently, R-Tree Frames are unsorted
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
deleted file mode 100644
index 5ab9632..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import java.util.ArrayList;
-import java.util.Collections;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
-
-public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
-
-    private static final int childPtrSize = 4;
-    private IBinaryComparator childPtrCmp = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
-            .createBinaryComparator();
-    private final int keyFieldCount;
-
-    public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders,
-            RTreePolicyType rtreePolicyType) {
-        super(tupleWriter, keyValueProviders, rtreePolicyType);
-        keyFieldCount = keyValueProviders.length;
-        frameTuple.setFieldCount(keyFieldCount);
-    }
-
-    @Override
-    public int findBestChild(ITupleReference tuple, MultiComparator cmp) {
-        int bestChild = rtreePolicy.findBestChildPosition(this, tuple, frameTuple, cmp);
-        frameTuple.resetByTupleIndex(this, bestChild);
-        return buf.getInt(getChildPointerOff(frameTuple));
-    }
-
-    // frameTuple is assumed to have the tuple to be tested against.
-    @Override
-    public boolean checkIfEnlarementIsNeeded(ITupleReference tuple, MultiComparator cmp) {
-        return !RTreeComputationUtils.containsRegion(frameTuple, tuple, cmp, keyValueProviders);
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        ITreeIndexTupleReference tuple = tupleWriter.createTupleReference();
-        tuple.setFieldCount(keyFieldCount);
-        return tuple;
-    }
-
-    @Override
-    public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp) {
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        for (int i = 0; i < getTupleCount(); i++) {
-            frameTuple.resetByTupleIndex(this, i);
-            int c = pointerCmp(frameTuple, tuple, cmp);
-            if (c == 0) {
-                return i;
-            }
-        }
-        return -1;
-    }
-
-    @Override
-    public int getChildPageId(int tupleIndex) {
-        frameTuple.resetByTupleIndex(this, tupleIndex);
-        return buf.getInt(getChildPointerOff(frameTuple));
-    }
-
-    @Override
-    public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        frameTuple.resetByTupleIndex(this, tupleIndex);
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
-                    frameTuple.getFieldLength(j));
-            if (c > 0) {
-                return -1;
-            }
-            c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
-                    frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-            if (c < 0) {
-                return -1;
-            }
-        }
-        return buf.getInt(getChildPointerOff(frameTuple));
-    }
-
-    @Override
-    public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp) {
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        for (int i = 0; i < getTupleCount(); i++) {
-            frameTuple.resetByTupleIndex(this, i);
-
-            int c = pointerCmp(frameTuple, tuple, cmp);
-            if (c == 0) {
-                return i;
-            } else {
-                int pageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
-                        getChildPointerOff(frameTuple));
-                traverseList.add(pageId, -1, parentIndex);
-            }
-        }
-        return -1;
-    }
-
-    @Override
-    public boolean compact() {
-        resetSpaceParams();
-
-        int tupleCount = buf.getInt(tupleCountOff);
-        int freeSpace = buf.getInt(freeSpaceOff);
-
-        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
-        sortedTupleOffs.ensureCapacity(tupleCount);
-        for (int i = 0; i < tupleCount; i++) {
-            int slotOff = slotManager.getSlotOff(i);
-            int tupleOff = slotManager.getTupleOff(slotOff);
-            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
-        }
-        Collections.sort(sortedTupleOffs);
-
-        for (int i = 0; i < sortedTupleOffs.size(); i++) {
-            int tupleOff = sortedTupleOffs.get(i).tupleOff;
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-
-            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
-            int tupleLength = tupleEndOff - tupleOff + childPtrSize;
-            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-
-            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
-            freeSpace += tupleLength;
-        }
-
-        buf.putInt(freeSpaceOff, freeSpace);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
-
-        return false;
-    }
-
-    @Override
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
-        int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize;
-        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
-                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-        else
-            return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
-
-    @Override
-    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException {
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        if (tupleIndex == -1) {
-            tupleIndex = findTupleByPointer(tuple, cmp);
-        }
-        if (tupleIndex != -1) {
-            tupleWriter.writeTuple(tuple, buf.array(), getTupleOffset(tupleIndex));
-        } else {
-            throw new TreeIndexException("Error: Faild to find a tuple in a page");
-
-        }
-
-    }
-
-    protected int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp) {
-        return childPtrCmp
-                .compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleA), childPtrSize,
-                        tupleB.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleB), childPtrSize);
-    }
-
-    public int getTupleSize(ITupleReference tuple) {
-        return tupleWriter.bytesRequired(tuple) + childPtrSize;
-    }
-
-    private int getChildPointerOff(ITupleReference tuple) {
-        return tuple.getFieldStart(tuple.getFieldCount() - 1) + tuple.getFieldLength(tuple.getFieldCount() - 1);
-    }
-
-    @Override
-    public void insert(ITupleReference tuple, int tupleIndex) {
-        frameTuple.setFieldCount(tuple.getFieldCount());
-        slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
-        int freeSpace = buf.getInt(freeSpaceOff);
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, tuple.getFieldCount(), buf.array(), freeSpace);
-        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getChildPointerOff(tuple), buf.array(),
-                freeSpace + bytesWritten, childPtrSize);
-        int tupleSize = bytesWritten + childPtrSize;
-
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
-
-    }
-
-    @Override
-    public void delete(int tupleIndex, MultiComparator cmp) {
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-
-        int tupleOff = slotManager.getTupleOff(slotOff);
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int tupleSize = tupleWriter.bytesRequired(frameTuple);
-
-        // perform deletion (we just do a memcpy to overwrite the slot)
-        int slotStartOff = slotManager.getSlotEndOff();
-        int length = slotOff - slotStartOff;
-        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
-
-        // maintain space information
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff,
-                buf.getInt(totalFreeSpaceOff) + tupleSize + childPtrSize + slotManager.getSlotSize());
-    }
-
-    @Override
-    public void enlarge(ITupleReference tuple, MultiComparator cmp) {
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
-                    frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
-            if (c > 0) {
-                System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), frameTuple.getFieldData(i),
-                        frameTuple.getFieldStart(i), tuple.getFieldLength(i));
-            }
-            c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
-                    frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
-                    tuple.getFieldLength(j));
-            if (c < 0) {
-                System.arraycopy(tuple.getFieldData(j), tuple.getFieldStart(j), frameTuple.getFieldData(j),
-                        frameTuple.getFieldStart(j), tuple.getFieldLength(j));
-            }
-        }
-    }
-
-    // For debugging.
-    public ArrayList<Integer> getChildren(MultiComparator cmp) {
-        ArrayList<Integer> ret = new ArrayList<Integer>();
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        int tupleCount = buf.getInt(tupleCountOff);
-        for (int i = 0; i < tupleCount; i++) {
-            int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            int intVal = IntegerSerializerDeserializer.getInt(
-                    buf.array(),
-                    frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                            + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
-            ret.add(intVal);
-        }
-        return ret;
-    }
-
-    @Override
-    public int getFieldCount() {
-        return keyValueProviders.length;
-    }
-
-    public int getChildPointerSize() {
-        return childPtrSize;
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
deleted file mode 100644
index fdb0e0a..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-
-public class RTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
-
-    private static final long serialVersionUID = 1L;
-    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
-    private final IPrimitiveValueProviderFactory[] keyValueProviderFactories;
-    private final RTreePolicyType rtreePolicyType;
-
-    public RTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory,
-            IPrimitiveValueProviderFactory[] keyValueProviderFactories, RTreePolicyType rtreePolicyType) {
-        this.tupleWriterFactory = tupleWriterFactory;
-        if (keyValueProviderFactories.length % 2 != 0) {
-            throw new IllegalArgumentException("The key has different number of dimensions.");
-        }
-        this.keyValueProviderFactories = keyValueProviderFactories;
-        this.rtreePolicyType = rtreePolicyType;
-    }
-
-    @Override
-    public IRTreeInteriorFrame createFrame() {
-        IPrimitiveValueProvider[] keyValueProviders = new IPrimitiveValueProvider[keyValueProviderFactories.length];
-        for (int i = 0; i < keyValueProviders.length; i++) {
-            keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
-        }
-        return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders, rtreePolicyType);
-    }
-
-    @Override
-    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
-        return tupleWriterFactory;
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
deleted file mode 100644
index d52ef16..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-
-public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
-
-    public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders,
-            RTreePolicyType rtreePolicyType) {
-        super(tupleWriter, keyValueProviders, rtreePolicyType);
-    }
-
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return tupleWriter.createTupleReference();
-    }
-
-    @Override
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) {
-        return slotManager.findTupleIndex(tuple, frameTuple, cmp, null, null);
-    }
-
-    @Override
-    public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
-        frameTuple.resetByTupleIndex(this, tupleIndex);
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
-                    frameTuple.getFieldLength(j));
-            if (c > 0) {
-                return false;
-            }
-            c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
-                    frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-
-            if (c < 0) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    public int getTupleSize(ITupleReference tuple) {
-        return tupleWriter.bytesRequired(tuple);
-    }
-
-    @Override
-    public void insert(ITupleReference tuple, int tupleIndex) {
-        slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
-        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
-
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
-    }
-
-    @Override
-    public void delete(int tupleIndex, MultiComparator cmp) {
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-
-        int tupleOff = slotManager.getTupleOff(slotOff);
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int tupleSize = tupleWriter.bytesRequired(frameTuple);
-
-        // perform deletion (we just do a memcpy to overwrite the slot)
-        int slotStartOff = slotManager.getSlotEndOff();
-        int length = slotOff - slotStartOff;
-        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
-
-        // maintain space information
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
-    }
-
-    @Override
-    public int getFieldCount() {
-        return frameTuple.getFieldCount();
-    }
-
-    public ITupleReference getBeforeTuple(ITupleReference tuple, int targetTupleIndex, MultiComparator cmp) {
-        // Examine the tuple index to determine whether it is valid or not.
-        if (targetTupleIndex != slotManager.getGreatestKeyIndicator()) {
-            // We need to check the key to determine whether it's an insert or an update.
-            frameTuple.resetByTupleIndex(this, targetTupleIndex);
-            if (cmp.compare(tuple, frameTuple) == 0) {
-                // The keys match, it's an update.
-                return frameTuple;
-            }
-        }
-        // Either the tuple index is a special indicator, or the keys don't match.
-        // In those cases, we are definitely dealing with an insert.
-        return null;
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
deleted file mode 100644
index b4d382b..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-
-public class RTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
-
-    private static final long serialVersionUID = 1L;
-    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
-    private final IPrimitiveValueProviderFactory[] keyValueProviderFactories;
-    private final RTreePolicyType rtreePolicyType;
-
-    public RTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory,
-            IPrimitiveValueProviderFactory[] keyValueProviderFactories, RTreePolicyType rtreePolicyType) {
-        this.tupleWriterFactory = tupleWriterFactory;
-        if (keyValueProviderFactories.length % 2 != 0) {
-            throw new IllegalArgumentException("The key has different number of dimensions.");
-        }
-        this.keyValueProviderFactories = keyValueProviderFactories;
-        this.rtreePolicyType = rtreePolicyType;
-    }
-
-    @Override
-    public IRTreeLeafFrame createFrame() {
-        IPrimitiveValueProvider[] keyValueProviders = new IPrimitiveValueProvider[keyValueProviderFactories.length];
-        for (int i = 0; i < keyValueProviders.length; i++) {
-            keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
-        }
-        return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders, rtreePolicyType);
-    }
-
-    @Override
-    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
-        return tupleWriterFactory;
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreePolicy.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreePolicy.java
deleted file mode 100644
index 9d94794..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreePolicy.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreePolicy;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-
-public class RTreePolicy implements IRTreePolicy {
-
-    private Rectangle[] rec;
-
-    private final ITreeIndexTupleWriter tupleWriter;
-    private final IPrimitiveValueProvider[] keyValueProviders;
-    private ITreeIndexTupleReference cmpFrameTuple;
-    private final int totalFreeSpaceOff;
-
-    public RTreePolicy(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders,
-            ITreeIndexTupleReference cmpFrameTuple, int totalFreeSpaceOff) {
-        this.tupleWriter = tupleWriter;
-        this.keyValueProviders = keyValueProviders;
-        this.cmpFrameTuple = cmpFrameTuple;
-        this.totalFreeSpaceOff = totalFreeSpaceOff;
-
-        rec = new Rectangle[2];
-        for (int i = 0; i < 2; i++) {
-            rec[i] = new Rectangle(keyValueProviders.length / 2);
-        }
-    }
-
-    @Override
-    public void split(ITreeIndexFrame leftFrame, ByteBuffer buf, ITreeIndexFrame rightFrame, ISlotManager slotManager,
-            ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey) {
-        RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
-        RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
-        RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
-
-        RTreeNSMFrame leftRTreeFrame = ((RTreeNSMFrame) leftFrame);
-
-        double separation = Double.NEGATIVE_INFINITY;
-        int seed1 = 0, seed2 = 0;
-        int maxFieldPos = keyValueProviders.length / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            frameTuple.resetByTupleIndex(leftRTreeFrame, 0);
-            double leastLowerValue = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
-                    frameTuple.getFieldStart(i));
-            double greatestUpperValue = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
-                    frameTuple.getFieldStart(j));
-            double leastUpperValue = leastLowerValue;
-            double greatestLowerValue = greatestUpperValue;
-            int leastUpperIndex = 0;
-            int greatestLowerIndex = 0;
-            double width;
-
-            int tupleCount = leftRTreeFrame.getTupleCount();
-            for (int k = 1; k < tupleCount; ++k) {
-                frameTuple.resetByTupleIndex(leftRTreeFrame, k);
-                double lowerValue = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
-                        frameTuple.getFieldStart(i));
-                if (lowerValue > greatestLowerValue) {
-                    greatestLowerIndex = k;
-                    cmpFrameTuple.resetByTupleIndex(leftRTreeFrame, k);
-                    greatestLowerValue = keyValueProviders[i].getValue(cmpFrameTuple.getFieldData(i),
-                            cmpFrameTuple.getFieldStart(i));
-                }
-                double higherValue = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
-                        frameTuple.getFieldStart(j));
-                if (higherValue < leastUpperValue) {
-                    leastUpperIndex = k;
-                    cmpFrameTuple.resetByTupleIndex(leftRTreeFrame, k);
-                    leastUpperValue = keyValueProviders[j].getValue(cmpFrameTuple.getFieldData(j),
-                            cmpFrameTuple.getFieldStart(j));
-                }
-
-                leastLowerValue = Math.min(lowerValue, leastLowerValue);
-                greatestUpperValue = Math.max(higherValue, greatestUpperValue);
-            }
-
-            width = greatestUpperValue - leastLowerValue;
-            if (width <= 0) {
-                width = 1;
-            }
-
-            double f = (greatestLowerValue - leastUpperValue) / width;
-
-            if (f > separation) {
-                seed1 = leastUpperIndex;
-                seed2 = greatestLowerIndex;
-                separation = f;
-            }
-        }
-
-        if (seed1 == seed2) {
-            if (seed1 == 0) {
-                seed2 = 1;
-            } else {
-                --seed2;
-            }
-        }
-
-        int totalBytes = 0, numOfDeletedTuples = 0;
-
-        frameTuple.resetByTupleIndex(leftRTreeFrame, seed1);
-        rec[0].set(frameTuple, keyValueProviders);
-        rightFrame.insert(frameTuple, -1);
-        ((UnorderedSlotManager) slotManager).modifySlot(slotManager.getSlotOff(seed1), -1);
-        totalBytes += leftRTreeFrame.getTupleSize(frameTuple);
-        numOfDeletedTuples++;
-
-        frameTuple.resetByTupleIndex(leftRTreeFrame, seed2);
-        rec[1].set(frameTuple, keyValueProviders);
-
-        int remainingTuplestoBeInsertedInRightFrame;
-        for (int k = 0; k < leftRTreeFrame.getTupleCount(); ++k) {
-            remainingTuplestoBeInsertedInRightFrame = leftRTreeFrame.getTupleCount() / 2 - rightFrame.getTupleCount();
-            if (remainingTuplestoBeInsertedInRightFrame == 0) {
-                break;
-            }
-            if (k != seed1 && k != seed2) {
-                frameTuple.resetByTupleIndex(leftRTreeFrame, k);
-                if (rec[0].enlargedArea(frameTuple, keyValueProviders) < rec[1].enlargedArea(frameTuple,
-                        keyValueProviders)
-                        || leftRTreeFrame.getTupleCount() - k <= remainingTuplestoBeInsertedInRightFrame) {
-                    rightFrame.insert(frameTuple, -1);
-                    rec[0].enlarge(frameTuple, keyValueProviders);
-                    ((UnorderedSlotManager) slotManager).modifySlot(slotManager.getSlotOff(k), -1);
-                    totalBytes += leftRTreeFrame.getTupleSize(frameTuple);
-                    numOfDeletedTuples++;
-                } else {
-                    rec[1].enlarge(frameTuple, keyValueProviders);
-                }
-            }
-
-        }
-
-        ((UnorderedSlotManager) slotManager).deleteEmptySlots();
-
-        // maintain space information
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
-                + (slotManager.getSlotSize() * numOfDeletedTuples));
-
-        // compact both pages
-        rightFrame.compact();
-        leftRTreeFrame.compact();
-
-        // The assumption here is that the new tuple cannot be larger than page
-        // size, thus it must fit in either pages.
-        if (rec[0].enlargedArea(tuple, keyValueProviders) < rec[1].enlargedArea(tuple, keyValueProviders)) {
-            if (rightFrame.hasSpaceInsert(tuple) == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
-                rightFrame.insert(tuple, -1);
-            } else {
-                leftRTreeFrame.insert(tuple, -1);
-            }
-        } else if (leftRTreeFrame.hasSpaceInsert(tuple) == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
-            leftRTreeFrame.insert(tuple, -1);
-        } else {
-            rightFrame.insert(tuple, -1);
-        }
-
-        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
-
-        splitKey.initData(splitKeySize);
-        leftRTreeFrame.adjustMBR();
-        rTreeTupleWriterLeftFrame.writeTupleFields(leftRTreeFrame.getTuples(), 0, rTreeSplitKey.getLeftPageBuffer(), 0);
-        rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
-
-        ((IRTreeFrame) rightFrame).adjustMBR();
-        rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
-                rTreeSplitKey.getRightPageBuffer(), 0);
-        rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
-    }
-
-    @Override
-    public int findBestChildPosition(ITreeIndexFrame frame, ITupleReference tuple, ITreeIndexTupleReference frameTuple,
-            MultiComparator cmp) {
-        cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-
-        int bestChild = 0;
-        double minEnlargedArea = Double.MAX_VALUE;
-
-        // find minimum enlarged area, use minimum area to break tie
-        for (int i = 0; i < frame.getTupleCount(); i++) {
-            frameTuple.resetByTupleIndex(frame, i);
-            double enlargedArea = RTreeComputationUtils.enlargedArea(frameTuple, tuple, cmp, keyValueProviders);
-            if (enlargedArea < minEnlargedArea) {
-                minEnlargedArea = enlargedArea;
-                bestChild = i;
-            } else if (enlargedArea == minEnlargedArea) {
-                double area = RTreeComputationUtils.area(frameTuple, cmp, keyValueProviders);
-                frameTuple.resetByTupleIndex(frame, bestChild);
-                double minArea = RTreeComputationUtils.area(frameTuple, cmp, keyValueProviders);
-                if (area < minArea) {
-                    bestChild = i;
-                }
-            }
-        }
-
-        return bestChild;
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreePolicyType.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreePolicyType.java
deleted file mode 100644
index 712c424..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreePolicyType.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-public enum RTreePolicyType {
-    RTREE, RSTARTREE
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
deleted file mode 100644
index c12dc50..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ /dev/null
@@ -1,982 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.concurrent.atomic.AtomicLong;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.impls.AbstractTreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.impls.NodeFrontier;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class RTree extends AbstractTreeIndex {
-
-    // Global node sequence number used for the concurrency control protocol
-    private final AtomicLong globalNsn;
-
-    public RTree(IBufferCache bufferCache, IFileMapProvider fileMapProvider, IFreePageManager freePageManager,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
-            IBinaryComparatorFactory[] cmpFactories, int fieldCount, FileReference file) {
-        super(bufferCache, fileMapProvider, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories,
-                fieldCount, file);
-        globalNsn = new AtomicLong();
-    }
-
-    private long incrementGlobalNsn() {
-        return globalNsn.incrementAndGet();
-    }
-
-    @SuppressWarnings("rawtypes")
-    public String printTree(IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
-            ISerializerDeserializer[] keySerdes) throws Exception {
-        MultiComparator cmp = MultiComparator.create(cmpFactories);
-        byte treeHeight = getTreeHeight(leafFrame);
-        StringBuilder strBuilder = new StringBuilder();
-        printTree(rootPage, null, false, leafFrame, interiorFrame, treeHeight, keySerdes, strBuilder, cmp);
-        return strBuilder.toString();
-    }
-
-    @SuppressWarnings("rawtypes")
-    public void printTree(int pageId, ICachedPage parent, boolean unpin, IRTreeLeafFrame leafFrame,
-            IRTreeInteriorFrame interiorFrame, byte treeHeight, ISerializerDeserializer[] keySerdes,
-            StringBuilder strBuilder, MultiComparator cmp) throws Exception {
-        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        node.acquireReadLatch();
-        try {
-            if (parent != null && unpin == true) {
-                parent.releaseReadLatch();
-                bufferCache.unpin(parent);
-            }
-            interiorFrame.setPage(node);
-            int level = interiorFrame.getLevel();
-            strBuilder.append(String.format("%1d ", level));
-            strBuilder.append(String.format("%3d ", pageId) + ": ");
-            for (int i = 0; i < treeHeight - level; i++) {
-                strBuilder.append("    ");
-            }
-
-            String keyString;
-            long LSN, NSN;
-            int rightPage;
-            if (interiorFrame.isLeaf()) {
-                leafFrame.setPage(node);
-                keyString = TreeIndexUtils.printFrameTuples(leafFrame, keySerdes);
-                LSN = leafFrame.getPageLsn();
-                NSN = leafFrame.getPageNsn();
-                rightPage = leafFrame.getRightPage();
-
-            } else {
-                keyString = TreeIndexUtils.printFrameTuples(interiorFrame, keySerdes);
-                LSN = interiorFrame.getPageLsn();
-                NSN = interiorFrame.getPageNsn();
-                rightPage = interiorFrame.getRightPage();
-            }
-
-            strBuilder.append(keyString + "\n" + "pageId: " + pageId + " LSN: " + LSN + " NSN: " + NSN + " rightPage: "
-                    + rightPage + "\n");
-            if (!interiorFrame.isLeaf()) {
-                ArrayList<Integer> children = ((RTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
-                for (int i = 0; i < children.size(); i++) {
-                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, treeHeight,
-                            keySerdes, strBuilder, cmp);
-                }
-            } else {
-                node.releaseReadLatch();
-                bufferCache.unpin(node);
-            }
-        } catch (Exception e) {
-            node.releaseReadLatch();
-            bufferCache.unpin(node);
-            e.printStackTrace();
-        }
-    }
-
-    private RTreeOpContext createOpContext(IModificationOperationCallback modificationCallback) {
-        return new RTreeOpContext((IRTreeLeafFrame) leafFrameFactory.createFrame(),
-                (IRTreeInteriorFrame) interiorFrameFactory.createFrame(), freePageManager.getMetaDataFrameFactory()
-                        .createFrame(), cmpFactories, 8, modificationCallback);
-    }
-
-    private void insert(ITupleReference tuple, IIndexOperationContext ictx) throws HyracksDataException,
-            TreeIndexException {
-        RTreeOpContext ctx = (RTreeOpContext) ictx;
-        ctx.reset();
-        ctx.setTuple(tuple);
-        ctx.splitKey.reset();
-        ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
-        ctx.splitKey.getRightTuple().setFieldCount(cmpFactories.length);
-        ctx.modificationCallback.before(tuple);
-
-        int maxFieldPos = cmpFactories.length / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = ctx.cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                throw new IllegalArgumentException("The low key point has larger coordinates than the high key point.");
-            }
-        }
-
-        try {
-            ICachedPage leafNode = findLeaf(ctx);
-
-            int pageId = ctx.pathList.getLastPageId();
-            ctx.pathList.moveLast();
-            insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
-
-            while (true) {
-                if (ctx.splitKey.getLeftPageBuffer() != null) {
-                    updateParentForInsert(ctx);
-                } else {
-                    break;
-                }
-            }
-        } finally {
-            for (int i = ctx.NSNUpdates.size() - 1; i >= 0; i--) {
-                ICachedPage node = ctx.NSNUpdates.get(i);
-                ctx.interiorFrame.setPage(node);
-                ctx.interiorFrame.setPageNsn(incrementGlobalNsn());
-            }
-
-            for (int i = ctx.LSNUpdates.size() - 1; i >= 0; i--) {
-                ICachedPage node = ctx.LSNUpdates.get(i);
-                ctx.interiorFrame.setPage(node);
-                ctx.interiorFrame.setPageLsn(incrementGlobalNsn());
-                node.releaseWriteLatch();
-                bufferCache.unpin(node);
-            }
-        }
-    }
-
-    private ICachedPage findLeaf(RTreeOpContext ctx) throws HyracksDataException {
-        int pageId = rootPage;
-        boolean writeLatched = false;
-        boolean readLatched = false;
-        boolean succeeded = false;
-        ICachedPage node = null;
-        boolean isLeaf = false;
-        long pageLsn = 0, parentLsn = 0;
-
-        try {
-
-            while (true) {
-                if (!writeLatched) {
-                    node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                    ctx.interiorFrame.setPage(node);
-                    isLeaf = ctx.interiorFrame.isLeaf();
-                    if (isLeaf) {
-                        node.acquireWriteLatch();
-                        writeLatched = true;
-
-                        if (!ctx.interiorFrame.isLeaf()) {
-                            node.releaseWriteLatch();
-                            writeLatched = false;
-                            bufferCache.unpin(node);
-                            continue;
-                        }
-                    } else {
-                        // Be optimistic and grab read latch first. We will swap
-                        // it to write latch if we need to enlarge the best
-                        // child tuple.
-                        node.acquireReadLatch();
-                        readLatched = true;
-                    }
-                }
-
-                if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
-                    // Concurrent split detected, go back to parent and
-                    // re-choose the best child
-                    if (writeLatched) {
-                        node.releaseWriteLatch();
-                        writeLatched = false;
-                        bufferCache.unpin(node);
-                    } else {
-                        node.releaseReadLatch();
-                        readLatched = false;
-                        bufferCache.unpin(node);
-                    }
-
-                    pageId = ctx.pathList.getLastPageId();
-                    if (pageId != rootPage) {
-                        parentLsn = ctx.pathList.getPageLsn(ctx.pathList.size() - 2);
-                    }
-                    ctx.pathList.moveLast();
-                    continue;
-                }
-
-                pageLsn = ctx.interiorFrame.getPageLsn();
-                ctx.pathList.add(pageId, pageLsn, -1);
-
-                if (!isLeaf) {
-                    // findBestChild must be called *before* checkIfEnlarementIsNeeded
-                    int childPageId = ctx.interiorFrame.findBestChild(ctx.getTuple(), ctx.cmp);
-                    boolean enlarementIsNeeded = ctx.interiorFrame.checkIfEnlarementIsNeeded(ctx.getTuple(), ctx.cmp);
-
-                    if (enlarementIsNeeded) {
-                        if (!writeLatched) {
-                            node.releaseReadLatch();
-                            readLatched = false;
-                            bufferCache.unpin(node);
-
-                            node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                            node.acquireWriteLatch();
-                            writeLatched = true;
-                            ctx.interiorFrame.setPage(node);
-
-                            if (ctx.interiorFrame.getPageLsn() != pageLsn) {
-                                // The page was changed while we unlocked it;
-                                // thus, retry (re-choose best child)
-
-                                ctx.pathList.moveLast();
-                                continue;
-                            }
-                        }
-                        // We don't need to reset the frameTuple because it is
-                        // already pointing to the best child
-                        ctx.interiorFrame.enlarge(ctx.getTuple(), ctx.cmp);
-
-                        node.releaseWriteLatch();
-                        writeLatched = false;
-                        bufferCache.unpin(node);
-                    } else {
-                        if (readLatched) {
-                            node.releaseReadLatch();
-                            readLatched = false;
-                            bufferCache.unpin(node);
-                        } else if (writeLatched) {
-                            node.releaseWriteLatch();
-                            writeLatched = false;
-                            bufferCache.unpin(node);
-                        }
-                    }
-
-                    pageId = childPageId;
-                    parentLsn = pageLsn;
-                } else {
-                    ctx.leafFrame.setPage(node);
-                    succeeded = true;
-                    return node;
-                }
-            }
-        } finally {
-            if (!succeeded) {
-                if (readLatched) {
-                    node.releaseReadLatch();
-                    readLatched = false;
-                    bufferCache.unpin(node);
-                } else if (writeLatched) {
-                    node.releaseWriteLatch();
-                    writeLatched = false;
-                    bufferCache.unpin(node);
-                }
-            }
-        }
-    }
-
-    private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
-            throws HyracksDataException, TreeIndexException {
-        boolean succeeded = false;
-        FrameOpSpaceStatus spaceStatus;
-        if (!isLeaf) {
-            spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple);
-        } else {
-            spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
-        }
-
-        switch (spaceStatus) {
-            case SUFFICIENT_CONTIGUOUS_SPACE: {
-                try {
-                    if (!isLeaf) {
-                        ctx.interiorFrame.insert(tuple, -1);
-                    } else {
-                        ctx.modificationCallback.found(null, tuple);
-                        ctx.leafFrame.insert(tuple, -1);
-                    }
-                    succeeded = true;
-                } finally {
-                    if (succeeded) {
-                        ctx.LSNUpdates.add(node);
-                        ctx.splitKey.reset();
-                    } else if (isLeaf) {
-                        // In case of a crash, we un-latch the interior node
-                        // inside updateParentForInsert.
-                        node.releaseWriteLatch();
-                        bufferCache.unpin(node);
-                    }
-                }
-                break;
-            }
-
-            case SUFFICIENT_SPACE: {
-                try {
-                    if (!isLeaf) {
-                        ctx.interiorFrame.compact();
-                        ctx.interiorFrame.insert(tuple, -1);
-                    } else {
-                        ctx.leafFrame.compact();
-                        ctx.modificationCallback.found(null, tuple);
-                        ctx.leafFrame.insert(tuple, -1);
-                    }
-                    succeeded = true;
-                } finally {
-                    if (succeeded) {
-                        ctx.LSNUpdates.add(node);
-                        ctx.splitKey.reset();
-                    } else if (isLeaf) {
-                        // In case of a crash, we un-latch the interior node
-                        // inside updateParentForInsert.
-                        node.releaseWriteLatch();
-                        bufferCache.unpin(node);
-                    }
-                }
-                break;
-            }
-
-            case INSUFFICIENT_SPACE: {
-                int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
-                ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
-                rightNode.acquireWriteLatch();
-
-                try {
-                    IRTreeFrame rightFrame;
-                    if (!isLeaf) {
-                        rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
-                        rightFrame.setPage(rightNode);
-                        rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
-                        rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
-                        ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey);
-                        ctx.interiorFrame.setRightPage(rightPageId);
-                    } else {
-                        rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
-                        rightFrame.setPage(rightNode);
-                        rightFrame.initBuffer((byte) 0);
-                        rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
-                        ctx.modificationCallback.found(null, tuple);
-                        ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
-                        ctx.leafFrame.setRightPage(rightPageId);
-                    }
-                    succeeded = true;
-                } finally {
-                    if (succeeded) {
-                        ctx.NSNUpdates.add(rightNode);
-                        ctx.LSNUpdates.add(rightNode);
-                        ctx.NSNUpdates.add(node);
-                        ctx.LSNUpdates.add(node);
-                    } else if (isLeaf) {
-                        // In case of a crash, we un-latch the interior node
-                        // inside updateParentForInsert.
-                        node.releaseWriteLatch();
-                        bufferCache.unpin(node);
-                        rightNode.releaseWriteLatch();
-                        bufferCache.unpin(rightNode);
-                    } else {
-                        rightNode.releaseWriteLatch();
-                        bufferCache.unpin(rightNode);
-                    }
-
-                }
-                ctx.splitKey.setPages(pageId, rightPageId);
-                if (pageId == rootPage) {
-                    int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
-                    ICachedPage newLeftNode = bufferCache
-                            .pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
-                    newLeftNode.acquireWriteLatch();
-                    succeeded = false;
-                    try {
-                        // copy left child to new left child
-                        System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0, newLeftNode
-                                .getBuffer().capacity());
-
-                        // initialize new root (leftNode becomes new root)
-                        ctx.interiorFrame.setPage(node);
-                        ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
-
-                        ctx.splitKey.setLeftPage(newLeftId);
-                        ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), -1);
-                        ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), -1);
-
-                        succeeded = true;
-                    } finally {
-                        if (succeeded) {
-                            ctx.NSNUpdates.remove(ctx.NSNUpdates.size() - 1);
-                            ctx.LSNUpdates.remove(ctx.LSNUpdates.size() - 1);
-
-                            ctx.NSNUpdates.add(newLeftNode);
-                            ctx.LSNUpdates.add(newLeftNode);
-
-                            ctx.NSNUpdates.add(node);
-                            ctx.LSNUpdates.add(node);
-                            ctx.splitKey.reset();
-                        } else if (isLeaf) {
-                            // In case of a crash, we un-latch the interior node
-                            // inside updateParentForInsert.
-                            node.releaseWriteLatch();
-                            bufferCache.unpin(node);
-                            rightNode.releaseWriteLatch();
-                            bufferCache.unpin(rightNode);
-                            newLeftNode.releaseWriteLatch();
-                            bufferCache.unpin(newLeftNode);
-                        } else {
-                            rightNode.releaseWriteLatch();
-                            bufferCache.unpin(rightNode);
-                            newLeftNode.releaseWriteLatch();
-                            bufferCache.unpin(newLeftNode);
-                        }
-                    }
-                }
-                break;
-            }
-        }
-    }
-
-    private void updateParentForInsert(RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        boolean succeeded = false;
-        boolean writeLatched = false;
-        int parentId = ctx.pathList.getLastPageId();
-        ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-        parentNode.acquireWriteLatch();
-        writeLatched = true;
-        ctx.interiorFrame.setPage(parentNode);
-        boolean foundParent = true;
-
-        try {
-            if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
-                foundParent = false;
-                while (true) {
-                    if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.cmp) != -1) {
-                        // found the parent
-                        foundParent = true;
-                        break;
-                    }
-                    int rightPage = ctx.interiorFrame.getRightPage();
-                    parentNode.releaseWriteLatch();
-                    writeLatched = false;
-                    bufferCache.unpin(parentNode);
-
-                    if (rightPage == -1) {
-                        break;
-                    }
-
-                    parentId = rightPage;
-                    parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-                    parentNode.acquireWriteLatch();
-                    writeLatched = true;
-                    ctx.interiorFrame.setPage(parentNode);
-                }
-            }
-
-            if (foundParent) {
-                try {
-                    ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, ctx.cmp);
-                } catch (TreeIndexException e) {
-                    if (writeLatched) {
-                        parentNode.releaseWriteLatch();
-                        writeLatched = false;
-                        bufferCache.unpin(parentNode);
-                    }
-                    throw e;
-                }
-                insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
-                ctx.pathList.moveLast();
-                succeeded = true;
-                return;
-
-            }
-        } finally {
-            if (!succeeded) {
-                if (writeLatched) {
-                    parentNode.releaseWriteLatch();
-                    writeLatched = false;
-                    bufferCache.unpin(parentNode);
-                }
-            }
-        }
-
-        ctx.traverseList.clear();
-        findPath(ctx);
-        updateParentForInsert(ctx);
-    }
-
-    private void findPath(RTreeOpContext ctx) throws TreeIndexException, HyracksDataException {
-        boolean readLatched = false;
-        int pageId = rootPage;
-        int parentIndex = -1;
-        long parentLsn = 0;
-        long pageLsn;
-        int pageIndex;
-        ICachedPage node = null;
-        ctx.traverseList.add(pageId, -1, parentIndex);
-        try {
-            while (!ctx.traverseList.isLast()) {
-                pageId = ctx.traverseList.getFirstPageId();
-                parentIndex = ctx.traverseList.getFirstPageIndex();
-
-                node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                node.acquireReadLatch();
-                readLatched = true;
-                ctx.interiorFrame.setPage(node);
-                pageLsn = ctx.interiorFrame.getPageLsn();
-                pageIndex = ctx.traverseList.first();
-                ctx.traverseList.setPageLsn(pageIndex, pageLsn);
-
-                ctx.traverseList.moveFirst();
-
-                if (ctx.interiorFrame.isLeaf()) {
-                    throw new TreeIndexException("Error: Failed to re-find parent of a page in the tree.");
-                }
-
-                if (pageId != rootPage) {
-                    parentLsn = ctx.traverseList.getPageLsn(ctx.traverseList.getPageIndex(pageIndex));
-                }
-                if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
-                    int rightPage = ctx.interiorFrame.getRightPage();
-                    if (rightPage != -1) {
-                        ctx.traverseList.addFirst(rightPage, -1, parentIndex);
-                    }
-                }
-
-                if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex,
-                        ctx.cmp) != -1) {
-                    ctx.pathList.clear();
-                    fillPath(ctx, pageIndex);
-                    return;
-                }
-                node.releaseReadLatch();
-                readLatched = false;
-                bufferCache.unpin(node);
-            }
-        } finally {
-            if (readLatched) {
-                node.releaseReadLatch();
-                readLatched = false;
-                bufferCache.unpin(node);
-            }
-        }
-    }
-
-    private void fillPath(RTreeOpContext ctx, int pageIndex) {
-        if (pageIndex != -1) {
-            fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
-            ctx.pathList.add(ctx.traverseList.getPageId(pageIndex), ctx.traverseList.getPageLsn(pageIndex), -1);
-        }
-    }
-
-    private void delete(ITupleReference tuple, RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        ctx.reset();
-        ctx.setTuple(tuple);
-        ctx.splitKey.reset();
-        ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
-
-        // We delete the first matching tuple (including the payload data.
-        // We don't update the MBRs of the parents after deleting the record.
-        int tupleIndex = findTupleToDelete(ctx);
-
-        if (tupleIndex != -1) {
-            try {
-                deleteTuple(tupleIndex, ctx);
-            } finally {
-                ctx.leafFrame.getPage().releaseWriteLatch();
-                bufferCache.unpin(ctx.leafFrame.getPage());
-            }
-        }
-    }
-
-    private int findTupleToDelete(RTreeOpContext ctx) throws HyracksDataException {
-        boolean writeLatched = false;
-        boolean readLatched = false;
-        boolean succeeded = false;
-        ICachedPage node = null;
-        ctx.pathList.add(rootPage, -1, -1);
-
-        try {
-            while (!ctx.pathList.isEmpty()) {
-                int pageId = ctx.pathList.getLastPageId();
-                long parentLsn = ctx.pathList.getLastPageLsn();
-                ctx.pathList.moveLast();
-                node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                node.acquireReadLatch();
-                readLatched = true;
-                ctx.interiorFrame.setPage(node);
-                boolean isLeaf = ctx.interiorFrame.isLeaf();
-                long pageLsn = ctx.interiorFrame.getPageLsn();
-
-                if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
-                    // Concurrent split detected, we need to visit the right
-                    // page
-                    int rightPage = ctx.interiorFrame.getRightPage();
-                    if (rightPage != -1) {
-                        ctx.pathList.add(rightPage, parentLsn, -1);
-                    }
-                }
-
-                if (!isLeaf) {
-                    for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
-                        int childPageId = ctx.interiorFrame.getChildPageIdIfIntersect(ctx.tuple, i, ctx.cmp);
-                        if (childPageId != -1) {
-                            ctx.pathList.add(childPageId, pageLsn, -1);
-                        }
-                    }
-                } else {
-                    ctx.leafFrame.setPage(node);
-                    int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, ctx.cmp);
-                    if (tupleIndex != -1) {
-
-                        node.releaseReadLatch();
-                        readLatched = false;
-                        bufferCache.unpin(node);
-
-                        node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                        node.acquireWriteLatch();
-                        writeLatched = true;
-                        ctx.leafFrame.setPage(node);
-
-                        // A rare case only happen when a root is no longer a
-                        // leaf page. Simply we restart the search.
-                        if (!ctx.leafFrame.isLeaf()) {
-                            ctx.pathList.add(pageId, -1, -1);
-
-                            node.releaseWriteLatch();
-                            writeLatched = false;
-                            bufferCache.unpin(node);
-                            continue;
-                        }
-
-                        if (ctx.leafFrame.getPageLsn() != pageLsn) {
-                            // The page was changed while we unlocked it
-
-                            tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, ctx.cmp);
-                            if (tupleIndex == -1) {
-                                ctx.pathList.add(pageId, parentLsn, -1);
-
-                                node.releaseWriteLatch();
-                                writeLatched = false;
-                                bufferCache.unpin(node);
-                                continue;
-                            } else {
-                                succeeded = true;
-                                return tupleIndex;
-                            }
-                        } else {
-                            succeeded = true;
-                            return tupleIndex;
-                        }
-                    }
-                }
-                node.releaseReadLatch();
-                readLatched = false;
-                bufferCache.unpin(node);
-            }
-        } finally {
-            if (!succeeded) {
-                if (readLatched) {
-                    node.releaseReadLatch();
-                    readLatched = false;
-                    bufferCache.unpin(node);
-                } else if (writeLatched) {
-                    node.releaseWriteLatch();
-                    writeLatched = false;
-                    bufferCache.unpin(node);
-                }
-            }
-        }
-        return -1;
-    }
-
-    private void deleteTuple(int tupleIndex, RTreeOpContext ctx) throws HyracksDataException {
-        ITupleReference beforeTuple = ctx.leafFrame.getBeforeTuple(ctx.getTuple(), tupleIndex, ctx.cmp);
-        ctx.modificationCallback.found(beforeTuple, ctx.getTuple());
-        ctx.leafFrame.delete(tupleIndex, ctx.cmp);
-        ctx.leafFrame.setPageLsn(incrementGlobalNsn());
-    }
-
-    private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, RTreeOpContext ctx)
-            throws HyracksDataException, IndexException {
-        ctx.reset();
-        ctx.cursor = cursor;
-
-        cursor.setBufferCache(bufferCache);
-        cursor.setFileId(fileId);
-        ctx.cursorInitialState.setRootPage(rootPage);
-        ctx.cursor.open(ctx.cursorInitialState, (SearchPredicate) searchPred);
-    }
-
-    private void update(ITupleReference tuple, RTreeOpContext ctx) {
-        throw new UnsupportedOperationException("RTree Update not implemented.");
-    }
-
-    private void diskOrderScan(ITreeIndexCursor icursor, RTreeOpContext ctx) throws HyracksDataException {
-        TreeIndexDiskOrderScanCursor cursor = (TreeIndexDiskOrderScanCursor) icursor;
-        ctx.reset();
-
-        MultiComparator cmp = MultiComparator.create(cmpFactories);
-        SearchPredicate searchPred = new SearchPredicate(null, cmp);
-
-        int currentPageId = rootPage;
-        int maxPageId = freePageManager.getMaxPage(ctx.metaFrame);
-
-        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
-        page.acquireReadLatch();
-        try {
-            cursor.setBufferCache(bufferCache);
-            cursor.setFileId(fileId);
-            cursor.setCurrentPageId(currentPageId);
-            cursor.setMaxPageId(maxPageId);
-            ctx.cursorInitialState.setOriginialKeyComparator(ctx.cmp);
-            ctx.cursorInitialState.setPage(page);
-            cursor.open(ctx.cursorInitialState, searchPred);
-        } catch (Exception e) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public ITreeIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
-            ISearchOperationCallback searchCallback) {
-        return new RTreeAccessor(this, modificationCallback, searchCallback);
-    }
-
-    public class RTreeAccessor implements ITreeIndexAccessor {
-        private RTree rtree;
-        private RTreeOpContext ctx;
-
-        public RTreeAccessor(RTree rtree, IModificationOperationCallback modificationCallback,
-                ISearchOperationCallback searchCallback) {
-            this.rtree = rtree;
-            this.ctx = rtree.createOpContext(modificationCallback);
-        }
-
-        @Override
-        public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            ctx.setOperation(IndexOperation.INSERT);
-            rtree.insert(tuple, ctx);
-        }
-
-        @Override
-        public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            ctx.setOperation(IndexOperation.UPDATE);
-            rtree.update(tuple, ctx);
-        }
-
-        @Override
-        public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            ctx.setOperation(IndexOperation.DELETE);
-            rtree.delete(tuple, ctx);
-        }
-
-        @Override
-        public ITreeIndexCursor createSearchCursor() {
-            return new RTreeSearchCursor((IRTreeInteriorFrame) interiorFrameFactory.createFrame(),
-                    (IRTreeLeafFrame) leafFrameFactory.createFrame());
-        }
-
-        @Override
-        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
-                IndexException {
-            ctx.setOperation(IndexOperation.SEARCH);
-            rtree.search((ITreeIndexCursor) cursor, searchPred, ctx);
-        }
-
-        @Override
-        public ITreeIndexCursor createDiskOrderScanCursor() {
-            return new TreeIndexDiskOrderScanCursor(leafFrameFactory.createFrame());
-        }
-
-        @Override
-        public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException {
-            ctx.setOperation(IndexOperation.DISKORDERSCAN);
-            rtree.diskOrderScan(cursor, ctx);
-        }
-
-        public RTreeOpContext getOpContext() {
-            return ctx;
-        }
-
-        @Override
-        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
-            throw new UnsupportedOperationException(
-                    "The RTree does not support the notion of keys, therefore upsert does not make sense.");
-        }
-    }
-
-    @Override
-    public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
-            throws TreeIndexException {
-        // TODO: verifyInput currently does nothing.
-        try {
-            return new RTreeBulkLoader(fillFactor);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
-    }
-
-    public class RTreeBulkLoader extends AbstractTreeIndex.AbstractTreeIndexBulkLoader {
-        ITreeIndexFrame lowerFrame, prevInteriorFrame;
-        RTreeTypeAwareTupleWriter tupleWriter = ((RTreeTypeAwareTupleWriter) interiorFrame.getTupleWriter());
-        ITreeIndexTupleReference mbrTuple = interiorFrame.createTupleReference();
-        ByteBuffer mbr;
-
-        public RTreeBulkLoader(float fillFactor) throws TreeIndexException, HyracksDataException {
-            super(fillFactor);
-            prevInteriorFrame = interiorFrameFactory.createFrame();
-        }
-
-        @Override
-        public void add(ITupleReference tuple) throws HyracksDataException {
-            try {
-                NodeFrontier leafFrontier = nodeFrontiers.get(0);
-
-                int spaceNeeded = tupleWriter.bytesRequired(tuple) + slotSize;
-                int spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
-
-                // try to free space by compression
-                if (spaceUsed + spaceNeeded > leafMaxBytes) {
-                    leafFrame.compress();
-                    spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
-                }
-
-                if (spaceUsed + spaceNeeded > leafMaxBytes) {
-                    propagateBulk(1, false);
-
-                    leafFrontier.pageId = freePageManager.getFreePage(metaFrame);
-
-                    leafFrontier.page.releaseWriteLatch();
-                    bufferCache.unpin(leafFrontier.page);
-
-                    leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
-                            true);
-                    leafFrontier.page.acquireWriteLatch();
-                    leafFrame.setPage(leafFrontier.page);
-                    leafFrame.initBuffer((byte) 0);
-                }
-
-                leafFrame.setPage(leafFrontier.page);
-                leafFrame.insert(tuple, -1);
-            } catch (HyracksDataException e) {
-                handleException();
-                throw e;
-            } catch (RuntimeException e) {
-                handleException();
-                throw e;
-            }
-
-        }
-
-        public void end() throws HyracksDataException {
-            propagateBulk(1, true);
-
-            super.end();
-        }
-
-        protected void propagateBulk(int level, boolean toRoot) throws HyracksDataException {
-            boolean propagated = false;
-
-            if (level == 1)
-                lowerFrame = leafFrame;
-
-            if (lowerFrame.getTupleCount() == 0)
-                return;
-
-            if (level >= nodeFrontiers.size())
-                addLevel();
-
-            ((RTreeNSMFrame) lowerFrame).adjustMBR();
-
-            if (mbr == null) {
-                int bytesRequired = tupleWriter.bytesRequired(((RTreeNSMFrame) lowerFrame).getTuples()[0], 0,
-                        cmp.getKeyFieldCount())
-                        + ((RTreeNSMInteriorFrame) interiorFrame).getChildPointerSize();
-                mbr = ByteBuffer.allocate(bytesRequired);
-            }
-            tupleWriter.writeTupleFields(((RTreeNSMFrame) lowerFrame).getTuples(), 0, mbr, 0);
-            mbrTuple.resetByTupleOffset(mbr, 0);
-
-            NodeFrontier frontier = nodeFrontiers.get(level);
-            interiorFrame.setPage(frontier.page);
-
-            interiorFrame.insert(mbrTuple, -1);
-
-            interiorFrame.getBuffer().putInt(
-                    interiorFrame.getTupleOffset(interiorFrame.getTupleCount() - 1) + mbrTuple.getTupleSize(),
-                    nodeFrontiers.get(level - 1).pageId);
-
-            if (interiorFrame.hasSpaceInsert(mbrTuple) != FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE && !toRoot) {
-                lowerFrame = prevInteriorFrame;
-                lowerFrame.setPage(frontier.page);
-
-                propagateBulk(level + 1, toRoot);
-                propagated = true;
-
-                frontier.page.releaseWriteLatch();
-                bufferCache.unpin(frontier.page);
-                frontier.pageId = freePageManager.getFreePage(metaFrame);
-
-                frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), true);
-                frontier.page.acquireWriteLatch();
-                interiorFrame.setPage(frontier.page);
-                interiorFrame.initBuffer((byte) level);
-            }
-
-            if (toRoot && !propagated && level < nodeFrontiers.size() - 1) {
-                lowerFrame = prevInteriorFrame;
-                lowerFrame.setPage(frontier.page);
-                propagateBulk(level + 1, true);
-            }
-
-            leafFrame.setPage(nodeFrontiers.get(0).page);
-        }
-    }
-
-    @Override
-    public void validate() throws HyracksDataException {
-        throw new UnsupportedOperationException("Validation not implemented for R-Trees.");
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
deleted file mode 100644
index 8a7ea8d..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class RTreeCursorInitialState implements ICursorInitialState {
-
-    private PathList pathList;
-    private int rootPage;
-    private ICachedPage page; // for disk order scan
-    private MultiComparator originalKeyCmp;
-
-    public RTreeCursorInitialState(PathList pathList, int rootPage) {
-        this.pathList = pathList;
-        this.rootPage = rootPage;
-    }
-
-    public PathList getPathList() {
-        return pathList;
-    }
-
-    public int getRootPage() {
-        return rootPage;
-    }
-
-    public void setRootPage(int rootPage) {
-        this.rootPage = rootPage;
-    }
-
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    public void setPage(ICachedPage page) {
-        this.page = page;
-    }
-
-    @Override
-    public MultiComparator getOriginalKeyComparator() {
-        return originalKeyCmp;
-    }
-
-    @Override
-    public void setOriginialKeyComparator(MultiComparator originalCmp) {
-        this.originalKeyCmp = originalCmp;
-    }
-
-    @Override
-    public ISearchOperationCallback getSearchOperationCallback() {
-        return null;
-    }
-
-    @Override
-    public void setSearchOperationCallback(ISearchOperationCallback searchCallback) {
-        // Do nothing
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
deleted file mode 100644
index 219ab30..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexOperationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class RTreeOpContext implements IIndexOperationContext {
-    private static final int INITIAL_TRAVERSE_LIST_SIZE = 100;
-    public final MultiComparator cmp;
-    public final IRTreeInteriorFrame interiorFrame;
-    public final IRTreeLeafFrame leafFrame;
-    public IndexOperation op;
-    public ITreeIndexCursor cursor;
-    public RTreeCursorInitialState cursorInitialState;
-    public ITreeIndexMetaDataFrame metaFrame;
-    public RTreeSplitKey splitKey;
-    public ITupleReference tuple;
-    // Used to record the pageIds and pageLsns of the visited pages.
-    public PathList pathList;
-    // Used for traversing the tree.
-    public PathList traverseList;
-
-    public ArrayList<ICachedPage> NSNUpdates;
-    public ArrayList<ICachedPage> LSNUpdates;
-
-    public final IModificationOperationCallback modificationCallback;
-
-    public RTreeOpContext(IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
-            ITreeIndexMetaDataFrame metaFrame, IBinaryComparatorFactory[] cmpFactories, int treeHeightHint,
-            IModificationOperationCallback modificationCallback) {
-        
-        if (cmpFactories[0] != null) { 
-            this.cmp = MultiComparator.create(cmpFactories);
-        } else {
-            this.cmp = null;
-        }
-        
-        this.interiorFrame = interiorFrame;
-        this.leafFrame = leafFrame;
-        this.metaFrame = metaFrame;
-        this.modificationCallback = modificationCallback;
-        pathList = new PathList(treeHeightHint, treeHeightHint);
-        NSNUpdates = new ArrayList<ICachedPage>();
-        LSNUpdates = new ArrayList<ICachedPage>();
-    }
-
-    public ITupleReference getTuple() {
-        return tuple;
-    }
-
-    public void setTuple(ITupleReference tuple) {
-        this.tuple = tuple;
-    }
-
-    public void reset() {
-        if (pathList != null) {
-            pathList.clear();
-        }
-        if (traverseList != null) {
-            traverseList.clear();
-        }
-        NSNUpdates.clear();
-        LSNUpdates.clear();
-    }
-
-    @Override
-    public void setOperation(IndexOperation newOp) {
-        if (op != null && newOp == op) {
-            return;
-        }
-        if (op != IndexOperation.SEARCH && op != IndexOperation.DISKORDERSCAN) {
-            if (splitKey == null) {
-                splitKey = new RTreeSplitKey(interiorFrame.getTupleWriter().createTupleReference(), interiorFrame
-                        .getTupleWriter().createTupleReference());
-            }
-            if (traverseList == null) {
-                traverseList = new PathList(INITIAL_TRAVERSE_LIST_SIZE, INITIAL_TRAVERSE_LIST_SIZE);
-            }
-        }
-        if (cursorInitialState == null) {
-            cursorInitialState = new RTreeCursorInitialState(pathList, 1);
-        }
-        this.op = newOp;
-    }
-
-    @Override
-    public IndexOperation getOperation() {
-        return op;
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
deleted file mode 100644
index 6b5b1b5..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-
-public class RTreeSearchCursor implements ITreeIndexCursor {
-
-    private int fileId = -1;
-    private ICachedPage page = null;
-    private IRTreeInteriorFrame interiorFrame = null;
-    protected IRTreeLeafFrame leafFrame = null;
-    private IBufferCache bufferCache = null;
-
-    private SearchPredicate pred;
-    private PathList pathList;
-    private int rootPage;
-    protected ITupleReference searchKey;
-
-    private int tupleIndex = 0;
-    private int tupleIndexInc = 0;
-    private int currentTupleIndex = 0;
-    private int pageId = -1;
-
-    protected MultiComparator cmp;
-
-    private ITreeIndexTupleReference frameTuple;
-    private boolean readLatched = false;
-
-    public RTreeSearchCursor(IRTreeInteriorFrame interiorFrame, IRTreeLeafFrame leafFrame) {
-        this.interiorFrame = interiorFrame;
-        this.leafFrame = leafFrame;
-        this.frameTuple = leafFrame.createTupleReference();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (readLatched) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-            readLatched = false;
-        }
-        tupleIndex = 0;
-        tupleIndexInc = 0;
-        page = null;
-        pathList = null;
-    }
-
-    @Override
-    public ITupleReference getTuple() {
-        return frameTuple;
-    }
-
-    public int getTupleOffset() {
-        return leafFrame.getTupleOffset(currentTupleIndex);
-    }
-
-    public int getPageId() {
-        return pageId;
-    }
-
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
-
-    protected boolean fetchNextLeafPage() throws HyracksDataException {
-        boolean succeeded = false;
-        if (readLatched) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-            readLatched = false;
-        }
-
-        while (!pathList.isEmpty()) {
-            int pageId = pathList.getLastPageId();
-            long parentLsn = pathList.getLastPageLsn();
-            pathList.moveLast();
-            ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            node.acquireReadLatch();
-            readLatched = true;
-            try {
-                interiorFrame.setPage(node);
-                boolean isLeaf = interiorFrame.isLeaf();
-                long pageLsn = interiorFrame.getPageLsn();
-
-                if (pageId != rootPage && parentLsn < interiorFrame.getPageNsn()) {
-                    // Concurrent split detected, we need to visit the right
-                    // page
-                    int rightPage = interiorFrame.getRightPage();
-                    if (rightPage != -1) {
-                        pathList.add(rightPage, parentLsn, -1);
-                    }
-                }
-
-                if (!isLeaf) {
-                    // We do DFS so that we get the tuples ordered (for disk
-                    // RTrees only) in the case we we are using total order
-                    // (such as Hilbert order)
-                    if (searchKey != null) {
-                        for (int i = interiorFrame.getTupleCount() - 1; i >= 0; i--) {
-                            int childPageId = interiorFrame.getChildPageIdIfIntersect(searchKey, i, cmp);
-                            if (childPageId != -1) {
-                                pathList.add(childPageId, pageLsn, -1);
-                            }
-                        }
-                    } else {
-                        for (int i = interiorFrame.getTupleCount() - 1; i >= 0; i--) {
-                            int childPageId = interiorFrame.getChildPageId(i);
-                            pathList.add(childPageId, pageLsn, -1);
-                        }
-                    }
-
-                } else {
-                    page = node;
-                    this.pageId = pageId; // This is only needed for the
-                                          // LSMRTree flush operation
-                    leafFrame.setPage(page);
-                    tupleIndex = 0;
-                    succeeded = true;
-                    return true;
-                }
-            } finally {
-                if (!succeeded) {
-                    if (readLatched) {
-                        node.releaseReadLatch();
-                        readLatched = false;
-                        bufferCache.unpin(node);
-                    }
-                }
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public boolean hasNext() throws HyracksDataException {
-        if (page == null) {
-            return false;
-        }
-
-        if (tupleIndex == leafFrame.getTupleCount()) {
-            if (!fetchNextLeafPage()) {
-                return false;
-            }
-        }
-
-        do {
-            for (int i = tupleIndex; i < leafFrame.getTupleCount(); i++) {
-                if (searchKey != null) {
-                    if (leafFrame.intersect(searchKey, i, cmp)) {
-                        frameTuple.resetByTupleIndex(leafFrame, i);
-                        currentTupleIndex = i; // This is only needed for the
-                                               // LSMRTree flush operation
-                        tupleIndexInc = i + 1;
-                        return true;
-                    }
-                } else {
-                    frameTuple.resetByTupleIndex(leafFrame, i);
-                    currentTupleIndex = i; // This is only needed for the
-                                           // LSMRTree
-                                           // flush operation
-                    tupleIndexInc = i + 1;
-                    return true;
-                }
-            }
-        } while (fetchNextLeafPage());
-        return false;
-    }
-
-    @Override
-    public void next() throws HyracksDataException {
-        tupleIndex = tupleIndexInc;
-    }
-
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        // in case open is called multiple times without closing
-        if (this.page != null) {
-            this.page.releaseReadLatch();
-            readLatched = false;
-            bufferCache.unpin(this.page);
-            pathList.clear();
-        }
-
-        pathList = ((RTreeCursorInitialState) initialState).getPathList();
-        rootPage = ((RTreeCursorInitialState) initialState).getRootPage();
-
-        pred = (SearchPredicate) searchPred;
-        cmp = pred.getLowKeyComparator();
-        searchKey = pred.getSearchKey();
-
-        if (searchKey != null) {
-            int maxFieldPos = cmp.getKeyFieldCount() / 2;
-            for (int i = 0; i < maxFieldPos; i++) {
-                int j = maxFieldPos + i;
-                int c = cmp.getComparators()[i].compare(searchKey.getFieldData(i), searchKey.getFieldStart(i),
-                        searchKey.getFieldLength(i), searchKey.getFieldData(j), searchKey.getFieldStart(j),
-                        searchKey.getFieldLength(j));
-                if (c > 0) {
-                    throw new IllegalArgumentException(
-                            "The low key point has larger coordinates than the high key point.");
-                }
-            }
-        }
-
-        pathList.add(this.rootPage, -1, -1);
-        tupleIndex = 0;
-        fetchNextLeafPage();
-    }
-
-    @Override
-    public void reset() throws HyracksDataException {
-        close();
-    }
-
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        this.bufferCache = bufferCache;
-    }
-
-    @Override
-    public void setFileId(int fileId) {
-        this.fileId = fileId;
-    }
-
-    @Override
-    public boolean exclusiveLatchNodes() {
-        return false;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
deleted file mode 100644
index d0f4c71..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-
-public class Rectangle {
-    private int dim;
-    private double[] low;
-    private double[] high;
-
-    public Rectangle(int dim) {
-        this.dim = dim;
-        low = new double[this.dim];
-        high = new double[this.dim];
-    }
-
-    public int getDim() {
-        return dim;
-    }
-
-    public double getLow(int i) {
-        return low[i];
-    }
-
-    public double getHigh(int i) {
-        return high[i];
-    }
-
-    public void setLow(int i, double value) {
-        low[i] = value;
-    }
-
-    public void setHigh(int i, double value) {
-        high[i] = value;
-    }
-
-    public void set(ITupleReference tuple, IPrimitiveValueProvider[] valueProviders) {
-        for (int i = 0; i < getDim(); i++) {
-            int j = i + getDim();
-            setLow(i, valueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i)));
-            setHigh(i, valueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j)));
-        }
-    }
-
-    public void enlarge(ITupleReference tupleToBeInserted, IPrimitiveValueProvider[] valueProviders) {
-        for (int i = 0; i < getDim(); i++) {
-            int j = getDim() + i;
-            double low = valueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
-                    tupleToBeInserted.getFieldStart(i));
-            if (getLow(i) > low) {
-                setLow(i, low);
-            }
-            double high = valueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
-                    tupleToBeInserted.getFieldStart(j));
-            if (getHigh(i) < high) {
-                setHigh(i, high);
-            }
-        }
-    }
-
-    public double enlargedArea(ITupleReference tupleToBeInserted, IPrimitiveValueProvider[] valueProviders) {
-        double areaBeforeEnlarge = area();
-        double areaAfterEnlarge = 1.0;
-
-        for (int i = 0; i < getDim(); i++) {
-            int j = getDim() + i;
-
-            double low = valueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
-                    tupleToBeInserted.getFieldStart(i));
-            double lowAfterEnlargement;
-            if (getLow(i) > low) {
-                lowAfterEnlargement = low;
-            } else {
-                lowAfterEnlargement = getLow(i);
-            }
-
-            double high = valueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
-                    tupleToBeInserted.getFieldStart(j));
-            double highAfterEnlargement;
-            if (getHigh(i) < high) {
-                highAfterEnlargement = high;
-            } else {
-                highAfterEnlargement = getHigh(i);
-            }
-
-            areaAfterEnlarge *= highAfterEnlargement - lowAfterEnlargement;
-        }
-        return areaAfterEnlarge - areaBeforeEnlarge;
-    }
-
-    public double margin() {
-        double margin = 0.0;
-        double mul = Math.pow(2, (double) getDim() - 1.0);
-        for (int i = 0; i < getDim(); i++) {
-            margin += (getHigh(i) - getLow(i)) * mul;
-        }
-        return margin;
-    }
-
-    public double overlappedArea(Rectangle rec) {
-        double area = 1.0;
-        double f1, f2;
-
-        for (int i = 0; i < getDim(); i++) {
-            if (getLow(i) > rec.getHigh(i) || getHigh(i) < rec.getLow(i)) {
-                return 0.0;
-            }
-
-            f1 = Math.max(getLow(i), rec.getLow(i));
-            f2 = Math.min(getHigh(i), rec.getHigh(i));
-            area *= f2 - f1;
-        }
-        return area;
-    }
-
-    public double area() {
-        double area = 1.0;
-        for (int i = 0; i < getDim(); i++) {
-            area *= getHigh(i) - getLow(i);
-        }
-        return area;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
deleted file mode 100644
index 7fce7e0..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
+++ /dev/null
@@ -1,180 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree.linearize;

-

-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparator;

-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;

-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;

-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;

-import edu.uci.ics.hyracks.storage.am.common.ophelpers.DoubleArrayList;

-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;

-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;

-

-/*

- * This compares two points based on the hilbert curve. Currently, it only supports

- * doubles (this can be changed by changing all doubles to ints as there are no

- * number generics in Java) in the two-dimensional space. For more dimensions, the

- * state machine has to be automatically generated. The idea of the fractal generation

- * of the curve is described e.g. in http://dl.acm.org/ft_gateway.cfm?id=383528&type=pdf

- * 

- * Unlike the described approach, this comparator does not compute the hilbert value at 

- * any point. Instead, it only evaluates how the two inputs compare to each other. This

- * is done by starting at the lowest hilbert resolution and zooming in on the fractal until

- * the two points are in different quadrants.

- * 

- * As a performance optimization, the state of the state machine is saved in a stack and 

- * maintained over comparisons. The idea behind this is that comparisons are usually in a

- * similar area (e.g. geo coordinates). Zooming in from [-MAX_VALUE, MAX_VALUE] would take

- * ~300 steps every time. Instead, the comparator start from the previous state and zooms out

- * if necessary

- */

-

-public class HilbertDoubleComparator implements ILinearizeComparator {

-    private final int dim; // dimension

-    private final HilbertState[] states;

-

-    private double[] bounds;

-    private double stepsize;

-    private int state;

-    private IntArrayList stateStack = new IntArrayList(1000, 200);

-    private DoubleArrayList boundsStack = new DoubleArrayList(2000, 400);

-

-    private IPrimitiveValueProvider valueProvider = DoublePrimitiveValueProviderFactory.INSTANCE

-            .createPrimitiveValueProvider();

-

-    private double[] a;

-    private double[] b;

-

-    private class HilbertState {

-        public final int[] nextState;

-        public final int[] position;

-

-        public HilbertState(int[] nextState, int[] order) {

-            this.nextState = nextState;

-            this.position = order;

-        }

-    }

-

-    public HilbertDoubleComparator(int dimension) {

-        if (dimension != 2)

-            throw new IllegalArgumentException();

-        dim = dimension;

-        a = new double[dim];

-        b = new double[dim];

-

-        states = new HilbertState[] { new HilbertState(new int[] { 3, 0, 1, 0 }, new int[] { 0, 1, 3, 2 }),

-                new HilbertState(new int[] { 1, 1, 0, 2 }, new int[] { 2, 1, 3, 0 }),

-                new HilbertState(new int[] { 2, 3, 2, 1 }, new int[] { 2, 3, 1, 0 }),

-                new HilbertState(new int[] { 0, 2, 3, 3 }, new int[] { 0, 3, 1, 2 }) };

-

-        resetStateMachine();

-    }

-

-    private void resetStateMachine() {

-        state = 0;

-        stateStack.clear();

-        stepsize = Double.MAX_VALUE / 2;

-        bounds = new double[dim];

-        boundsStack.clear();

-    }

-

-    public int compare() {

-        boolean equal = true;

-        for (int i = 0; i < dim; i++) {

-            if (a[i] != b[i])

-                equal = false;

-        }

-        if (equal)

-            return 0;

-

-        // We keep the state of the state machine after a comparison. In most

-        // cases,

-        // the needed zoom factor is close to the old one. In this step, we

-        // check if we have

-        // to zoom out

-        while (true) {

-            if (stateStack.size() <= dim) {

-                resetStateMachine();

-                break;

-            }

-            boolean zoomOut = false;

-            for (int i = 0; i < dim; i++) {

-                if (Math.min(a[i], b[i]) <= bounds[i] - 2 * stepsize

-                        || Math.max(a[i], b[i]) >= bounds[i] + 2 * stepsize) {

-                    zoomOut = true;

-                    break;

-                }

-            }

-            state = stateStack.getLast();

-            stateStack.removeLast();

-            for (int j = dim - 1; j >= 0; j--) {

-                bounds[j] = boundsStack.getLast();

-                boundsStack.removeLast();

-            }

-            stepsize *= 2;

-            if (!zoomOut) {

-                state = stateStack.getLast();

-                stateStack.removeLast();

-                for (int j = dim - 1; j >= 0; j--) {

-                    bounds[j] = boundsStack.getLast();

-                    boundsStack.removeLast();

-                }

-                stepsize *= 2;

-                break;

-            }

-        }

-

-        while (true) {

-            stateStack.add(state);

-            for (int j = 0; j < dim; j++) {

-                boundsStack.add(bounds[j]);

-            }

-

-            // Find the quadrant in which A and B are

-            int quadrantA = 0, quadrantB = 0;

-            for (int i = dim - 1; i >= 0; i--) {

-                if (a[i] >= bounds[i])

-                    quadrantA ^= (1 << (dim - i - 1));

-                if (b[i] >= bounds[i])

-                    quadrantB ^= (1 << (dim - i - 1));

-

-                if (a[i] >= bounds[i]) {

-                    bounds[i] += stepsize;

-                } else {

-                    bounds[i] -= stepsize;

-                }

-            }

-

-            stepsize /= 2;

-            if (stepsize <= 2 * DoublePointable.getEpsilon())

-                return 0;

-            // avoid infinite loop due to machine epsilon problems

-

-            if (quadrantA != quadrantB) {

-                // find the position of A and B's quadrants

-                int posA = states[state].position[quadrantA];

-                int posB = states[state].position[quadrantB];

-

-                if (posA < posB)

-                    return -1;

-                else

-                    return 1;

-            }

-

-            state = states[state].nextState[quadrantA];

-        }

-    }

-

-    @Override

-    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

-        for (int i = 0; i < dim; i++) {

-            a[i] = DoubleSerializerDeserializer.getDouble(b1, s1 + (i * 8));

-            b[i] = DoubleSerializerDeserializer.getDouble(b2, s2 + (i * 8));

-        }

-

-        return compare();

-    }

-

-    @Override

-    public int getDimensions() {

-        return dim;

-    }

-}

diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/HilbertDoubleComparatorFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/HilbertDoubleComparatorFactory.java
deleted file mode 100644
index e06dba8..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/HilbertDoubleComparatorFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.rtree.linearize;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-
-public class HilbertDoubleComparatorFactory implements ILinearizeComparatorFactory {
-    private static final long serialVersionUID = 1L;
-    
-    private int dim;
-
-    public static HilbertDoubleComparatorFactory get(int dim) {
-        return new HilbertDoubleComparatorFactory(dim);
-    }
-    
-    public HilbertDoubleComparatorFactory(int dim) {
-    	this.dim = dim;
-    }
-
-    @Override
-    public ILinearizeComparator createBinaryComparator() {
-        return new HilbertDoubleComparator(dim);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
deleted file mode 100644
index ee47761..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
+++ /dev/null
@@ -1,136 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree.linearize;

-

-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparator;

-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;

-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;

-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;

-import edu.uci.ics.hyracks.storage.am.common.ophelpers.DoubleArrayList;

-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;

-

-/*

- * This compares two points based on the z curve. For doubles, we cannot use

- * the simple bit magic approach. There may, however, be a better approach than this.

- */

-

-public class ZCurveDoubleComparator implements ILinearizeComparator {

-    private final int dim; // dimension

-

-    private double[] bounds;

-    private double stepsize;

-    private DoubleArrayList boundsStack = new DoubleArrayList(2000, 400);

-

-    private IPrimitiveValueProvider valueProvider = DoublePrimitiveValueProviderFactory.INSTANCE

-            .createPrimitiveValueProvider();

-

-    private double[] a;

-    private double[] b;

-

-    public ZCurveDoubleComparator(int dimension) {

-        dim = dimension;

-        a = new double[dim];

-        b = new double[dim];

-

-        resetStateMachine();

-    }

-

-    private void resetStateMachine() {

-        stepsize = Double.MAX_VALUE / 2;

-        bounds = new double[dim];

-        boundsStack.clear();

-    }

-

-    public int compare() {

-        boolean equal = true;

-        for (int i = 0; i < dim; i++) {

-            if (a[i] != b[i])

-                equal = false;

-        }

-        if (equal)

-            return 0;

-

-        // We keep the state of the state machine after a comparison. In most

-        // cases,

-        // the needed zoom factor is close to the old one. In this step, we

-        // check if we have

-        // to zoom out

-        while (true) {

-            if (boundsStack.size() <= dim) {

-                resetStateMachine();

-                break;

-            }

-            boolean zoomOut = false;

-            for (int i = 0; i < dim; i++) {

-                if (Math.min(a[i], b[i]) <= bounds[i] - 2 * stepsize

-                        || Math.max(a[i], b[i]) >= bounds[i] + 2 * stepsize) {

-                    zoomOut = true;

-                    break;

-                }

-            }

-

-            for (int j = dim - 1; j >= 0; j--) {

-                bounds[j] = boundsStack.getLast();

-                boundsStack.removeLast();

-            }

-            stepsize *= 2;

-            if (!zoomOut) {

-                for (int j = dim - 1; j >= 0; j--) {

-                    bounds[j] = boundsStack.getLast();

-                    boundsStack.removeLast();

-                }

-                stepsize *= 2;

-                break;

-            }

-        }

-

-        while (true) {

-            for (int j = 0; j < dim; j++) {

-                boundsStack.add(bounds[j]);

-            }

-

-            // Find the quadrant in which A and B are

-            int quadrantA = 0, quadrantB = 0;

-            for (int i = dim - 1; i >= 0; i--) {

-                if (a[i] >= bounds[i])

-                    quadrantA ^= (1 << (dim - i - 1));

-                if (b[i] >= bounds[i])

-                    quadrantB ^= (1 << (dim - i - 1));

-

-                if (a[i] >= bounds[i]) {

-                    bounds[i] += stepsize;

-                } else {

-                    bounds[i] -= stepsize;

-                }

-            }

-

-            stepsize /= 2;

-            if (stepsize <= 2 * DoublePointable.getEpsilon())

-                return 0;

-            // avoid infinite loop due to machine epsilon problems

-

-            if (quadrantA != quadrantB) {

-                // find the position of A and B's quadrants

-                if (quadrantA < quadrantB)

-                    return -1;

-                else if (quadrantA > quadrantB)

-                    return 1;

-                else

-                    return 0;

-            }

-        }

-    }

-

-    @Override

-    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

-        for (int i = 0; i < dim; i++) {

-            a[i] = DoubleSerializerDeserializer.getDouble(b1, s1 + (i * 8));

-            b[i] = DoubleSerializerDeserializer.getDouble(b2, s2 + (i * 8));

-        }

-

-        return compare();

-    }

-

-    @Override

-    public int getDimensions() {

-        return dim;

-    }

-}

diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparatorFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparatorFactory.java
deleted file mode 100644
index f1b5806..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparatorFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.rtree.linearize;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-
-public class ZCurveDoubleComparatorFactory implements ILinearizeComparatorFactory {
-    private static final long serialVersionUID = 1L;
-    
-    private int dim;
-
-    public static ZCurveDoubleComparatorFactory get(int dim) {
-        return new ZCurveDoubleComparatorFactory(dim);
-    }
-    
-    public ZCurveDoubleComparatorFactory(int dim) {
-    	this.dim = dim;
-    }
-
-    @Override
-    public ILinearizeComparator createBinaryComparator() {
-        return new ZCurveDoubleComparator(dim);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveIntComparator.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveIntComparator.java
deleted file mode 100644
index 1f26f41..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveIntComparator.java
+++ /dev/null
@@ -1,129 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree.linearize;

-

-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparator;

-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;

-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;

-import edu.uci.ics.hyracks.storage.am.common.ophelpers.DoubleArrayList;

-

-/*

- * This compares two points based on the z curve. For doubles, we cannot use

- * the simple bit magic approach. There may, however, be a better approach than this.

- */

-

-public class ZCurveIntComparator implements ILinearizeComparator {

-    private final int dim; // dimension

-

-    private double[] bounds;

-    private double stepsize;

-    private DoubleArrayList boundsStack = new DoubleArrayList(2000, 400);

-

-    private int[] a;

-    private int[] b;

-

-    public ZCurveIntComparator(int dimension) {

-        dim = dimension;

-        a = new int[dim];

-        b = new int[dim];

-

-        resetStateMachine();

-    }

-

-    private void resetStateMachine() {

-        stepsize = Integer.MAX_VALUE / 2;

-        bounds = new double[dim];

-        boundsStack.clear();

-    }

-

-    public int compare() {

-        boolean equal = true;

-        for (int i = 0; i < dim; i++) {

-            if (a[i] != b[i])

-                equal = false;

-        }

-        if (equal)

-            return 0;

-

-        // We keep the state of the state machine after a comparison. In most cases,

-        // the needed zoom factor is close to the old one. In this step, we check if we have

-        // to zoom out

-        while (true) {

-            if (boundsStack.size() <= dim) {

-                resetStateMachine();

-                break;

-            }

-            boolean zoomOut = false;

-            for (int i = 0; i < dim; i++) {

-                if (Math.min(a[i], b[i]) <= bounds[i] - 2 * stepsize

-                        || Math.max(a[i], b[i]) >= bounds[i] + 2 * stepsize) {

-                    zoomOut = true;

-                    break;

-                }

-            }

-

-            for (int j = dim - 1; j >= 0; j--) {

-                bounds[j] = boundsStack.getLast();

-                boundsStack.removeLast();

-            }

-            stepsize *= 2;

-            if (!zoomOut) {

-                for (int j = dim - 1; j >= 0; j--) {

-                    bounds[j] = boundsStack.getLast();

-                    boundsStack.removeLast();

-                }

-                stepsize *= 2;

-                break;

-            }

-        }

-

-        while (true) {

-            for (int j = 0; j < dim; j++) {

-                boundsStack.add(bounds[j]);

-            }

-

-            // Find the quadrant in which A and B are

-            int quadrantA = 0, quadrantB = 0;

-            for (int i = dim - 1; i >= 0; i--) {

-                if (a[i] >= bounds[i])

-                    quadrantA ^= (1 << (dim - i - 1));

-                if (b[i] >= bounds[i])

-                    quadrantB ^= (1 << (dim - i - 1));

-

-                if (a[i] >= bounds[i]) {

-                    bounds[i] += stepsize;

-                } else {

-                    bounds[i] -= stepsize;

-                }

-            }

-

-            stepsize /= 2;

-            if (stepsize <= 2 * DoublePointable.getEpsilon())

-                return 0;

-            // avoid infinite loop due to machine epsilon problems

-

-            if (quadrantA != quadrantB) {

-                // find the position of A and B's quadrants

-                if (quadrantA < quadrantB)

-                    return -1;

-                else if (quadrantA > quadrantB)

-                    return 1;

-                else

-                    return 0;

-            }

-        }

-    }

-

-    @Override

-    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

-        for (int i = 0; i < dim; i++) {

-            a[i] = IntegerSerializerDeserializer.getInt(b1, s1 + (i * 8));

-            b[i] = IntegerSerializerDeserializer.getInt(b2, s2 + (i * 8));

-        }

-

-        return compare();

-    }

-

-    @Override

-    public int getDimensions() {

-        return dim;

-    }

-}

diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveIntComparatorFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveIntComparatorFactory.java
deleted file mode 100644
index 4a35a79..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/linearize/ZCurveIntComparatorFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.rtree.linearize;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-
-public class ZCurveIntComparatorFactory implements ILinearizeComparatorFactory {
-    private static final long serialVersionUID = 1L;
-    
-    private int dim;
-
-    public static ZCurveIntComparatorFactory get(int dim) {
-        return new ZCurveIntComparatorFactory(dim);
-    }
-    
-    public ZCurveIntComparatorFactory(int dim) {
-    	this.dim = dim;
-    }
-
-    @Override
-    public ILinearizeComparator createBinaryComparator() {
-        return new ZCurveIntComparator(dim);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java
deleted file mode 100644
index 5889abb..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.util;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.data.PointablePrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class RTreeUtils {
-    public static RTree createRTree(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            ITypeTraits[] typeTraits, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            IBinaryComparatorFactory[] cmpFactories, RTreePolicyType rtreePolicyType, FileReference file) {
-
-        RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
-                valueProviderFactories, rtreePolicyType);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
-                valueProviderFactories, rtreePolicyType);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-        RTree rtree = new RTree(bufferCache, fileMapProvider, freePageManager, interiorFrameFactory, leafFrameFactory,
-                cmpFactories, typeTraits.length, file);
-        return rtree;
-    }
-
-    // Creates a new MultiComparator by constructing new IBinaryComparators.
-    public static MultiComparator getSearchMultiComparator(IBinaryComparatorFactory[] cmpFactories,
-            ITupleReference searchKey) {
-        if (searchKey == null || cmpFactories.length == searchKey.getFieldCount()) {
-            return MultiComparator.create(cmpFactories);
-        }
-        IBinaryComparator[] newCmps = new IBinaryComparator[searchKey.getFieldCount()];
-        for (int i = 0; i < searchKey.getFieldCount(); i++) {
-            newCmps[i] = cmpFactories[i].createBinaryComparator();
-        }
-        return new MultiComparator(newCmps);
-    }
-
-    public static IPrimitiveValueProviderFactory[] createPrimitiveValueProviderFactories(int len, IPointableFactory pf) {
-        IPrimitiveValueProviderFactory[] pvpfs = new IPrimitiveValueProviderFactory[len];
-        for (int i = 0; i < len; ++i) {
-            pvpfs[i] = new PointablePrimitiveValueProviderFactory(pf);
-        }
-        return pvpfs;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-common/pom.xml b/hyracks-storage-common/pom.xml
deleted file mode 100644
index 0d740e0..0000000
--- a/hyracks-storage-common/pom.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-common</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-api</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java
deleted file mode 100644
index 5759218..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-
-public interface IStorageManagerInterface extends Serializable {
-    public IBufferCache getBufferCache(IHyracksTaskContext ctx);
-
-    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx);
-
-    public ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx);
-
-    public ResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx);
-}
\ No newline at end of file
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
deleted file mode 100644
index 850162b..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
+++ /dev/null
@@ -1,784 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.buffercache;
-
-import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IFileHandle;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
-
-public class BufferCache implements IBufferCacheInternal {
-    private static final Logger LOGGER = Logger.getLogger(BufferCache.class.getName());
-    private static final int MAP_FACTOR = 2;
-
-    private static final int MIN_CLEANED_COUNT_DIFF = 3;
-    private static final int PIN_MAX_WAIT_TIME = 50;
-
-    private final int maxOpenFiles;
-
-    private final IIOManager ioManager;
-    private final int pageSize;
-    private final int numPages;
-    private final CachedPage[] cachedPages;
-    private final CacheBucket[] pageMap;
-    private final IPageReplacementStrategy pageReplacementStrategy;
-    private final IFileMapManager fileMapManager;
-    private final CleanerThread cleanerThread;
-    private final Map<Integer, BufferedFileHandle> fileInfoMap;
-
-    private boolean closed;
-
-    public BufferCache(IIOManager ioManager, ICacheMemoryAllocator allocator,
-            IPageReplacementStrategy pageReplacementStrategy, IFileMapManager fileMapManager, int pageSize,
-            int numPages, int maxOpenFiles) {
-        this.ioManager = ioManager;
-        this.pageSize = pageSize;
-        this.numPages = numPages;
-        this.maxOpenFiles = maxOpenFiles;
-        pageReplacementStrategy.setBufferCache(this);
-        ByteBuffer[] buffers = allocator.allocate(pageSize, numPages);
-        cachedPages = new CachedPage[buffers.length];
-        for (int i = 0; i < buffers.length; ++i) {
-            cachedPages[i] = new CachedPage(i, buffers[i], pageReplacementStrategy);
-        }
-        pageMap = new CacheBucket[numPages * MAP_FACTOR];
-        for (int i = 0; i < pageMap.length; ++i) {
-            pageMap[i] = new CacheBucket();
-        }
-        this.pageReplacementStrategy = pageReplacementStrategy;
-        this.fileMapManager = fileMapManager;
-        fileInfoMap = new HashMap<Integer, BufferedFileHandle>();
-        cleanerThread = new CleanerThread();
-        cleanerThread.start();
-        closed = false;
-    }
-
-    @Override
-    public int getPageSize() {
-        return pageSize;
-    }
-
-    @Override
-    public int getNumPages() {
-        return numPages;
-    }
-
-    private void pinSanityCheck(long dpid) throws HyracksDataException {
-        if (closed) {
-            throw new HyracksDataException("pin called on a closed cache");
-        }
-
-        // check whether file has been created and opened
-        int fileId = BufferedFileHandle.getFileId(dpid);
-        BufferedFileHandle fInfo = null;
-        synchronized (fileInfoMap) {
-            fInfo = fileInfoMap.get(fileId);
-        }
-        if (fInfo == null) {
-            throw new HyracksDataException("pin called on a fileId " + fileId + " that has not been created.");
-        } else if (fInfo.getReferenceCount() <= 0) {
-            throw new HyracksDataException("pin called on a fileId " + fileId + " that has not been opened.");
-        }
-    }
-
-    @Override
-    public ICachedPage tryPin(long dpid) throws HyracksDataException {
-        pinSanityCheck(dpid);
-        CachedPage cPage = null;
-        int hash = hash(dpid);
-        CacheBucket bucket = pageMap[hash];
-        bucket.bucketLock.lock();
-        try {
-            cPage = bucket.cachedPage;
-            while (cPage != null) {
-                if (cPage.dpid == dpid) {
-                    cPage.pinCount.incrementAndGet();
-                    pageReplacementStrategy.notifyCachePageAccess(cPage);
-                    return cPage;
-                }
-                cPage = cPage.next;
-            }
-        } finally {
-            bucket.bucketLock.unlock();
-        }
-        return cPage;
-    }
-
-    @Override
-    public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
-        pinSanityCheck(dpid);
-
-        CachedPage cPage = findPage(dpid, newPage);
-        if (cPage == null) {
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine(dumpState());
-            }
-            throw new HyracksDataException("Failed to pin page " + BufferedFileHandle.getFileId(dpid) + ":"
-                    + BufferedFileHandle.getPageId(dpid) + " because all pages are pinned.");
-        }
-        if (!newPage) {
-            // Resolve race of multiple threads trying to read the page from disk.
-            synchronized (cPage) {
-                if (!cPage.valid) {
-                    read(cPage);
-                    cPage.valid = true;
-                }
-            }
-        } else {
-            cPage.valid = true;
-        }
-        pageReplacementStrategy.notifyCachePageAccess(cPage);
-        return cPage;
-    }
-
-    private CachedPage findPage(long dpid, boolean newPage) {
-        while (true) {
-            int startCleanedCount = cleanerThread.cleanedCount;
-
-            CachedPage cPage = null;
-            /*
-             * Hash dpid to get a bucket and then check if the page exists in the bucket.
-             */
-            int hash = hash(dpid);
-            CacheBucket bucket = pageMap[hash];
-            bucket.bucketLock.lock();
-            try {
-                cPage = bucket.cachedPage;
-                while (cPage != null) {
-                    if (cPage.dpid == dpid) {
-                        cPage.pinCount.incrementAndGet();
-                        return cPage;
-                    }
-                    cPage = cPage.next;
-                }
-            } finally {
-                bucket.bucketLock.unlock();
-            }
-            /*
-             * If we got here, the page was not in the hash table. Now we ask the page replacement strategy to find us a victim.
-             */
-            CachedPage victim = (CachedPage) pageReplacementStrategy.findVictim();
-            if (victim != null) {
-                /*
-                 * We have a victim with the following invariants.
-                 * 1. The dpid on the CachedPage may or may not be valid.
-                 * 2. We have a pin on the CachedPage. We have to deal with three cases here.
-                 *  Case 1: The dpid on the CachedPage is invalid (-1). This indicates that this buffer has never been used.
-                 *  So we are the only ones holding it. Get a lock on the required dpid's hash bucket, check if someone inserted
-                 *  the page we want into the table. If so, decrement the pincount on the victim and return the winner page in the
-                 *  table. If such a winner does not exist, insert the victim and return it.
-                 *  Case 2: The dpid on the CachedPage is valid.
-                 *      Case 2a: The current dpid and required dpid hash to the same bucket.
-                 *      Get the bucket lock, check that the victim is still at pinCount == 1 If so check if there is a winning
-                 *      CachedPage with the required dpid. If so, decrement the pinCount on the victim and return the winner.
-                 *      If not, update the contents of the CachedPage to hold the required dpid and return it. If the picCount
-                 *      on the victim was != 1 or CachedPage was dirty someone used the victim for its old contents -- Decrement
-                 *      the pinCount and retry.
-                 *  Case 2b: The current dpid and required dpid hash to different buckets. Get the two bucket locks in the order
-                 *  of the bucket indexes (Ordering prevents deadlocks). Check for the existence of a winner in the new bucket
-                 *  and for potential use of the victim (pinCount != 1). If everything looks good, remove the CachedPage from
-                 *  the old bucket, and add it to the new bucket and update its header with the new dpid.
-                 */
-                if (victim.dpid < 0) {
-                    /*
-                     * Case 1.
-                     */
-                    bucket.bucketLock.lock();
-                    try {
-                        cPage = bucket.cachedPage;
-                        while (cPage != null) {
-                            if (cPage.dpid == dpid) {
-                                cPage.pinCount.incrementAndGet();
-                                victim.pinCount.decrementAndGet();
-                                return cPage;
-                            }
-                            cPage = cPage.next;
-                        }
-                        victim.reset(dpid);
-                        victim.next = bucket.cachedPage;
-                        bucket.cachedPage = victim;
-                    } finally {
-                        bucket.bucketLock.unlock();
-                    }
-                    return victim;
-                }
-                int victimHash = hash(victim.dpid);
-                if (victimHash == hash) {
-                    /*
-                     * Case 2a.
-                     */
-                    bucket.bucketLock.lock();
-                    try {
-                        if (victim.pinCount.get() != 1) {
-                            victim.pinCount.decrementAndGet();
-                            continue;
-                        }
-                        cPage = bucket.cachedPage;
-                        while (cPage != null) {
-                            if (cPage.dpid == dpid) {
-                                cPage.pinCount.incrementAndGet();
-                                victim.pinCount.decrementAndGet();
-                                return cPage;
-                            }
-                            cPage = cPage.next;
-                        }
-                        victim.reset(dpid);
-                    } finally {
-                        bucket.bucketLock.unlock();
-                    }
-                    return victim;
-                } else {
-                    /*
-                     * Case 2b.
-                     */
-                    CacheBucket victimBucket = pageMap[victimHash];
-                    if (victimHash < hash) {
-                        victimBucket.bucketLock.lock();
-                        bucket.bucketLock.lock();
-                    } else {
-                        bucket.bucketLock.lock();
-                        victimBucket.bucketLock.lock();
-                    }
-                    try {
-                        if (victim.pinCount.get() != 1) {
-                            victim.pinCount.decrementAndGet();
-                            continue;
-                        }
-                        cPage = bucket.cachedPage;
-                        while (cPage != null) {
-                            if (cPage.dpid == dpid) {
-                                cPage.pinCount.incrementAndGet();
-                                victim.pinCount.decrementAndGet();
-                                return cPage;
-                            }
-                            cPage = cPage.next;
-                        }
-                        if (victimBucket.cachedPage == victim) {
-                            victimBucket.cachedPage = victim.next;
-                        } else {
-                            CachedPage victimPrev = victimBucket.cachedPage;
-                            while (victimPrev != null && victimPrev.next != victim) {
-                                victimPrev = victimPrev.next;
-                            }
-                            assert victimPrev != null;
-                            victimPrev.next = victim.next;
-                        }
-                        victim.reset(dpid);
-                        victim.next = bucket.cachedPage;
-                        bucket.cachedPage = victim;
-                    } finally {
-                        victimBucket.bucketLock.unlock();
-                        bucket.bucketLock.unlock();
-                    }
-                    return victim;
-                }
-            }
-            synchronized (cleanerThread) {
-                cleanerThread.notifyAll();
-            }
-            // Heuristic optimization. Check whether the cleaner thread has
-            // cleaned pages since we did our last pin attempt.
-            if (cleanerThread.cleanedCount - startCleanedCount > MIN_CLEANED_COUNT_DIFF) {
-                // Don't go to sleep and wait for notification from the cleaner,
-                // just try to pin again immediately.
-                continue;
-            }
-            synchronized (cleanerThread.cleanNotification) {
-                try {
-                    cleanerThread.cleanNotification.wait(PIN_MAX_WAIT_TIME);
-                } catch (InterruptedException e) {
-                    // Do nothing
-                }
-            }
-        }
-    }
-
-    private String dumpState() {
-        StringBuilder buffer = new StringBuilder();
-        buffer.append("Buffer cache state\n");
-        buffer.append("Page Size: ").append(pageSize).append('\n');
-        buffer.append("Number of physical pages: ").append(numPages).append('\n');
-        buffer.append("Hash table size: ").append(pageMap.length).append('\n');
-        buffer.append("Page Map:\n");
-        int nCachedPages = 0;
-        for (int i = 0; i < pageMap.length; ++i) {
-            CacheBucket cb = pageMap[i];
-            cb.bucketLock.lock();
-            try {
-                CachedPage cp = cb.cachedPage;
-                if (cp != null) {
-                    buffer.append("   ").append(i).append('\n');
-                    while (cp != null) {
-                        buffer.append("      ").append(cp.cpid).append(" -> [")
-                                .append(BufferedFileHandle.getFileId(cp.dpid)).append(':')
-                                .append(BufferedFileHandle.getPageId(cp.dpid)).append(", ").append(cp.pinCount.get())
-                                .append(", ").append(cp.valid ? "valid" : "invalid").append(", ")
-                                .append(cp.dirty.get() ? "dirty" : "clean").append("]\n");
-                        cp = cp.next;
-                        ++nCachedPages;
-                    }
-                }
-            } finally {
-                cb.bucketLock.unlock();
-            }
-        }
-        buffer.append("Number of cached pages: ").append(nCachedPages).append('\n');
-        return buffer.toString();
-    }
-
-    private void read(CachedPage cPage) throws HyracksDataException {
-        BufferedFileHandle fInfo = getFileInfo(cPage);
-        cPage.buffer.clear();
-        ioManager.syncRead(fInfo.getFileHandle(), (long) BufferedFileHandle.getPageId(cPage.dpid) * pageSize,
-                cPage.buffer);
-    }
-
-    private BufferedFileHandle getFileInfo(CachedPage cPage) throws HyracksDataException {
-        synchronized (fileInfoMap) {
-            BufferedFileHandle fInfo = fileInfoMap.get(BufferedFileHandle.getFileId(cPage.dpid));
-            if (fInfo == null) {
-                throw new HyracksDataException("No such file mapped");
-            }
-            return fInfo;
-        }
-    }
-
-    private void write(CachedPage cPage) throws HyracksDataException {
-        BufferedFileHandle fInfo = getFileInfo(cPage);
-        if (fInfo.fileHasBeenDeleted()) {
-            return;
-        }
-        cPage.buffer.position(0);
-        cPage.buffer.limit(pageSize);
-        ioManager.syncWrite(fInfo.getFileHandle(), (long) BufferedFileHandle.getPageId(cPage.dpid) * pageSize,
-                cPage.buffer);
-    }
-
-    @Override
-    public void unpin(ICachedPage page) throws HyracksDataException {
-        if (closed) {
-            throw new HyracksDataException("unpin called on a closed cache");
-        }
-        ((CachedPage) page).pinCount.decrementAndGet();
-    }
-
-    private int hash(long dpid) {
-        return (int) (dpid % pageMap.length);
-    }
-
-    private static class CacheBucket {
-        private final Lock bucketLock;
-        private CachedPage cachedPage;
-
-        public CacheBucket() {
-            bucketLock = new ReentrantLock();
-        }
-    }
-
-    private class CachedPage implements ICachedPageInternal {
-        private final int cpid;
-        private final ByteBuffer buffer;
-        private final AtomicInteger pinCount;
-        private final AtomicBoolean dirty;
-        private final ReadWriteLock latch;
-        private final Object replacementStrategyObject;
-        volatile long dpid;
-        CachedPage next;
-        volatile boolean valid;
-
-        public CachedPage(int cpid, ByteBuffer buffer, IPageReplacementStrategy pageReplacementStrategy) {
-            this.cpid = cpid;
-            this.buffer = buffer;
-            pinCount = new AtomicInteger();
-            dirty = new AtomicBoolean();
-            latch = new ReentrantReadWriteLock(true);
-            replacementStrategyObject = pageReplacementStrategy.createPerPageStrategyObject(cpid);
-            dpid = -1;
-            valid = false;
-        }
-
-        public void reset(long dpid) {
-            this.dpid = dpid;
-            dirty.set(false);
-            valid = false;
-            pageReplacementStrategy.notifyCachePageReset(this);
-        }
-
-        public void invalidate() {
-            reset(-1);
-        }
-
-        @Override
-        public ByteBuffer getBuffer() {
-            return buffer;
-        }
-
-        @Override
-        public Object getReplacementStrategyObject() {
-            return replacementStrategyObject;
-        }
-
-        @Override
-        public boolean pinIfGoodVictim() {
-            return pinCount.compareAndSet(0, 1);
-        }
-
-        @Override
-        public int getCachedPageId() {
-            return cpid;
-        }
-
-        @Override
-        public void acquireReadLatch() {
-            latch.readLock().lock();
-        }
-
-        private void acquireWriteLatch(boolean markDirty) {
-            latch.writeLock().lock();
-            if (markDirty) {
-                if (dirty.compareAndSet(false, true)) {
-                    pinCount.incrementAndGet();
-                }
-            }
-        }
-
-        @Override
-        public void acquireWriteLatch() {
-            acquireWriteLatch(true);
-        }
-
-        @Override
-        public void releaseReadLatch() {
-            latch.readLock().unlock();
-        }
-
-        @Override
-        public void releaseWriteLatch() {
-            latch.writeLock().unlock();
-        }
-    }
-
-    @Override
-    public ICachedPageInternal getPage(int cpid) {
-        return cachedPages[cpid];
-    }
-
-    private class CleanerThread extends Thread {
-        private boolean shutdownStart = false;
-        private boolean shutdownComplete = false;
-        private final Object cleanNotification = new Object();
-        // Simply keeps incrementing this counter when a page is cleaned.
-        // Used to implement wait-for-cleanerthread heuristic optimizations.
-        // A waiter can detect whether pages have been cleaned.
-        // No need to make this var volatile or synchronize it's access in any
-        // way because it is used for heuristics.
-        private int cleanedCount = 0;
-
-        public CleanerThread() {
-            setPriority(MAX_PRIORITY);
-        }
-
-        public void cleanPage(CachedPage cPage, boolean force) {
-            if (cPage.dirty.get()) {
-                boolean proceed = false;
-                if (force) {
-                    cPage.latch.writeLock().lock();
-                    proceed = true;
-                } else {
-                    proceed = cPage.latch.readLock().tryLock();
-                }
-                if (proceed) {
-                    try {
-                        // Make sure page is still dirty.
-                        if (!cPage.dirty.get()) {
-                            return;
-                        }
-                        boolean cleaned = true;
-                        try {
-                            write(cPage);
-                        } catch (HyracksDataException e) {
-                            cleaned = false;
-                        }
-                        if (cleaned) {
-                            cPage.dirty.set(false);
-                            cPage.pinCount.decrementAndGet();
-                            cleanedCount++;
-                            synchronized (cleanNotification) {
-                                cleanNotification.notifyAll();
-                            }
-                        }
-                    } finally {
-                        if (force) {
-                            cPage.latch.writeLock().unlock();
-                        } else {
-                            cPage.latch.readLock().unlock();
-                        }
-                    }
-                } else if (shutdownStart) {
-                    throw new IllegalStateException("Cache closed, but unable to acquire read lock on dirty page: "
-                            + cPage.dpid);
-                }
-            }
-        }
-
-        @Override
-        public synchronized void run() {
-            try {
-                while (true) {
-                    for (int i = 0; i < numPages; ++i) {
-                        CachedPage cPage = cachedPages[i];
-                        cleanPage(cPage, false);
-                    }
-                    if (shutdownStart) {
-                        break;
-                    }
-                    try {
-                        wait(1000);
-                    } catch (InterruptedException e) {
-                        e.printStackTrace();
-                    }
-                }
-            } finally {
-                shutdownComplete = true;
-                notifyAll();
-            }
-        }
-    }
-
-    @Override
-    public void close() {
-        closed = true;
-        synchronized (cleanerThread) {
-            cleanerThread.shutdownStart = true;
-            cleanerThread.notifyAll();
-            while (!cleanerThread.shutdownComplete) {
-                try {
-                    cleanerThread.wait();
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-
-        synchronized (fileInfoMap) {
-            try {
-                for (Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
-                    boolean fileHasBeenDeleted = entry.getValue().fileHasBeenDeleted();
-                    sweepAndFlush(entry.getKey(), !fileHasBeenDeleted);
-                    if (!fileHasBeenDeleted) {
-                        ioManager.close(entry.getValue().getFileHandle());
-                    }
-                }
-            } catch (HyracksDataException e) {
-                e.printStackTrace();
-            }
-            fileInfoMap.clear();
-        }
-    }
-
-    @Override
-    public void createFile(FileReference fileRef) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Creating file: " + fileRef + " in cache: " + this);
-        }
-        synchronized (fileInfoMap) {
-            fileMapManager.registerFile(fileRef);
-        }
-    }
-
-    @Override
-    public void openFile(int fileId) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Opening file: " + fileId + " in cache: " + this);
-        }
-        synchronized (fileInfoMap) {
-            BufferedFileHandle fInfo;
-            fInfo = fileInfoMap.get(fileId);
-            if (fInfo == null) {
-
-                // map is full, make room by cleaning up unreferenced files
-                boolean unreferencedFileFound = true;
-                while (fileInfoMap.size() >= maxOpenFiles && unreferencedFileFound) {
-                    unreferencedFileFound = false;
-                    for (Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
-                        if (entry.getValue().getReferenceCount() <= 0) {
-                            int entryFileId = entry.getKey();
-                            boolean fileHasBeenDeleted = entry.getValue().fileHasBeenDeleted();
-                            sweepAndFlush(entryFileId, !fileHasBeenDeleted);
-                            if (!fileHasBeenDeleted) {
-                                ioManager.close(entry.getValue().getFileHandle());
-                            }
-                            fileInfoMap.remove(entryFileId);
-                            unreferencedFileFound = true;
-                            // for-each iterator is invalid because we changed fileInfoMap
-                            break;
-                        }
-                    }
-                }
-
-                if (fileInfoMap.size() >= maxOpenFiles) {
-                    throw new HyracksDataException("Could not open fileId " + fileId + ". Max number of files "
-                            + maxOpenFiles + " already opened and referenced.");
-                }
-
-                // create, open, and map new file reference
-                FileReference fileRef = fileMapManager.lookupFileName(fileId);
-                IFileHandle fh = ioManager.open(fileRef, IIOManager.FileReadWriteMode.READ_WRITE,
-                        IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
-                fInfo = new BufferedFileHandle(fileId, fh);
-                fileInfoMap.put(fileId, fInfo);
-            }
-            fInfo.incReferenceCount();
-        }
-    }
-
-    private void sweepAndFlush(int fileId, boolean flushDirtyPages) throws HyracksDataException {
-        for (int i = 0; i < pageMap.length; ++i) {
-            CacheBucket bucket = pageMap[i];
-            bucket.bucketLock.lock();
-            try {
-                CachedPage prev = bucket.cachedPage;
-                while (prev != null) {
-                    CachedPage cPage = prev.next;
-                    if (cPage == null) {
-                        break;
-                    }
-                    if (invalidateIfFileIdMatch(fileId, cPage, flushDirtyPages)) {
-                        prev.next = cPage.next;
-                        cPage.next = null;
-                    } else {
-                        prev = cPage;
-                    }
-                }
-                // Take care of the head of the chain.
-                if (bucket.cachedPage != null) {
-                    if (invalidateIfFileIdMatch(fileId, bucket.cachedPage, flushDirtyPages)) {
-                        CachedPage cPage = bucket.cachedPage;
-                        bucket.cachedPage = bucket.cachedPage.next;
-                        cPage.next = null;
-                    }
-                }
-            } finally {
-                bucket.bucketLock.unlock();
-            }
-        }
-    }
-
-    private boolean invalidateIfFileIdMatch(int fileId, CachedPage cPage, boolean flushDirtyPages)
-            throws HyracksDataException {
-        if (BufferedFileHandle.getFileId(cPage.dpid) == fileId) {
-            int pinCount = -1;
-            if (cPage.dirty.get()) {
-                if (flushDirtyPages) {
-                    write(cPage);
-                }
-                cPage.dirty.set(false);
-                pinCount = cPage.pinCount.decrementAndGet();
-            } else {
-                pinCount = cPage.pinCount.get();
-            }
-            if (pinCount != 0) {
-                throw new IllegalStateException("Page is pinned and file is being closed. Pincount is: " + pinCount);
-            }
-            cPage.invalidate();
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public void closeFile(int fileId) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Closing file: " + fileId + " in cache: " + this);
-        }
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine(dumpState());
-        }
-
-        synchronized (fileInfoMap) {
-            BufferedFileHandle fInfo = fileInfoMap.get(fileId);
-            if (fInfo == null) {
-                throw new HyracksDataException("Closing unopened file");
-            }
-            if (fInfo.decReferenceCount() < 0) {
-                throw new HyracksDataException("Closed fileId: " + fileId + " more times than it was opened.");
-            }
-        }
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Closed file: " + fileId + " in cache: " + this);
-        }
-    }
-
-    @Override
-    public void flushDirtyPage(ICachedPage page) throws HyracksDataException {
-        // Assumes the caller has pinned the page.
-        cleanerThread.cleanPage((CachedPage) page, true);
-    }
-
-    @Override
-    public void force(int fileId, boolean metadata) throws HyracksDataException {
-        BufferedFileHandle fInfo = null;
-        synchronized (fileInfoMap) {
-            fInfo = fileInfoMap.get(fileId);
-            ioManager.sync(fInfo.getFileHandle(), metadata);
-        }
-    }
-
-    @Override
-    public synchronized void deleteFile(int fileId, boolean flushDirtyPages) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Deleting file: " + fileId + " in cache: " + this);
-        }
-        if (flushDirtyPages) {
-            synchronized (fileInfoMap) {
-                sweepAndFlush(fileId, flushDirtyPages);
-            }
-        }
-        synchronized (fileInfoMap) {
-            BufferedFileHandle fInfo = null;
-            try {
-                fInfo = fileInfoMap.get(fileId);
-                if (fInfo != null && fInfo.getReferenceCount() > 0) {
-                    throw new HyracksDataException("Deleting open file");
-                }
-            } finally {
-                fileMapManager.unregisterFile(fileId);
-                if (fInfo != null) {
-                    // Mark the fInfo as deleted, 
-                    // such that when its pages are reclaimed in openFile(),
-                    // the pages are not flushed to disk but only invalidated.
-                    if (!fInfo.fileHasBeenDeleted()) {
-                        ioManager.close(fInfo.getFileHandle());
-                        fInfo.markAsDeleted();
-                    }
-                }
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java
deleted file mode 100644
index 13f7d52..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.common.buffercache;
-
-import java.util.concurrent.atomic.AtomicLong;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-
-/**
- * Implementation of an IBufferCache that counts the number of pins/unpins,
- * latches/unlatches, and file create/delete/open/close called on it. It
- * delegates the actual functionality to another IBufferCache set in the c'tor.
- * The counters are updated in a thread-safe fashion using AtomicLong.
- */
-public class DebugBufferCache implements IBufferCache {
-
-    // Actual BufferCache functionality is delegated to this bufferCache.
-    private final IBufferCache bufferCache;
-    private AtomicLong pinCount = new AtomicLong();
-    private AtomicLong unpinCount = new AtomicLong();
-    private AtomicLong readLatchCount = new AtomicLong();
-    private AtomicLong readUnlatchCount = new AtomicLong();
-    private AtomicLong writeLatchCount = new AtomicLong();
-    private AtomicLong writeUnlatchCount = new AtomicLong();
-    private AtomicLong createFileCount = new AtomicLong();
-    private AtomicLong deleteFileCount = new AtomicLong();
-    private AtomicLong openFileCount = new AtomicLong();
-    private AtomicLong closeFileCount = new AtomicLong();
-
-    public DebugBufferCache(IBufferCache bufferCache) {
-        this.bufferCache = bufferCache;
-        resetCounters();
-    }
-
-    @Override
-    public void createFile(FileReference fileRef) throws HyracksDataException {
-        bufferCache.createFile(fileRef);
-        createFileCount.addAndGet(1);
-    }
-
-    @Override
-    public void openFile(int fileId) throws HyracksDataException {
-        bufferCache.openFile(fileId);
-        openFileCount.addAndGet(1);
-    }
-
-    @Override
-    public void closeFile(int fileId) throws HyracksDataException {
-        bufferCache.closeFile(fileId);
-        closeFileCount.addAndGet(1);
-    }
-
-    @Override
-    public void deleteFile(int fileId, boolean flushDirtyPages) throws HyracksDataException {
-        bufferCache.deleteFile(fileId, flushDirtyPages);
-        deleteFileCount.addAndGet(1);
-    }
-
-    @Override
-    public ICachedPage tryPin(long dpid) throws HyracksDataException {
-        return bufferCache.tryPin(dpid);
-    }
-
-    @Override
-    public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
-        ICachedPage page = bufferCache.pin(dpid, newPage);
-        pinCount.addAndGet(1);
-        return page;
-    }
-
-    @Override
-    public void unpin(ICachedPage page) throws HyracksDataException {
-        bufferCache.unpin(page);
-        unpinCount.addAndGet(1);
-    }
-
-    @Override
-    public int getPageSize() {
-        return bufferCache.getPageSize();
-    }
-
-    @Override
-    public int getNumPages() {
-        return bufferCache.getNumPages();
-    }
-
-    @Override
-    public void close() {
-        bufferCache.close();
-    }
-
-    public void resetCounters() {
-        pinCount.set(0);
-        unpinCount.set(0);
-        readLatchCount.set(0);
-        readUnlatchCount.set(0);
-        writeLatchCount.set(0);
-        writeUnlatchCount.set(0);
-        createFileCount.set(0);
-        deleteFileCount.set(0);
-        openFileCount.set(0);
-        closeFileCount.set(0);
-    }
-
-    public long getPinCount() {
-        return pinCount.get();
-    }
-
-    public long getUnpinCount() {
-        return unpinCount.get();
-    }
-
-    public long getReadLatchCount() {
-        return readLatchCount.get();
-    }
-
-    public long getReadUnlatchCount() {
-        return readUnlatchCount.get();
-    }
-
-    public long getWriteLatchCount() {
-        return writeLatchCount.get();
-    }
-
-    public long getWriteUnlatchCount() {
-        return writeUnlatchCount.get();
-    }
-
-    public long getCreateFileCount() {
-        return createFileCount.get();
-    }
-
-    public long getDeleteFileCount() {
-        return deleteFileCount.get();
-    }
-
-    public long getOpenFileCount() {
-        return openFileCount.get();
-    }
-
-    public long getCloseFileCount() {
-        return closeFileCount.get();
-    }
-
-	@Override
-	public void flushDirtyPage(ICachedPage page) throws HyracksDataException {
-		bufferCache.flushDirtyPage(page);
-	}
-
-	@Override
-	public void force(int fileId, boolean metadata) throws HyracksDataException {
-		bufferCache.force(fileId, metadata);
-	}
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceFactory.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceFactory.java
deleted file mode 100644
index c443b0b..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceFactory.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-public interface ILocalResourceFactory {
-    public LocalResource createLocalResource(long resourceId, String resourceName, int partition);
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceFactoryProvider.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceFactoryProvider.java
deleted file mode 100644
index fb6a210..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceFactoryProvider.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.io.Serializable;
-
-public interface ILocalResourceFactoryProvider extends Serializable {
-    public ILocalResourceFactory getLocalResourceFactory();
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepository.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepository.java
deleted file mode 100644
index 2dce39e..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepository.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface ILocalResourceRepository {
-
-    public LocalResource getResourceById(long id) throws HyracksDataException;
-
-    public LocalResource getResourceByName(String name) throws HyracksDataException;
-
-    public void insert(LocalResource resource) throws HyracksDataException;
-
-    public void deleteResourceById(long id) throws HyracksDataException;
-
-    public void deleteResourceByName(String name) throws HyracksDataException;
-
-    public List<LocalResource> getAllResources() throws HyracksDataException;
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepositoryFactory.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepositoryFactory.java
deleted file mode 100644
index 4b3124a..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepositoryFactory.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface ILocalResourceRepositoryFactory {
-    public ILocalResourceRepository createRepository() throws HyracksDataException;
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/LocalResource.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/LocalResource.java
deleted file mode 100644
index f4575f3..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/LocalResource.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.io.Serializable;
-
-public class LocalResource implements Serializable {
-    private static final long serialVersionUID = 1L;
-    private final long resourceId;
-    private final String resourceName;
-    private final int partition;
-    private final int resourceType;
-    private final Object object;
-
-    public static final int TransientResource = 0;
-    public static final int LSMBTreeResource = 1;
-    public static final int LSMRTreeResource = 2;
-    public static final int LSMInvertedIndexResource = 3;
-
-    public LocalResource(long resourceId, String resourceName, int partition, int resourceType, Object object) {
-        this.resourceId = resourceId;
-        this.resourceName = resourceName;
-        this.partition = partition;
-        this.resourceType = resourceType;
-        this.object = object;
-    }
-
-    public long getResourceId() {
-        return resourceId;
-    }
-
-    public String getResourceName() {
-        return resourceName;
-    }
-
-    public int getPartition() {
-        return partition;
-    }
-
-    public int getResourceType() {
-        return resourceType;
-    }
-
-    public Object getResourceObject() {
-        return object;
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/NoOpLocalResourceFactoryProvider.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/NoOpLocalResourceFactoryProvider.java
deleted file mode 100644
index e69f4e7..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/NoOpLocalResourceFactoryProvider.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-/**
- * Dummy operation factory provider that does nothing.
- */
-public enum NoOpLocalResourceFactoryProvider implements ILocalResourceFactoryProvider {
-    INSTANCE;
-
-    @Override
-    public ILocalResourceFactory getLocalResourceFactory() {
-        return null;
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/PersistentLocalResourceRepository.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/PersistentLocalResourceRepository.java
deleted file mode 100644
index 1eedc78..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/PersistentLocalResourceRepository.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class PersistentLocalResourceRepository implements ILocalResourceRepository {
-
-    private final List<String> mountPoints;
-    private static final String METADATA_FILE_NAME = ".metadata";
-    private Map<String, LocalResource> name2ResourceMap = new HashMap<String, LocalResource>();
-    private Map<Long, LocalResource> id2ResourceMap = new HashMap<Long, LocalResource>();
-
-    public PersistentLocalResourceRepository(List<String> mountPoints, String rootDir) throws HyracksDataException {
-        this.mountPoints = mountPoints;
-
-        File rootFile = new File(this.mountPoints.get(0), rootDir);
-        if (!rootFile.exists()) {
-            throw new HyracksDataException(rootFile.getAbsolutePath() + "doesn't exist.");
-        }
-
-        FilenameFilter filter = new FilenameFilter() {
-            public boolean accept(File dir, String name) {
-                if (name.equalsIgnoreCase(METADATA_FILE_NAME)) {
-                    return true;
-                } else {
-                    return false;
-                }
-            }
-        };
-
-        File[] childFileList = rootFile.listFiles();
-        if (childFileList == null) {
-        	return;
-        }
-        for (File childFile : childFileList) {
-            if (childFile.isDirectory()) {
-                File[] targetFileList = childFile.listFiles(filter);
-                if (targetFileList == null) {
-                	return;
-                }
-                
-                for (File targetFile : targetFileList) {
-                    LocalResource localResource = readLocalResource(targetFile);
-                    id2ResourceMap.put(localResource.getResourceId(), localResource);
-                    name2ResourceMap.put(localResource.getResourceName(), localResource);
-                }
-            }
-        }
-    }
-
-    @Override
-    public LocalResource getResourceById(long id) throws HyracksDataException {
-        return id2ResourceMap.get(id);
-    }
-
-    @Override
-    public LocalResource getResourceByName(String name) throws HyracksDataException {
-        return name2ResourceMap.get(name);
-    }
-
-    @Override
-    public synchronized void insert(LocalResource resource) throws HyracksDataException {
-        long id = resource.getResourceId();
-
-        if (id2ResourceMap.containsKey(id)) {
-            throw new HyracksDataException("Duplicate resource");
-        }
-        id2ResourceMap.put(id, resource);
-        name2ResourceMap.put(resource.getResourceName(), resource);
-
-        FileOutputStream fos = null;
-        ObjectOutputStream oosToFos = null;
-        try {
-            fos = new FileOutputStream(getFileName(mountPoints.get(0), resource.getResourceName()));
-            oosToFos = new ObjectOutputStream(fos);
-            oosToFos.writeObject(resource);
-            oosToFos.flush();
-        } catch (IOException e) {
-            throw new HyracksDataException(e);
-        } finally {
-            if (oosToFos != null) {
-                try {
-                    oosToFos.close();
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-            if (oosToFos == null && fos != null) {
-                try {
-                    fos.close();
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-        }
-    }
-
-    @Override
-    public synchronized void deleteResourceById(long id) throws HyracksDataException {
-        LocalResource resource = id2ResourceMap.get(id);
-        if (resource == null) {
-            throw new HyracksDataException("Resource doesn't exist");
-        }
-        id2ResourceMap.remove(id);
-        name2ResourceMap.remove(resource.getResourceName());
-        File file = new File(getFileName(mountPoints.get(0), resource.getResourceName()));
-        file.delete();
-    }
-
-    @Override
-    public synchronized void deleteResourceByName(String name) throws HyracksDataException {
-        LocalResource resource = name2ResourceMap.get(name);
-        if (resource == null) {
-            throw new HyracksDataException("Resource doesn't exist");
-        }
-        id2ResourceMap.remove(resource.getResourceId());
-        name2ResourceMap.remove(name);
-        File file = new File(getFileName(mountPoints.get(0), resource.getResourceName()));
-        file.delete();
-    }
-
-    @Override
-    public List<LocalResource> getAllResources() throws HyracksDataException {
-        List<LocalResource> resources = new ArrayList<LocalResource>();
-        for (LocalResource resource : id2ResourceMap.values()) {
-            resources.add(resource);
-        }
-        return resources;
-    }
-
-    private String getFileName(String mountPoint, String baseDir) {
-
-        String fileName = new String(mountPoint);
-
-        if (!baseDir.endsWith(System.getProperty("file.separator"))) {
-            baseDir += System.getProperty("file.separator");
-        }
-        fileName += baseDir + METADATA_FILE_NAME;
-
-        return fileName;
-    }
-
-    private LocalResource readLocalResource(File file) throws HyracksDataException {
-        FileInputStream fis = null;
-        ObjectInputStream oisFromFis = null;
-
-        try {
-            fis = new FileInputStream(file);
-            oisFromFis = new ObjectInputStream(fis);
-            LocalResource resource = (LocalResource) oisFromFis.readObject();
-            return resource;
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        } finally {
-            if (oisFromFis != null) {
-                try {
-                    oisFromFis.close();
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-            if (oisFromFis == null && fis != null) {
-                try {
-                    fis.close();
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-        }
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/PersistentLocalResourceRepositoryFactory.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/PersistentLocalResourceRepositoryFactory.java
deleted file mode 100644
index a2d0fa3..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/PersistentLocalResourceRepositoryFactory.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-
-public class PersistentLocalResourceRepositoryFactory implements ILocalResourceRepositoryFactory {
-    private static final String rootDir = "";
-    private IIOManager ioManager;
-
-    public PersistentLocalResourceRepositoryFactory(IIOManager ioManager) {
-        this.ioManager = ioManager;
-    }
-
-    @Override
-    public ILocalResourceRepository createRepository() throws HyracksDataException {
-        List<String> mountPoints = new ArrayList<String>();
-        List<IODeviceHandle> devices = ioManager.getIODevices();
-
-        for (IODeviceHandle dev : devices) {
-            mountPoints.add(dev.getPath().getPath());
-        }
-        return new PersistentLocalResourceRepository(mountPoints, rootDir);
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ResourceIdFactory.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ResourceIdFactory.java
deleted file mode 100644
index d4ec01f..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ResourceIdFactory.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.util.concurrent.atomic.AtomicLong;
-
-public class ResourceIdFactory {
-    private AtomicLong id = null;
-
-    public ResourceIdFactory(long initialValue) {
-        id = new AtomicLong(initialValue);
-    }
-
-    public long createId() {
-        return id.getAndIncrement();
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ResourceIdFactoryProvider.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ResourceIdFactoryProvider.java
deleted file mode 100644
index 62cdb36..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ResourceIdFactoryProvider.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class ResourceIdFactoryProvider {
-    private ILocalResourceRepository localResourceRepository;
-
-    public ResourceIdFactoryProvider(ILocalResourceRepository localResourceRepository) {
-        this.localResourceRepository = localResourceRepository;
-    }
-
-    public ResourceIdFactory createResourceIdFactory() throws HyracksDataException {
-        List<LocalResource> localResources = localResourceRepository.getAllResources();
-        long largestResourceId = 0;
-        for (LocalResource localResource : localResources) {
-            if (largestResourceId < localResource.getResourceId()) {
-                largestResourceId = localResource.getResourceId();
-            }
-        }
-        return new ResourceIdFactory(largestResourceId);
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientFileMapManager.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientFileMapManager.java
deleted file mode 100644
index e8085da..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientFileMapManager.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-
-public class TransientFileMapManager implements IFileMapManager {
-    private static final long serialVersionUID = 1L;
-
-    private Map<Integer, FileReference> id2nameMap = new HashMap<Integer, FileReference>();
-    private Map<FileReference, Integer> name2IdMap = new HashMap<FileReference, Integer>();
-    private int idCounter = 0;
-
-    @Override
-    public FileReference lookupFileName(int fileId) throws HyracksDataException {
-        FileReference fRef = id2nameMap.get(fileId);
-        if (fRef == null) {
-            throw new HyracksDataException("No mapping found for id: " + fileId);
-        }
-        return fRef;
-    }
-
-    @Override
-    public int lookupFileId(FileReference fileRef) throws HyracksDataException {
-        Integer fileId = name2IdMap.get(fileRef);
-        if (fileId == null) {
-            throw new HyracksDataException("No mapping found for name: " + fileRef);
-        }
-        return fileId;
-    }
-
-    @Override
-    public boolean isMapped(FileReference fileRef) {
-        return name2IdMap.containsKey(fileRef);
-    }
-
-    @Override
-    public boolean isMapped(int fileId) {
-        return id2nameMap.containsKey(fileId);
-    }
-
-    @Override
-    public void unregisterFile(int fileId) throws HyracksDataException {
-        FileReference fileRef = id2nameMap.remove(fileId);
-        name2IdMap.remove(fileRef);
-    }
-
-    @Override
-    public void registerFile(FileReference fileRef) throws HyracksDataException {
-        Integer fileId = idCounter++;
-        id2nameMap.put(fileId, fileRef);
-        name2IdMap.put(fileRef, fileId);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceFactory.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceFactory.java
deleted file mode 100644
index ffd3dff..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceFactory.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-public class TransientLocalResourceFactory implements ILocalResourceFactory {
-
-    @Override
-    public LocalResource createLocalResource(long resourceId, String resourceName, int partition) {
-        return new LocalResource(resourceId, resourceName, partition, LocalResource.TransientResource, null);
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceFactoryProvider.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceFactoryProvider.java
deleted file mode 100644
index d9a51a3..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceFactoryProvider.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-public class TransientLocalResourceFactoryProvider implements ILocalResourceFactoryProvider {
-    
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public ILocalResourceFactory getLocalResourceFactory() {
-        return new TransientLocalResourceFactory();
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepository.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepository.java
deleted file mode 100644
index 55bd807..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepository.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class TransientLocalResourceRepository implements ILocalResourceRepository {
-
-    private Map<String, LocalResource> name2ResourceMap = new HashMap<String, LocalResource>();
-    private Map<Long, LocalResource> id2ResourceMap = new HashMap<Long, LocalResource>();
-
-    @Override
-    public LocalResource getResourceById(long id) throws HyracksDataException {
-        return id2ResourceMap.get(id);
-    }
-
-    @Override
-    public LocalResource getResourceByName(String name) throws HyracksDataException {
-        return name2ResourceMap.get(name);
-    }
-
-    @Override
-    public synchronized void insert(LocalResource resource) throws HyracksDataException {
-        long id = resource.getResourceId();
-
-        if (id2ResourceMap.containsKey(id)) {
-            throw new HyracksDataException("Duplicate resource");
-        }
-        id2ResourceMap.put(id, resource);
-        name2ResourceMap.put(resource.getResourceName(), resource);
-    }
-
-    @Override
-    public synchronized void deleteResourceById(long id) throws HyracksDataException {
-        LocalResource resource = id2ResourceMap.get(id);
-        if (resource == null) {
-            throw new HyracksDataException("Resource doesn't exist");
-        }
-        id2ResourceMap.remove(id);
-        name2ResourceMap.remove(resource.getResourceName());
-    }
-
-    @Override
-    public synchronized void deleteResourceByName(String name) throws HyracksDataException {
-        LocalResource resource = name2ResourceMap.get(name);
-        if (resource == null) {
-            throw new HyracksDataException("Resource doesn't exist");
-        }
-        id2ResourceMap.remove(resource.getResourceId());
-        name2ResourceMap.remove(name);
-    }
-
-    @Override
-    public List<LocalResource> getAllResources() throws HyracksDataException {
-        List<LocalResource> resources = new ArrayList<LocalResource>();
-        for (LocalResource resource : id2ResourceMap.values()) {
-            resources.add(resource);
-        }
-        return resources;
-    }
-}
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepositoryFactory.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepositoryFactory.java
deleted file mode 100644
index a6b00a4..0000000
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepositoryFactory.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class TransientLocalResourceRepositoryFactory implements ILocalResourceRepositoryFactory {
-
-    @Override
-    public ILocalResourceRepository createRepository() throws HyracksDataException {
-        return new TransientLocalResourceRepository();
-    }
-}
diff --git a/hyracks-test-support/pom.xml b/hyracks-test-support/pom.xml
deleted file mode 100644
index 6db8956..0000000
--- a/hyracks-test-support/pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-test-support</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-control-nc</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-rtree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractModificationOperationCallbackTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractModificationOperationCallbackTest.java
deleted file mode 100644
index 30843aa..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractModificationOperationCallbackTest.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-
-public abstract class AbstractModificationOperationCallbackTest extends AbstractOperationCallbackTest {
-
-    protected final ArrayTupleBuilder builder;
-    protected final ArrayTupleReference tuple;
-    protected final IModificationOperationCallback cb;
-
-    protected boolean isFoundNull;
-
-    public AbstractModificationOperationCallbackTest() {
-        this.builder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
-        this.tuple = new ArrayTupleReference();
-        this.cb = new VeriyfingModificationCallback();
-        this.isFoundNull = true;
-    }
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        super.tearDown();
-    }
-
-    @Test
-    public void modificationCallbackTest() throws Exception {
-        IIndexAccessor accessor = index.createAccessor(cb, NoOpOperationCallback.INSTANCE);
-
-        isFoundNull = true;
-        for (int i = 0; i < AccessMethodTestsConfig.BTREE_NUM_TUPLES_TO_INSERT; i++) {
-            TupleUtils.createIntegerTuple(builder, tuple, i);
-            accessor.insert(tuple);
-        }
-
-        isFoundNull = false;
-        for (int i = 0; i < AccessMethodTestsConfig.BTREE_NUM_TUPLES_TO_INSERT; i++) {
-            TupleUtils.createIntegerTuple(builder, tuple, i);
-            accessor.upsert(tuple);
-        }
-
-        isFoundNull = false;
-        for (int i = 0; i < AccessMethodTestsConfig.BTREE_NUM_TUPLES_TO_INSERT; i++) {
-            TupleUtils.createIntegerTuple(builder, tuple, i);
-            accessor.delete(tuple);
-        }
-    }
-
-    private class VeriyfingModificationCallback implements IModificationOperationCallback {
-
-        @Override
-        public void before(ITupleReference tuple) {
-            Assert.assertEquals(0, cmp.compare(AbstractModificationOperationCallbackTest.this.tuple, tuple));
-        }
-
-        @Override
-        public void found(ITupleReference before, ITupleReference after) {
-            if (isFoundNull) {
-                Assert.assertEquals(null, before);
-            } else {
-                Assert.assertEquals(0, cmp.compare(AbstractModificationOperationCallbackTest.this.tuple, before));
-            }
-            Assert.assertEquals(0, cmp.compare(AbstractModificationOperationCallbackTest.this.tuple, after));
-        }
-
-    }
-
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractOperationCallbackTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractOperationCallbackTest.java
deleted file mode 100644
index 41dfdfe..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractOperationCallbackTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public abstract class AbstractOperationCallbackTest {
-    protected static final int NUM_KEY_FIELDS = 1;
-
-    @SuppressWarnings("rawtypes")
-    protected final ISerializerDeserializer[] keySerdes;
-    protected final MultiComparator cmp;
-    protected final int[] bloomFilterKeyFields;
-
-    protected IIndex index;
-
-    protected abstract void createIndexInstance() throws Exception;
-
-    public AbstractOperationCallbackTest() {
-        this.keySerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
-        this.cmp = MultiComparator.create(SerdeUtils.serdesToComparatorFactories(keySerdes, keySerdes.length));
-        bloomFilterKeyFields = new int[NUM_KEY_FIELDS];
-        for (int i = 0; i < NUM_KEY_FIELDS; ++i) {
-            bloomFilterKeyFields[i] = i;
-        }
-    }
-
-    public void setup() throws Exception {
-        createIndexInstance();
-        index.create();
-        index.activate();
-    }
-
-    public void tearDown() throws Exception {
-        index.deactivate();
-        index.destroy();
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractSearchOperationCallbackTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractSearchOperationCallbackTest.java
deleted file mode 100644
index ffa7c9a..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/AbstractSearchOperationCallbackTest.java
+++ /dev/null
@@ -1,230 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-
-public abstract class AbstractSearchOperationCallbackTest extends AbstractOperationCallbackTest {
-    private static final int NUM_TASKS = 2;
-
-    protected final Lock lock;
-    protected final Condition condition;
-
-    protected ExecutorService executor;
-    protected boolean insertTaskStarted;
-
-    public AbstractSearchOperationCallbackTest() {
-        this.lock = new ReentrantLock(true);
-        this.condition = lock.newCondition();
-        this.insertTaskStarted = false;
-    }
-
-    @Before
-    public void setup() throws Exception {
-        executor = Executors.newFixedThreadPool(NUM_TASKS);
-        super.setup();
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        executor.shutdown();
-        super.tearDown();
-    }
-
-    @Test
-    public void searchCallbackTest() throws Exception {
-        Future<Boolean> insertFuture = executor.submit(new InsertionTask());
-        Future<Boolean> searchFuture = executor.submit(new SearchTask());
-        Assert.assertTrue(searchFuture.get());
-        Assert.assertTrue(insertFuture.get());
-    }
-
-    private class SearchTask implements Callable<Boolean> {
-        private final ISearchOperationCallback cb;
-        private final IIndexAccessor accessor;
-        private final IIndexCursor cursor;
-        private final RangePredicate predicate;
-        private final ArrayTupleBuilder builder;
-        private final ArrayTupleReference tuple;
-
-        private boolean blockOnHigh;
-        private int blockingValue;
-        private int expectedAfterBlock;
-
-        public SearchTask() {
-            this.cb = new SynchronizingSearchOperationCallback();
-            this.accessor = index.createAccessor(NoOpOperationCallback.INSTANCE, cb);
-            this.cursor = accessor.createSearchCursor();
-            this.predicate = new RangePredicate();
-            this.builder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
-            this.tuple = new ArrayTupleReference();
-
-            this.blockOnHigh = false;
-            this.blockingValue = -1;
-            this.expectedAfterBlock = -1;
-        }
-
-        @Override
-        public Boolean call() throws Exception {
-            lock.lock();
-            try {
-                if (!insertTaskStarted) {
-                    condition.await();
-                }
-
-                // begin a search on [101, +inf), blocking on 101
-                TupleUtils.createIntegerTuple(builder, tuple, 101);
-                predicate.setLowKey(tuple, true);
-                predicate.setHighKey(null, true);
-                accessor.search(cursor, predicate);
-                consumeIntTupleRange(101, 101, true, 101);
-
-                // consume tuples [102, 152], blocking on 151
-                consumeIntTupleRange(102, 151, true, 152);
-
-                // consume tuples [153, 300]
-                consumeIntTupleRange(153, 300, false, -1);
-
-                cursor.close();
-            } finally {
-                lock.unlock();
-            }
-
-            return true;
-        }
-
-        private void consumeIntTupleRange(int begin, int end, boolean blockOnHigh, int expectedAfterBlock)
-                throws Exception {
-            if (end < begin) {
-                throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
-            }
-
-            for (int i = begin; i <= end; i++) {
-                if (blockOnHigh == true && i == end) {
-                    this.blockOnHigh = true;
-                    this.blockingValue = end;
-                    this.expectedAfterBlock = expectedAfterBlock;
-                }
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                if (!cursor.hasNext()) {
-                    Assert.fail("Failed to consume entire tuple range since cursor is exhausted.");
-                }
-                cursor.next();
-
-                if (this.blockOnHigh) {
-                    TupleUtils.createIntegerTuple(builder, tuple, expectedAfterBlock);
-                }
-                Assert.assertEquals(0, cmp.compare(tuple, cursor.getTuple()));
-            }
-        }
-
-        private class SynchronizingSearchOperationCallback implements ISearchOperationCallback {
-
-            @Override
-            public boolean proceed(ITupleReference tuple) {
-                Assert.assertEquals(0, cmp.compare(SearchTask.this.tuple, tuple));
-                return false;
-            }
-
-            @Override
-            public void reconcile(ITupleReference tuple) {
-                Assert.assertEquals(0, cmp.compare(SearchTask.this.tuple, tuple));
-                if (blockOnHigh) {
-                    try {
-                        TupleUtils.createIntegerTuple(builder, SearchTask.this.tuple, expectedAfterBlock);
-                    } catch (HyracksDataException e) {
-                        e.printStackTrace();
-                    }
-                    condition.signal();
-                    condition.awaitUninterruptibly();
-                    blockOnHigh = false;
-                }
-            }
-
-            @Override
-            public void cancel(ITupleReference tuple) {
-                try {
-                    TupleUtils.createIntegerTuple(builder, SearchTask.this.tuple, blockingValue);
-                    Assert.assertEquals(0, cmp.compare(tuple, SearchTask.this.tuple));
-                    TupleUtils.createIntegerTuple(builder, SearchTask.this.tuple, expectedAfterBlock);
-                } catch (HyracksDataException e) {
-                    e.printStackTrace();
-                }
-            }
-
-        }
-    }
-
-    private class InsertionTask implements Callable<Boolean> {
-        private final IIndexAccessor accessor;
-        private final ArrayTupleBuilder builder;
-        private final ArrayTupleReference tuple;
-
-        public InsertionTask() {
-            this.accessor = index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            this.builder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
-            this.tuple = new ArrayTupleReference();
-        }
-
-        @Override
-        public Boolean call() throws Exception {
-            lock.lock();
-            try {
-                insertTaskStarted = true;
-
-                // insert tuples [101, 200]
-                insertIntTupleRange(101, 200);
-                condition.signal();
-                condition.await();
-
-                // insert tuples [1, 100]
-                insertIntTupleRange(1, 100);
-                condition.signal();
-                condition.await();
-
-                // insert tuples [201, 300] and delete tuple 151
-                insertIntTupleRange(201, 300);
-                TupleUtils.createIntegerTuple(builder, tuple, 151);
-                accessor.delete(tuple);
-                condition.signal();
-            } finally {
-                lock.unlock();
-            }
-
-            return true;
-        }
-
-        private void insertIntTupleRange(int begin, int end) throws Exception {
-            if (end < begin) {
-                throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
-            }
-
-            for (int i = begin; i <= end; i++) {
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                accessor.insert(tuple);
-            }
-        }
-
-    }
-
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
deleted file mode 100644
index 6d7ee4d..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexBulkLoadTest extends OrderedIndexTestDriver {
-
-    private final OrderedIndexTestUtils orderedIndexTestUtils;
-    private final int bulkLoadRounds;
-
-    public OrderedIndexBulkLoadTest(BTreeLeafFrameType[] leafFrameTypesToTest, int bulkLoadRounds) {
-        super(leafFrameTypesToTest);
-        this.bulkLoadRounds = bulkLoadRounds;
-        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
-            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
-            throws Exception {
-        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        for (int i = 0; i < bulkLoadRounds; i++) {
-            // We assume all fieldSerdes are of the same type. Check the first
-            // one
-            // to determine which field types to generate.
-            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-                orderedIndexTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
-            } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
-                orderedIndexTestUtils.bulkLoadStringTuples(ctx, numTuplesToInsert, getRandom());
-            }
-            orderedIndexTestUtils.checkPointSearches(ctx);
-            orderedIndexTestUtils.checkScan(ctx);
-            orderedIndexTestUtils.checkDiskOrderScan(ctx);
-            orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
-            if (prefixLowKey != null && prefixHighKey != null) {
-                orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
-            }
-        }
-
-        ctx.getIndex().validate();
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-    
-    @Override
-    protected String getTestOpName() {
-        return "BulkLoad";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
deleted file mode 100644
index b96f252..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexDeleteTest extends OrderedIndexTestDriver {
-
-    private final OrderedIndexTestUtils orderedIndexTestUtils;
-
-    public OrderedIndexDeleteTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
-        super(leafFrameTypesToTest);
-        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
-    }
-
-    private static final int numInsertRounds = AccessMethodTestsConfig.BTREE_NUM_INSERT_ROUNDS;
-    private static final int numDeleteRounds = AccessMethodTestsConfig.BTREE_NUM_DELETE_ROUNDS;
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
-            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
-            throws Exception {
-        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        for (int i = 0; i < numInsertRounds; i++) {
-            // We assume all fieldSerdes are of the same type. Check the first
-            // one to determine which field types to generate.
-            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-                orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
-            } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
-                orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
-            }
-            int numTuplesPerDeleteRound = (int) Math
-                    .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
-            for (int j = 0; j < numDeleteRounds; j++) {
-                orderedIndexTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
-                orderedIndexTestUtils.checkPointSearches(ctx);
-                orderedIndexTestUtils.checkScan(ctx);
-                orderedIndexTestUtils.checkDiskOrderScan(ctx);
-                orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
-                if (prefixLowKey != null && prefixHighKey != null) {
-                    orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
-                }
-            }
-        }
-
-        ctx.getIndex().validate();
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "Delete";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
deleted file mode 100644
index 970526e..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
+++ /dev/null
@@ -1,818 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import static org.junit.Assert.fail;
-
-import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.UnsortedInputException;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexExamplesTest {
-    protected static final Logger LOGGER = Logger.getLogger(OrderedIndexExamplesTest.class.getName());
-    protected final Random rnd = new Random(50);
-
-    protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException;
-
-    /**
-     * Fixed-Length Key,Value Example. Create a tree index with one fixed-length
-     * key field and one fixed-length value field. Fill index with random values
-     * using insertions (not bulk load). Perform scans and range search.
-     */
-    @Test
-    public void fixedLengthKeyValueExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Fixed-Length Key,Value Example.");
-        }
-
-        // Declare fields.
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-
-        // Declare keys.
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        treeIndex.create();
-        treeIndex.activate();
-
-        long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Inserting into tree...");
-        }
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-        int numInserts = 10000;
-        for (int i = 0; i < numInserts; i++) {
-            int f0 = rnd.nextInt() % numInserts;
-            int f1 = 5;
-            TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 1000 == 0) {
-                    LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
-                }
-            }
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            }
-        }
-        long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
-        }
-
-        orderedScan(indexAccessor, fieldSerdes);
-        diskOrderScan(indexAccessor, fieldSerdes);
-
-        // Build low key.
-        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(keyFieldCount);
-        ArrayTupleReference lowKey = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -1000);
-
-        // Build high key.
-        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(keyFieldCount);
-        ArrayTupleReference highKey = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(highKeyTb, highKey, 1000);
-
-        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
-
-        treeIndex.validate();
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * This test the btree page split. Originally this test didn't pass since
-     * the btree was spliting by cardinality and not size. Thus, we might end
-     * up with a situation where there is not enough space to insert the new
-     * tuple after the split which will throw an error and the split won't be
-     * propagated to upper level; thus, the tree is corrupted. Now, it split
-     * page by size. The correct behavior on abnormally large keys/values.
-     */
-    @Test
-    public void pageSplitTestExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree page split test.");
-        }
-
-        // Declare fields.
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Declare keys.
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        treeIndex.create();
-        treeIndex.activate();
-
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        String key = "111";
-        String data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, key, data);
-        indexAccessor.insert(tuple);
-
-        key = "222";
-        data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, key, data);
-        indexAccessor.insert(tuple);
-
-        key = "333";
-        data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, key, data);
-        indexAccessor.insert(tuple);
-
-        key = "444";
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, key, data);
-        indexAccessor.insert(tuple);
-
-        key = "555";
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, key, data);
-        indexAccessor.insert(tuple);
-
-        key = "666";
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, key, data);
-        indexAccessor.insert(tuple);
-
-        treeIndex.validate();
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Composite Key Example (Non-Unique Index). Create a tree index with two
-     * fixed-length key fields and one fixed-length value field. Fill index with
-     * random values using insertions (not bulk load) Perform scans and range
-     * search.
-     */
-    @Test
-    public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Composite Key Test");
-        }
-
-        // Declare fields.
-        int fieldCount = 3;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        // declare keys
-        int keyFieldCount = 2;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-        bloomFilterKeyFields[1] = 1;
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        treeIndex.create();
-        treeIndex.activate();
-
-        long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Inserting into tree...");
-        }
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-        int numInserts = 10000;
-        for (int i = 0; i < 10000; i++) {
-            int f0 = rnd.nextInt() % 2000;
-            int f1 = rnd.nextInt() % 1000;
-            int f2 = 5;
-            TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 1000 == 0) {
-                    LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
-                }
-            }
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            }
-        }
-        long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
-        }
-
-        orderedScan(indexAccessor, fieldSerdes);
-        diskOrderScan(indexAccessor, fieldSerdes);
-
-        // Build low key.
-        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
-        ArrayTupleReference lowKey = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -3);
-
-        // Build high key.
-        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
-        ArrayTupleReference highKey = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(highKeyTb, highKey, 3);
-
-        // Prefix-Range search in [-3, 3]
-        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
-
-        treeIndex.validate();
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Variable-Length Example. Create a BTree with one variable-length key
-     * field and one variable-length value field. Fill BTree with random values
-     * using insertions (not bulk load) Perform ordered scans and range search.
-     */
-    @Test
-    public void varLenKeyValueExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Variable-Length Key,Value Example");
-        }
-
-        // Declare fields.
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Declare keys.
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        treeIndex.create();
-        treeIndex.activate();
-
-        long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Inserting into tree...");
-        }
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-        // Max string length to be generated.
-        int maxLength = 10;
-        int numInserts = 10000;
-        for (int i = 0; i < 10000; i++) {
-            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-            TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 1000 == 0) {
-                    LOGGER.info("Inserting[" + i + "] " + f0 + " " + f1);
-                }
-            }
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            }
-        }
-        long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
-        }
-
-        orderedScan(indexAccessor, fieldSerdes);
-        diskOrderScan(indexAccessor, fieldSerdes);
-
-        // Build low key.
-        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
-        ArrayTupleReference lowKey = new ArrayTupleReference();
-        TupleUtils.createTuple(lowKeyTb, lowKey, fieldSerdes, "cbf");
-
-        // Build high key.
-        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
-        ArrayTupleReference highKey = new ArrayTupleReference();
-        TupleUtils.createTuple(highKeyTb, highKey, fieldSerdes, "cc7");
-
-        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
-
-        treeIndex.validate();
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Deletion Example. Create a BTree with one variable-length key field and
-     * one variable-length value field. Fill B-tree with random values using
-     * insertions, then delete entries one-by-one. Repeat procedure a few times
-     * on same BTree.
-     */
-    @Test
-    public void deleteExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Deletion Example");
-        }
-
-        // Declare fields.
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Declare keys.
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        treeIndex.create();
-        treeIndex.activate();
-
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-        // Max string length to be generated.
-        int runs = 3;
-        for (int run = 0; run < runs; run++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
-                LOGGER.info("Inserting into tree...");
-            }
-            int maxLength = 10;
-            int ins = 10000;
-            String[] f0s = new String[ins];
-            String[] f1s = new String[ins];
-            int insDone = 0;
-            int[] insDoneCmp = new int[ins];
-            for (int i = 0; i < ins; i++) {
-                String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-                TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
-                f0s[i] = f0;
-                f1s[i] = f1;
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    if (i % 1000 == 0) {
-                        LOGGER.info("Inserting " + i);
-                    }
-                }
-                try {
-                    indexAccessor.insert(tuple);
-                    insDone++;
-                } catch (TreeIndexException e) {
-                }
-                insDoneCmp[i] = insDone;
-            }
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Deleting from tree...");
-            }
-            int delDone = 0;
-            for (int i = 0; i < ins; i++) {
-                TupleUtils.createTuple(tb, tuple, fieldSerdes, f0s[i], f1s[i]);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    if (i % 1000 == 0) {
-                        LOGGER.info("Deleting " + i);
-                    }
-                }
-                try {
-                    indexAccessor.delete(tuple);
-                    delDone++;
-                } catch (TreeIndexException e) {
-                }
-                if (insDoneCmp[i] != delDone) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
-                        LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
-                    }
-                    break;
-                }
-            }
-            if (insDone != delDone) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
-                }
-                break;
-            }
-        }
-        treeIndex.validate();
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Update example. Create a BTree with one variable-length key field and one
-     * variable-length value field. Fill B-tree with random values using
-     * insertions, then update entries one-by-one. Repeat procedure a few times
-     * on same BTree.
-     */
-    @Test
-    public void updateExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Update example");
-        }
-
-        // Declare fields.
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Declare keys.
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        treeIndex.create();
-        treeIndex.activate();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Inserting into tree...");
-        }
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        int maxLength = 10;
-        int ins = 10000;
-        String[] keys = new String[10000];
-        for (int i = 0; i < ins; i++) {
-            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-            TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
-            keys[i] = f0;
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 1000 == 0) {
-                    LOGGER.info("Inserting " + i);
-                }
-            }
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            }
-        }
-        // Print before doing any updates.
-        orderedScan(indexAccessor, fieldSerdes);
-
-        int runs = 3;
-        for (int run = 0; run < runs; run++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
-                LOGGER.info("Updating BTree");
-            }
-            for (int i = 0; i < ins; i++) {
-                // Generate a new random value for f1.
-                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-                TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    if (i % 1000 == 0) {
-                        LOGGER.info("Updating " + i);
-                    }
-                }
-                try {
-                    indexAccessor.update(tuple);
-                } catch (TreeIndexException e) {
-                } catch (UnsupportedOperationException e) {
-                }
-            }
-            // Do another scan after a round of updates.
-            orderedScan(indexAccessor, fieldSerdes);
-        }
-        treeIndex.validate();
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Bulk load example. Load a tree with 100,000 tuples. BTree has a composite
-     * key to "simulate" non-unique index creation.
-     */
-    @Test
-    public void bulkLoadExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Bulk load example");
-        }
-        // Declare fields.
-        int fieldCount = 3;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        // declare keys
-        int keyFieldCount = 2;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-        bloomFilterKeyFields[1] = 1;
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        treeIndex.create();
-        treeIndex.activate();
-
-        // Load sorted records.
-        int ins = 100000;
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Bulk loading " + ins + " tuples");
-        }
-        long start = System.currentTimeMillis();
-        IIndexBulkLoader bulkLoader = treeIndex.createBulkLoader(0.7f, false, ins);
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        for (int i = 0; i < ins; i++) {
-            TupleUtils.createIntegerTuple(tb, tuple, i, i, 5);
-            bulkLoader.add(tuple);
-        }
-        bulkLoader.end();
-        long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
-        }
-
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        // Build low key.
-        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
-        ArrayTupleReference lowKey = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, 44444);
-
-        // Build high key.
-        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
-        ArrayTupleReference highKey = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(highKeyTb, highKey, 44500);
-
-        // Prefix-Range search in [44444, 44500]
-        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
-
-        treeIndex.validate();
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Bulk load failure example. Repeatedly loads a tree with 1,000 tuples, of
-     * which one tuple at each possible position does not conform to the
-     * expected order. We expect the bulk load to fail with an exception.
-     */
-    @Test
-    public void bulkOrderVerificationExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Bulk load order verification example");
-        }
-        // Declare fields.
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-
-        // declare keys
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        Random rnd = new Random();
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[keyFieldCount];
-        bloomFilterKeyFields[0] = 0;
-
-        int ins = 1000;
-        for (int i = 1; i < ins; i++) {
-            ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-            treeIndex.create();
-            treeIndex.activate();
-
-            // Load sorted records, and expect to fail at tuple i.
-            IIndexBulkLoader bulkLoader = treeIndex.createBulkLoader(0.7f, true, ins);
-            for (int j = 0; j < ins; j++) {
-                if (j > i) {
-                    fail("Bulk load failure test unexpectedly succeeded past tuple: " + j);
-                }
-                int key = j;
-                if (j == i) {
-                    int swapElementCase = Math.abs(rnd.nextInt()) % 2;
-                    if (swapElementCase == 0) {
-                        // Element equal to previous element.
-                        key--;
-                    } else {
-                        // Element smaller than previous element.
-                        key -= Math.abs(Math.random() % (ins - 1)) + 1;
-                    }
-                }
-                TupleUtils.createIntegerTuple(tb, tuple, key, 5);
-                try {
-                    bulkLoader.add(tuple);
-                } catch (UnsortedInputException e) {
-                    if (j != i) {
-                        fail("Unexpected exception: " + e.getMessage());
-                    }
-                    // Success.
-                    break;
-                }
-            }
-
-            treeIndex.deactivate();
-            treeIndex.destroy();
-        }
-    }
-
-    private void orderedScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Ordered Scan:");
-        }
-        IIndexCursor scanCursor = (IIndexCursor) indexAccessor.createSearchCursor();
-        RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
-        indexAccessor.search(scanCursor, nullPred);
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(rec);
-                }
-            }
-        } finally {
-            scanCursor.close();
-        }
-    }
-
-    private void diskOrderScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
-        try {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Disk-Order Scan:");
-            }
-            ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
-            TreeIndexDiskOrderScanCursor diskOrderCursor = (TreeIndexDiskOrderScanCursor) treeIndexAccessor
-                    .createDiskOrderScanCursor();
-            treeIndexAccessor.diskOrderScan(diskOrderCursor);
-            try {
-                while (diskOrderCursor.hasNext()) {
-                    diskOrderCursor.next();
-                    ITupleReference frameTuple = diskOrderCursor.getTuple();
-                    String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info(rec);
-                    }
-                }
-            } finally {
-                diskOrderCursor.close();
-            }
-        } catch (UnsupportedOperationException e) {
-            // Ignore exception because some indexes, e.g. the LSMBTree, don't
-            // support disk-order scan.
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring disk-order scan since it's not supported.");
-            }
-        } catch (ClassCastException e) {
-            // Ignore exception because IIndexAccessor sometimes isn't
-            // an ITreeIndexAccessor, e.g., for the LSMBTree.
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring disk-order scan since it's not supported.");
-            }
-        }
-    }
-
-    private void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
-            ISerializerDeserializer[] fieldSerdes, ITupleReference lowKey, ITupleReference highKey) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
-            String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
-            LOGGER.info("Range-Search in: [ " + lowKeyString + ", " + highKeyString + "]");
-        }
-        ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor();
-        MultiComparator lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, lowKey);
-        MultiComparator highKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, highKey);
-        RangePredicate rangePred = new RangePredicate(lowKey, highKey, true, true, lowKeySearchCmp, highKeySearchCmp);
-        indexAccessor.search(rangeCursor, rangePred);
-        try {
-            while (rangeCursor.hasNext()) {
-                rangeCursor.next();
-                ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(rec);
-                }
-            }
-        } finally {
-            rangeCursor.close();
-        }
-    }
-
-    public static String randomString(int length, Random random) {
-        String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < s.length() && i < length; i++) {
-            strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
-        }
-        return strBuilder.toString();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java
deleted file mode 100644
index 32b597c..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-
-/**
- * Tests the BTree insert operation with strings and integer fields using
- * various numbers of key and payload fields.
- * Each tests first fills a BTree with randomly generated tuples. We compare the
- * following operations against expected results: 1. Point searches for all
- * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
- * search for composite keys).
- */
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexInsertTest extends OrderedIndexTestDriver {
-
-    private final OrderedIndexTestUtils orderedIndexTestUtils;
-
-    public OrderedIndexInsertTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
-        super(leafFrameTypesToTest);
-        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
-            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
-            throws Exception {
-        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        // We assume all fieldSerdes are of the same type. Check the first one
-        // to determine which field types to generate.
-        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-            orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
-        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
-            orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
-        }
-
-        orderedIndexTestUtils.checkPointSearches(ctx);
-        orderedIndexTestUtils.checkScan(ctx);
-        orderedIndexTestUtils.checkDiskOrderScan(ctx);
-
-        orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
-        if (prefixLowKey != null && prefixHighKey != null) {
-            orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
-        }
-
-        ctx.getIndex().validate();
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "Insert";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
deleted file mode 100644
index fa22f6b..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.IndexMultiThreadTestDriver;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexMultiThreadTest {
-
-    protected final Logger LOGGER = Logger.getLogger(OrderedIndexMultiThreadTest.class.getName());
-
-    // Machine-specific number of threads to use for testing.
-    protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
-    // Excessive number of threads for testing.
-    protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
-    protected final int NUM_OPERATIONS = AccessMethodTestsConfig.BTREE_MULTITHREAD_NUM_OPERATIONS;
-
-    protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
-
-    protected abstract void setUp() throws HyracksException;
-
-    protected abstract void tearDown() throws HyracksDataException;
-
-    protected abstract IIndex createIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException;
-
-    protected abstract IIndexTestWorkerFactory getWorkerFactory();
-
-    protected abstract ArrayList<TestWorkloadConf> getTestWorkloadConf();
-
-    protected abstract String getIndexTypeName();
-
-    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, int numThreads, TestWorkloadConf conf,
-            String dataMsg) throws InterruptedException, TreeIndexException, HyracksException {
-        setUp();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            String indexTypeName = getIndexTypeName();
-            LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
-                    + "; Workload: " + conf.toString() + ".");
-        }
-
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);
-
-        // This is only used for the LSM-BTree.
-        int[] bloomFilterKeyFields = new int[numKeys];
-        for (int i = 0; i < numKeys; ++i) {
-            bloomFilterKeyFields[i] = i;
-        }
-
-        IIndex index = createIndex(typeTraits, cmpFactories, bloomFilterKeyFields);
-        IIndexTestWorkerFactory workerFactory = getWorkerFactory();
-
-        // 4 batches per thread.
-        int batchSize = (NUM_OPERATIONS / numThreads) / 4;
-
-        IndexMultiThreadTestDriver driver = new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops,
-                conf.opProbs);
-        driver.init();
-        long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
-        index.validate();
-        driver.deinit();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree MultiThread Test Time: " + times[0] + "ms");
-        }
-
-        tearDown();
-    }
-
-    @Test
-    public void oneIntKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-        int numKeys = 1;
-        String dataMsg = "One Int Key And Value";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
-            runTest(fieldSerdes, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
-        }
-    }
-
-    @Test
-    public void oneStringKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-        int numKeys = 1;
-        String dataMsg = "One String Key And Value";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
-            runTest(fieldSerdes, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
-        }
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java
deleted file mode 100644
index 4ab0e87..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Collection;
-import java.util.TreeSet;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexTestContext extends IndexTestContext<CheckTuple> {
-
-    protected final TreeSet<CheckTuple> checkTuples = new TreeSet<CheckTuple>();
-
-    public OrderedIndexTestContext(ISerializerDeserializer[] fieldSerdes, IIndex index) {
-        super(fieldSerdes, index);
-    }
-
-    public void upsertCheckTuple(CheckTuple checkTuple, Collection<CheckTuple> checkTuples) {
-    	if (checkTuples.contains(checkTuple)) {
-            checkTuples.remove(checkTuple);
-        }
-        checkTuples.add(checkTuple);
-    }
-    
-    @Override
-    public TreeSet<CheckTuple> getCheckTuples() {
-        return checkTuples;
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java
deleted file mode 100644
index ef2ee0b..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexTestDriver {
-    protected final Logger LOGGER = Logger.getLogger(OrderedIndexTestDriver.class.getName());
-
-    protected static final int numTuplesToInsert = AccessMethodTestsConfig.BTREE_NUM_TUPLES_TO_INSERT;
-
-    protected abstract OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception;
-
-    protected abstract Random getRandom();
-
-    protected abstract void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
-            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
-            throws Exception;
-
-    protected abstract String getTestOpName();
-
-    protected final BTreeLeafFrameType[] leafFrameTypesToTest;
-
-    public OrderedIndexTestDriver(BTreeLeafFrameType[] leafFrameTypesToTest) {
-        this.leafFrameTypesToTest = leafFrameTypesToTest;
-    }
-
-    @Test
-    public void oneIntKeyAndValue() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-        // Range search in [-1000, 1000]
-        ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
-        ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
-
-        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
-            runTest(fieldSerdes, 1, leafFrameType, lowKey, highKey, null, null);
-        }
-    }
-
-    @Test
-    public void twoIntKeys() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-
-        // Range search in [50 0, 50 500]
-        ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
-        ITupleReference highKey = TupleUtils.createIntegerTuple(50, 500);
-
-        // Prefix range search in [50, 50]
-        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
-        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
-
-        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
-            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
-        }
-    }
-
-    @Test
-    public void twoIntKeysAndValues() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-
-        // Range search in [50 100, 100 100]
-        ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
-        ITupleReference highKey = TupleUtils.createIntegerTuple(100, 100);
-
-        // Prefix range search in [50, 50]
-        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
-        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
-
-        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
-            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
-        }
-    }
-
-    @Test
-    public void oneStringKeyAndValue() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Range search in ["cbf", cc7"]
-        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
-        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7");
-
-        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
-            runTest(fieldSerdes, 1, leafFrameType, lowKey, highKey, null, null);
-        }
-    }
-
-    @Test
-    public void twoStringKeys() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Range search in ["cbf", "ddd", cc7", "eee"]
-        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
-        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
-
-        // Prefix range search in ["cbf", cc7"]
-        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
-        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
-
-        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
-            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
-        }
-    }
-
-    @Test
-    public void twoStringKeysAndValues() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Range search in ["cbf", "ddd", cc7", "eee"]
-        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
-        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
-
-        // Prefix range search in ["cbf", cc7"]
-        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
-        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
-
-        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
-            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
-        }
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java
deleted file mode 100644
index 04c64fe..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java
+++ /dev/null
@@ -1,429 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Random;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-@SuppressWarnings("rawtypes")
-public class OrderedIndexTestUtils extends TreeIndexTestUtils {
-    private static final Logger LOGGER = Logger.getLogger(OrderedIndexTestUtils.class.getName());
-
-    private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
-            ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
-        for (int i = 0; i < fieldSerdes.length; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i),
-                    actual.getFieldLength(i));
-            DataInput dataIn = new DataInputStream(inStream);
-            Object actualObj = fieldSerdes[i].deserialize(dataIn);
-            if (!actualObj.equals(expected.getField(i))) {
-                fail("Actual and expected fields do not match on field " + i + ".\nExpected: " + expected.getField(i)
-                        + "\nActual  : " + actualObj);
-            }
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    // Create a new TreeSet containing the elements satisfying the prefix search.
-    // Implementing prefix search by changing compareTo() in CheckTuple does not
-    // work.
-    public static SortedSet<CheckTuple> getPrefixExpectedSubset(TreeSet<CheckTuple> checkTuples, CheckTuple lowKey,
-            CheckTuple highKey) {
-        lowKey.setIsHighKey(false);
-        highKey.setIsHighKey(true);
-        CheckTuple low = checkTuples.ceiling(lowKey);
-        CheckTuple high = checkTuples.floor(highKey);
-        if (low == null || high == null) {
-            // Must be empty.
-            return new TreeSet<CheckTuple>();
-        }
-        if (high.compareTo(low) < 0) {
-            // Must be empty.
-            return new TreeSet<CheckTuple>();
-        }
-        return checkTuples.subSet(low, true, high, true);
-    }
-
-    @SuppressWarnings("unchecked")
-    public void checkRangeSearch(IIndexTestContext ctx, ITupleReference lowKey, ITupleReference highKey,
-            boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Testing Range Search.");
-        }
-        MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
-        MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
-        IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor();
-        RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp,
-                highKeyCmp);
-        ctx.getIndexAccessor().search(searchCursor, rangePred);
-        // Get the subset of elements from the expected set within given key
-        // range.
-        CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, ctx.getFieldSerdes(), lowKeyCmp.getKeyFieldCount());
-        CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(),
-                highKeyCmp.getKeyFieldCount());
-        SortedSet<CheckTuple> expectedSubset = null;
-        if (lowKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()
-                || highKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()) {
-            // Searching on a key prefix (low key or high key or both).
-            expectedSubset = getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck,
-                    highKeyCheck);
-        } else {
-            // Searching on all key fields.
-            expectedSubset = ((TreeSet<CheckTuple>) ctx.getCheckTuples()).subSet(lowKeyCheck, lowKeyInclusive,
-                    highKeyCheck, highKeyInclusive);
-        }
-        Iterator<CheckTuple> checkIter = expectedSubset.iterator();
-        int actualCount = 0;
-        try {
-            while (searchCursor.hasNext()) {
-                if (!checkIter.hasNext()) {
-                    fail("Range search returned more answers than expected.\nExpected: " + expectedSubset.size());
-                }
-                searchCursor.next();
-                CheckTuple expectedTuple = checkIter.next();
-                ITupleReference tuple = searchCursor.getTuple();
-                compareActualAndExpected(tuple, expectedTuple, ctx.getFieldSerdes());
-                actualCount++;
-            }
-            if (actualCount < expectedSubset.size()) {
-                fail("Range search returned fewer answers than expected.\nExpected: " + expectedSubset.size()
-                        + "\nActual  : " + actualCount);
-            }
-        } finally {
-            searchCursor.close();
-        }
-    }
-
-    public void checkPointSearches(IIndexTestContext ictx) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Testing Point Searches On All Expected Keys.");
-        }
-        OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
-        IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor();
-
-        ArrayTupleBuilder lowKeyBuilder = new ArrayTupleBuilder(ctx.getKeyFieldCount());
-        ArrayTupleReference lowKey = new ArrayTupleReference();
-        ArrayTupleBuilder highKeyBuilder = new ArrayTupleBuilder(ctx.getKeyFieldCount());
-        ArrayTupleReference highKey = new ArrayTupleReference();
-        RangePredicate rangePred = new RangePredicate(lowKey, highKey, true, true, null, null);
-
-        // Iterate through expected tuples, and perform a point search in the
-        // BTree to verify the tuple can be reached.
-        for (CheckTuple checkTuple : ctx.getCheckTuples()) {
-            createTupleFromCheckTuple(checkTuple, lowKeyBuilder, lowKey, ctx.getFieldSerdes());
-            createTupleFromCheckTuple(checkTuple, highKeyBuilder, highKey, ctx.getFieldSerdes());
-            MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
-            MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
-
-            rangePred.setLowKey(lowKey, true);
-            rangePred.setHighKey(highKey, true);
-            rangePred.setLowKeyComparator(lowKeyCmp);
-            rangePred.setHighKeyComparator(highKeyCmp);
-
-            ctx.getIndexAccessor().search(searchCursor, rangePred);
-
-            try {
-                // We expect exactly one answer.
-                if (searchCursor.hasNext()) {
-                    searchCursor.next();
-                    ITupleReference tuple = searchCursor.getTuple();
-                    compareActualAndExpected(tuple, checkTuple, ctx.getFieldSerdes());
-                }
-                if (searchCursor.hasNext()) {
-                    fail("Point search returned more than one answer.");
-                }
-            } finally {
-                searchCursor.close();
-            }
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public void insertStringTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        String[] fieldValues = new String[fieldCount];
-        for (int i = 0; i < numTuples; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            // Set keys.
-            for (int j = 0; j < numKeyFields; j++) {
-                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
-                fieldValues[j] = getRandomString(length, rnd);
-            }
-            // Set values.
-            for (int j = numKeyFields; j < fieldCount; j++) {
-                fieldValues[j] = getRandomString(5, rnd);
-            }
-            TupleUtils.createTuple(ctx.getTupleBuilder(), ctx.getTuple(), ctx.getFieldSerdes(), (Object[]) fieldValues);
-            try {
-                ctx.getIndexAccessor().insert(ctx.getTuple());
-                // Set expected values. Do this only after insertion succeeds
-                // because we ignore duplicate keys.
-                ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (BTreeDuplicateKeyException e) {
-                // Ignore duplicate key insertions.
-            }
-        }
-    }
-
-    public void upsertStringTuples(IIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
-        OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        String[] fieldValues = new String[fieldCount];
-        for (int i = 0; i < numTuples; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            // Set keys.
-            for (int j = 0; j < numKeyFields; j++) {
-                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
-                fieldValues[j] = getRandomString(length, rnd);
-            }
-            // Set values.
-            for (int j = numKeyFields; j < fieldCount; j++) {
-                fieldValues[j] = getRandomString(5, rnd);
-            }
-            TupleUtils.createTuple(ctx.getTupleBuilder(), ctx.getTuple(), ctx.getFieldSerdes(), (Object[]) fieldValues);
-            ctx.getIndexAccessor().upsert(ctx.getTuple());
-            ctx.upsertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public void bulkLoadStringTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        String[] fieldValues = new String[fieldCount];
-        TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<CheckTuple>();
-        for (int i = 0; i < numTuples; i++) {
-            // Set keys.
-            for (int j = 0; j < numKeyFields; j++) {
-                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
-                fieldValues[j] = getRandomString(length, rnd);
-            }
-            // Set values.
-            for (int j = numKeyFields; j < fieldCount; j++) {
-                fieldValues[j] = getRandomString(5, rnd);
-            }
-            // Set expected values. We also use these as the pre-sorted stream
-            // for bulk loading.
-            ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
-        }
-        bulkLoadCheckTuples(ctx, tmpCheckTuples);
-
-        // Add tmpCheckTuples to ctx check tuples for comparing searches.
-        for (CheckTuple checkTuple : tmpCheckTuples) {
-            ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
-        }
-    }
-
-    public void upsertIntTuples(IIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
-        OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        int[] fieldValues = new int[ctx.getFieldCount()];
-        // Scale range of values according to number of keys.
-        // For example, for 2 keys we want the square root of numTuples, for 3
-        // keys the cube root of numTuples, etc.
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
-        for (int i = 0; i < numTuples; i++) {
-            // Set keys.
-            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
-            // Set values.
-            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
-            TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            ctx.getIndexAccessor().upsert(ctx.getTuple());
-            ctx.upsertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public void updateTuples(IIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
-        OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
-        int fieldCount = ctx.getFieldCount();
-        int keyFieldCount = ctx.getKeyFieldCount();
-        // This is a noop because we can only update non-key fields.
-        if (fieldCount == keyFieldCount) {
-            return;
-        }
-        ArrayTupleBuilder updateTupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference updateTuple = new ArrayTupleReference();
-        int numCheckTuples = ctx.getCheckTuples().size();
-        // Copy CheckTuple references into array, so we can randomly pick from
-        // there.
-        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
-        int idx = 0;
-        for (CheckTuple checkTuple : ctx.getCheckTuples()) {
-            checkTuples[idx++] = checkTuple;
-        }
-        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
-            CheckTuple checkTuple = checkTuples[checkTupleIdx];
-            // Update check tuple's non-key fields.
-            for (int j = keyFieldCount; j < fieldCount; j++) {
-                Comparable newValue = getRandomUpdateValue(ctx.getFieldSerdes()[j], rnd);
-                checkTuple.setField(j, newValue);
-            }
-
-            createTupleFromCheckTuple(checkTuple, updateTupleBuilder, updateTuple, ctx.getFieldSerdes());
-            ctx.getIndexAccessor().update(updateTuple);
-
-            // Swap with last "valid" CheckTuple.
-            CheckTuple tmp = checkTuples[numCheckTuples - 1];
-            checkTuples[numCheckTuples - 1] = checkTuple;
-            checkTuples[checkTupleIdx] = tmp;
-            numCheckTuples--;
-        }
-    }
-
-    public CheckTuple createStringCheckTuple(String[] fieldValues, int numKeyFields) {
-        CheckTuple<String> checkTuple = new CheckTuple<String>(fieldValues.length, numKeyFields);
-        for (String s : fieldValues) {
-            checkTuple.appendField((String) s);
-        }
-        return checkTuple;
-    }
-
-    private static Comparable getRandomUpdateValue(ISerializerDeserializer serde, Random rnd) {
-        if (serde instanceof IntegerSerializerDeserializer) {
-            return Integer.valueOf(rnd.nextInt());
-        } else if (serde instanceof UTF8StringSerializerDeserializer) {
-            return getRandomString(10, rnd);
-        }
-        return null;
-    }
-
-    public static String getRandomString(int length, Random rnd) {
-        String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < s.length() && i < length; i++) {
-            strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
-        }
-        return strBuilder.toString();
-    }
-
-    @Override
-    protected CheckTuple createCheckTuple(int numFields, int numKeyFields) {
-        return new CheckTuple(numFields, numKeyFields);
-    }
-
-    @Override
-    protected ISearchPredicate createNullSearchPredicate() {
-        return new RangePredicate(null, null, true, true, null, null);
-    }
-
-    @Override
-    public void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
-            ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
-        int actualCount = 0;
-        try {
-            while (cursor.hasNext()) {
-                if (!checkIter.hasNext()) {
-                    fail("Ordered scan returned more answers than expected.\nExpected: " + checkTuples.size());
-                }
-                cursor.next();
-                CheckTuple expectedTuple = checkIter.next();
-                ITupleReference tuple = cursor.getTuple();
-                compareActualAndExpected(tuple, expectedTuple, fieldSerdes);
-                actualCount++;
-            }
-            if (actualCount < checkTuples.size()) {
-                fail("Ordered scan returned fewer answers than expected.\nExpected: " + checkTuples.size()
-                        + "\nActual  : " + actualCount);
-            }
-        } finally {
-            cursor.close();
-        }
-
-    }
-
-    @Override
-    protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
-        CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldValues.length, numKeyFields);
-        for (int v : fieldValues) {
-            checkTuple.appendField(v);
-        }
-        return checkTuple;
-    }
-
-    @Override
-    protected void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd) {
-        for (int j = 0; j < numKeyFields; j++) {
-            fieldValues[j] = rnd.nextInt() % maxValue;
-        }
-    }
-
-    @Override
-    protected void setIntPayloadFields(int[] fieldValues, int numKeyFields, int numFields) {
-        for (int j = numKeyFields; j < numFields; j++) {
-            fieldValues[j] = j;
-        }
-    }
-
-    @Override
-    protected Collection createCheckTuplesCollection() {
-        return new TreeSet<CheckTuple>();
-    }
-
-    @Override
-    protected ArrayTupleBuilder createDeleteTupleBuilder(IIndexTestContext ctx) {
-        return new ArrayTupleBuilder(ctx.getKeyFieldCount());
-    }
-
-    @Override
-    protected boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, IIndexTestContext ctx)
-            throws HyracksDataException {
-        @SuppressWarnings("unchecked")
-        TreeSet<CheckTuple> checkTuples = (TreeSet<CheckTuple>) ctx.getCheckTuples();
-        CheckTuple matchingCheckTuple = checkTuples.floor(checkTuple);
-        if (matchingCheckTuple == null) {
-            return false;
-        }
-        compareActualAndExpected(tuple, matchingCheckTuple, ctx.getFieldSerdes());
-        return true;
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java
deleted file mode 100644
index 049724e..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexUpdateTest extends OrderedIndexTestDriver {
-
-    private final OrderedIndexTestUtils orderedIndexTestUtils;
-
-    public OrderedIndexUpdateTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
-        super(leafFrameTypesToTest);
-        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
-    }
-
-    private static final int numUpdateRounds = AccessMethodTestsConfig.BTREE_NUM_UPDATE_ROUNDS;
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
-            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
-            throws Exception {
-        // This is a noop because we can only update non-key fields.
-        if (fieldSerdes.length == numKeys) {
-            return;
-        }
-        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        // We assume all fieldSerdes are of the same type. Check the first one
-        // to determine which field types to generate.
-        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-            orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
-        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
-            orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
-        }
-        int numTuplesPerDeleteRound = (int) Math.ceil((float) ctx.getCheckTuples().size() / (float) numUpdateRounds);
-        for (int j = 0; j < numUpdateRounds; j++) {
-            orderedIndexTestUtils.updateTuples(ctx, numTuplesPerDeleteRound, getRandom());
-            orderedIndexTestUtils.checkPointSearches(ctx);
-            orderedIndexTestUtils.checkScan(ctx);
-            orderedIndexTestUtils.checkDiskOrderScan(ctx);
-            orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
-            if (prefixLowKey != null && prefixHighKey != null) {
-                orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
-            }
-        }
-
-        ctx.getIndex().validate();
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "Update";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpsertTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
deleted file mode 100644
index d34928f..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-
-/**
- * Tests the BTree insert operation with strings and integer fields using
- * various numbers of key and payload fields.
- * Each tests first fills a BTree with randomly generated tuples. We compare the
- * following operations against expected results: 1. Point searches for all
- * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
- * search for composite keys).
- */
-@SuppressWarnings("rawtypes")
-public abstract class OrderedIndexUpsertTest extends OrderedIndexTestDriver {
-
-    private final OrderedIndexTestUtils orderedIndexTestUtils;
-
-    public OrderedIndexUpsertTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
-        super(leafFrameTypesToTest);
-        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
-            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
-            throws Exception {
-        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        // We assume all fieldSerdes are of the same type. Check the first one
-        // to determine which field types to generate.
-        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-            orderedIndexTestUtils.upsertIntTuples(ctx, numTuplesToInsert, getRandom());
-        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
-            orderedIndexTestUtils.upsertStringTuples(ctx, numTuplesToInsert, getRandom());
-        }
-
-        orderedIndexTestUtils.checkPointSearches(ctx);
-        orderedIndexTestUtils.checkScan(ctx);
-        orderedIndexTestUtils.checkDiskOrderScan(ctx);
-
-        orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
-        if (prefixLowKey != null && prefixHighKey != null) {
-            orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
-        }
-        ctx.getIndex().validate();
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "Insert";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractIndexLifecycleTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractIndexLifecycleTest.java
deleted file mode 100644
index 2226d94..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractIndexLifecycleTest.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-
-public abstract class AbstractIndexLifecycleTest {
-
-    protected IIndex index;
-
-    protected abstract boolean persistentStateExists() throws Exception;
-
-    protected abstract boolean isEmptyIndex() throws Exception;
-
-    protected abstract void performInsertions() throws Exception;
-
-    protected abstract void checkInsertions() throws Exception;
-
-    protected abstract void clearCheckableInsertions() throws Exception;
-
-    @Before
-    public abstract void setup() throws Exception;
-
-    @After
-    public abstract void tearDown() throws Exception;
-
-    @Test
-    public void validSequenceTest() throws Exception {
-        // Double create is valid
-        index.create();
-        Assert.assertTrue(persistentStateExists());
-        index.create();
-        Assert.assertTrue(persistentStateExists());
-
-        // Double open is valid
-        index.activate();
-        index.activate();
-        Assert.assertTrue(isEmptyIndex());
-
-        // Insert some stuff
-        performInsertions();
-        checkInsertions();
-
-        // Check that the inserted stuff isn't there
-        clearCheckableInsertions();
-        index.clear();
-        Assert.assertTrue(isEmptyIndex());
-
-        // Insert more stuff
-        performInsertions();
-
-        // Double close is valid
-        index.deactivate();
-        index.deactivate();
-
-        // Check that the inserted stuff is still there
-        index.activate();
-        checkInsertions();
-        index.deactivate();
-
-        // Double destroy is valid
-        index.destroy();
-        Assert.assertFalse(persistentStateExists());
-        index.destroy();
-        Assert.assertFalse(persistentStateExists());
-    }
-
-    @Test(expected = HyracksDataException.class)
-    public void invalidSequenceTest1() throws Exception {
-        index.create();
-        index.activate();
-        index.create();
-    }
-
-    @Test(expected = HyracksDataException.class)
-    public void invalidSequenceTest2() throws Exception {
-        index.create();
-        index.activate();
-        index.destroy();
-    }
-
-    @Test(expected = HyracksDataException.class)
-    public void invalidSequenceTest3() throws Exception {
-        index.create();
-        index.clear();
-    }
-
-    @Test(expected = HyracksDataException.class)
-    public void invalidSequenceTest4() throws Exception {
-        index.clear();
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractIndexTestWorker.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractIndexTestWorker.java
deleted file mode 100644
index f9ff26a..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractIndexTestWorker.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import java.util.Random;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
-
-public abstract class AbstractIndexTestWorker extends Thread implements ITreeIndexTestWorker {
-    private final Random rnd;
-    private final DataGenThread dataGen;
-    private final TestOperationSelector opSelector;
-    private final int numBatches;
-
-    protected final IIndexAccessor indexAccessor;
-
-    public AbstractIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index, int numBatches) {
-        this.dataGen = dataGen;
-        this.opSelector = opSelector;
-        this.numBatches = numBatches;
-        this.rnd = new Random();
-        this.indexAccessor = index.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
-    }
-
-    @Override
-    public void run() {
-        try {
-            for (int i = 0; i < numBatches; i++) {
-                TupleBatch batch = dataGen.getBatch();
-                for (int j = 0; j < batch.size(); j++) {
-                    TestOperation op = opSelector.getOp(rnd.nextInt());
-                    ITupleReference tuple = batch.get(j);
-                    performOp(tuple, op);
-                }
-                dataGen.releaseBatch(batch);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    protected void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException, IndexException {
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-            }
-        } finally {
-            cursor.close();
-        }
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java
deleted file mode 100644
index b7037d6..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-@SuppressWarnings({"rawtypes", "unchecked"})
-public class CheckTuple<T extends Comparable<T>> implements Comparable<T> {
-    protected final int numKeys;
-    protected final Comparable[] fields;
-    protected int pos;
-    protected boolean isHighKey;
-
-    public CheckTuple(int numFields, int numKeys) {
-        this.numKeys = numKeys;
-        this.fields = new Comparable[numFields];
-        pos = 0;
-        isHighKey = false;
-    }
-
-    public void appendField(T e) {
-        fields[pos++] = e;
-    }
-
-	@Override
-	public int compareTo(T o) {
-		CheckTuple<T> other = (CheckTuple<T>) o;
-		int cmpFieldCount = Math.min(other.getNumKeys(), numKeys);
-		for (int i = 0; i < cmpFieldCount; i++) {
-			int cmp = fields[i].compareTo(other.getField(i));
-			if (cmp != 0) {
-				return cmp;
-			}
-		}
-		if (other.getNumKeys() == numKeys) {
-		    return 0;
-		}
-		if (other.getNumKeys() < numKeys) {
-		    return (other.isHighKey) ? -1 : 1;
-		}
-		if (other.getNumKeys() > numKeys) {
-            return (isHighKey) ? 1 : -1;
-        }
-		return 0;
-	}
-
-	@Override
-	public boolean equals(Object o) {
-		if (!(o instanceof Comparable<?>)) {
-			return false;
-		}
-		return compareTo((T) o) == 0;
-	}
-    
-	@Override
-	public int hashCode() {
-		int hash = 0;
-		for (int i = 0; i < numKeys; i++) {
-			hash = 37 * hash + fields[i].hashCode();
-		}
-		return hash;
-	}
-	
-	public void setIsHighKey(boolean isHighKey) {
-	    this.isHighKey = isHighKey;
-	}
-	
-	public T getField(int idx) {
-		return (T) fields[idx];
-	}
-    
-    public void setField(int idx, T e) {
-        fields[idx] = e;
-    }
-    
-    public int size() {
-        return fields.length;
-    }
-    
-    public int getNumKeys() {
-        return numKeys;
-    }
-    
-    @Override
-    public String toString() {
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < fields.length; i++) {
-            strBuilder.append(fields[i].toString());
-            if (i != fields.length-1) {
-                strBuilder.append(" ");
-            }
-        }
-        return strBuilder.toString();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IIndexTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IIndexTestContext.java
deleted file mode 100644
index 3599c5e..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IIndexTestContext.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import java.util.Collection;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-
-@SuppressWarnings("rawtypes")
-public interface IIndexTestContext<T extends CheckTuple> {
-    public int getFieldCount();
-
-    public int getKeyFieldCount();
-
-    public ISerializerDeserializer[] getFieldSerdes();
-
-    public IBinaryComparatorFactory[] getComparatorFactories();
-
-    public IIndexAccessor getIndexAccessor();
-
-    public IIndex getIndex();
-
-    public ArrayTupleReference getTuple();
-
-    public ArrayTupleBuilder getTupleBuilder();
-
-    public void insertCheckTuple(T checkTuple, Collection<T> checkTuples);      
-
-    public void deleteCheckTuple(T checkTuple, Collection<T> checkTuples);
-
-    public Collection<T> getCheckTuples();
-
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IIndexTestWorkerFactory.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IIndexTestWorkerFactory.java
deleted file mode 100644
index d4efb3e..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IIndexTestWorkerFactory.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public interface IIndexTestWorkerFactory {
-    public AbstractIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index,
-            int numBatches);
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IndexMultiThreadTestDriver.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
deleted file mode 100644
index ca1d28f..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-@SuppressWarnings("rawtypes")
-public class IndexMultiThreadTestDriver {
-    protected static final int RANDOM_SEED = 50;
-    // Means no additional payload. Only the specified fields.
-    protected static final int PAYLOAD_SIZE = 0;
-    protected final TestOperationSelector opSelector;
-    protected final ISerializerDeserializer[] fieldSerdes;
-    protected final IIndex index;
-    protected final IIndexTestWorkerFactory workerFactory;
-
-    public IndexMultiThreadTestDriver(IIndex index, IIndexTestWorkerFactory workerFactory,
-            ISerializerDeserializer[] fieldSerdes, TestOperation[] ops, double[] opProbs) {
-        this.index = index;
-        this.workerFactory = workerFactory;
-        this.fieldSerdes = fieldSerdes;
-        this.opSelector = new TestOperationSelector(ops, opProbs);
-    }
-
-    public void init() throws HyracksDataException {
-        index.create();
-        index.activate();
-    }
-
-    public long[] run(int numThreads, int numRepeats, int numOps, int batchSize) throws InterruptedException,
-            TreeIndexException {
-        int numBatches = numOps / batchSize;
-        int threadNumBatches = numBatches / numThreads;
-        if (threadNumBatches <= 0) {
-            throw new TreeIndexException("Inconsistent parameters given. Need at least one batch per thread.");
-        }
-        long[] times = new long[numRepeats];
-        for (int i = 0; i < numRepeats; i++) {
-            DataGenThread dataGen = createDatagenThread(numThreads, numBatches, batchSize);
-            dataGen.start();
-            // Wait until the tupleBatchQueue is filled to capacity.
-            while (dataGen.tupleBatchQueue.remainingCapacity() != 0 && dataGen.tupleBatchQueue.size() != numBatches) {
-                Thread.sleep(10);
-            }
-
-            // Start worker threads.
-            AbstractIndexTestWorker[] workers = new AbstractIndexTestWorker[numThreads];
-            long start = System.currentTimeMillis();
-            for (int j = 0; j < numThreads; j++) {
-                workers[j] = workerFactory.create(dataGen, opSelector, index, threadNumBatches);
-                workers[j].start();
-            }
-            // Join worker threads.
-            for (int j = 0; j < numThreads; j++) {
-                workers[j].join();
-            }
-            long end = System.currentTimeMillis();
-            times[i] = end - start;
-        }
-        return times;
-    }
-
-    public void deinit() throws HyracksDataException {
-        index.deactivate();
-        index.destroy();
-    }
-
-    // To allow subclasses to override the data gen params.
-    public DataGenThread createDatagenThread(int numThreads, int numBatches, int batchSize) {
-        return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, PAYLOAD_SIZE, RANDOM_SEED,
-                2 * numThreads, false);
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IndexTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IndexTestContext.java
deleted file mode 100644
index e0aa1db..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/IndexTestContext.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import java.util.Collection;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-
-@SuppressWarnings("rawtypes")
-public abstract class IndexTestContext<T extends CheckTuple> implements IIndexTestContext<T> {
-    protected final ISerializerDeserializer[] fieldSerdes;
-    protected final IIndex index;
-    protected final ArrayTupleBuilder tupleBuilder;
-    protected final ArrayTupleReference tuple = new ArrayTupleReference();
-    protected final IIndexAccessor indexAccessor;
-
-    public IndexTestContext(ISerializerDeserializer[] fieldSerdes, IIndex index) {
-        this.fieldSerdes = fieldSerdes;
-        this.index = index;
-        this.indexAccessor = (IIndexAccessor) index.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-        this.tupleBuilder = new ArrayTupleBuilder(fieldSerdes.length);
-    }
-
-    @Override
-    public int getFieldCount() {
-        return fieldSerdes.length;
-    }
-
-    @Override
-    public IIndexAccessor getIndexAccessor() {
-        return indexAccessor;
-    }
-
-    @Override
-    public ArrayTupleReference getTuple() {
-        return tuple;
-    }
-
-    @Override
-    public ArrayTupleBuilder getTupleBuilder() {
-        return tupleBuilder;
-    }
-
-    @Override
-    public ISerializerDeserializer[] getFieldSerdes() {
-        return fieldSerdes;
-    }
-
-    @Override
-    public IIndex getIndex() {
-        return index;
-    }
-
-    @Override
-    public void insertCheckTuple(T checkTuple, Collection<T> checkTuples) {
-        checkTuples.add(checkTuple);
-    }
-
-    @Override
-    public void deleteCheckTuple(T checkTuple, Collection<T> checkTuples) {
-        checkTuples.remove(checkTuple);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationCallback.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationCallback.java
deleted file mode 100644
index 04f888b..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationCallback.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common;
-
-import java.util.Random;
-
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-
-public enum TestOperationCallback implements ISearchOperationCallback, IModificationOperationCallback {
-    INSTANCE;
-
-    private static final int RANDOM_SEED = 50;
-    private final Random random = new Random();
-
-    private TestOperationCallback() {
-        random.setSeed(RANDOM_SEED);
-    }
-
-    @Override
-    public boolean proceed(ITupleReference tuple) {
-        // Always fail
-        return false;
-    }
-
-    @Override
-    public void reconcile(ITupleReference tuple) {
-        // Do nothing.
-    }
-
-    @Override
-    public void before(ITupleReference tuple) {
-        // Do nothing.        
-    }
-
-    @Override
-    public void found(ITupleReference before, ITupleReference after) {
-        // Do nothing.        
-    }
-
-    @Override
-    public void cancel(ITupleReference tuple) {
-        // Do nothing.
-    }
-
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java
deleted file mode 100644
index 47735e5..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import edu.uci.ics.hyracks.storage.am.common.datagen.ProbabilityHelper;
-
-
-public class TestOperationSelector {
-
-    public static enum TestOperation {
-        INSERT,
-        DELETE,
-        UPDATE,
-        UPSERT,
-        POINT_SEARCH,
-        RANGE_SEARCH,
-        SCAN,
-        DISKORDER_SCAN,
-        MERGE        
-    }
-    
-    private final TestOperation[] ops;
-    private final int[] cumulIntRanges;    
-    
-    public TestOperationSelector(TestOperation[] ops, double[] opProbs) {
-        sanityCheck(ops, opProbs);
-        this.ops = ops;
-        this.cumulIntRanges = ProbabilityHelper.getCumulIntRanges(opProbs);
-    }
-    
-    private void sanityCheck(TestOperation[] ops, double[] opProbs) {
-        if (ops.length == 0) {
-            throw new RuntimeException("Empty op array.");
-        }
-        if (opProbs.length == 0) {
-            throw new RuntimeException("Empty op probabilities.");
-        }
-        if (ops.length != opProbs.length) {
-            throw new RuntimeException("Ops and op probabilities have unequal length.");
-        }
-        float sum = 0.0f;
-        for (int i = 0; i < opProbs.length; i++) {
-            sum += opProbs[i];
-        }
-        if (sum != 1.0f) {
-            throw new RuntimeException("Op probabilities don't add up to 1.");
-        }
-    }
-    
-    public TestOperation getOp(int randomInt) {
-        int ix = ProbabilityHelper.choose(cumulIntRanges, randomInt);
-        return ops[ix];
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java
deleted file mode 100644
index 28845ed..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-
-public class TestWorkloadConf {
-    public final TestOperation[] ops;
-    public final double[] opProbs;
-
-    public TestWorkloadConf(TestOperation[] ops, double[] opProbs) {
-        this.ops = ops;
-        this.opProbs = opProbs;
-    }
-    
-    public String toString() {
-        StringBuilder strBuilder = new StringBuilder();
-        for (TestOperation op : ops) {
-            strBuilder.append(op.toString());
-            strBuilder.append(',');
-        }
-        strBuilder.deleteCharAt(strBuilder.length() - 1);
-        return strBuilder.toString();
-    }
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java
deleted file mode 100644
index 1a80231..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java
+++ /dev/null
@@ -1,298 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.common;
-
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-
-@SuppressWarnings("rawtypes")
-public abstract class TreeIndexTestUtils {
-    private static final Logger LOGGER = Logger.getLogger(TreeIndexTestUtils.class.getName());
-
-    protected abstract CheckTuple createCheckTuple(int numFields, int numKeyFields);
-
-    protected abstract ISearchPredicate createNullSearchPredicate();
-
-    public abstract void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
-            ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception;
-
-    protected abstract CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields);
-
-    protected abstract void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd);
-
-    protected abstract void setIntPayloadFields(int[] fieldValues, int numKeyFields, int numFields);
-
-    protected abstract Collection createCheckTuplesCollection();
-
-    protected abstract ArrayTupleBuilder createDeleteTupleBuilder(IIndexTestContext ctx);
-
-    // See if tuple with corresponding checkTuple exists in ctx.checkTuples.
-    protected abstract boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple,
-            IIndexTestContext ctx) throws HyracksDataException;
-
-    @SuppressWarnings("unchecked")
-    public static void createTupleFromCheckTuple(CheckTuple checkTuple, ArrayTupleBuilder tupleBuilder,
-            ArrayTupleReference tuple, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
-        int fieldCount = tupleBuilder.getFieldEndOffsets().length;
-        DataOutput dos = tupleBuilder.getDataOutput();
-        tupleBuilder.reset();
-        for (int i = 0; i < fieldCount; i++) {
-            fieldSerdes[i].serialize(checkTuple.getField(i), dos);
-            tupleBuilder.addFieldEndOffset();
-        }
-        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-    }
-
-    @SuppressWarnings("unchecked")
-    public CheckTuple createCheckTupleFromTuple(ITupleReference tuple, ISerializerDeserializer[] fieldSerdes,
-            int numKeys) throws HyracksDataException {
-        CheckTuple checkTuple = createCheckTuple(fieldSerdes.length, numKeys);
-        int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
-        for (int i = 0; i < fieldCount; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
-            DataInput dataIn = new DataInputStream(inStream);
-            Comparable fieldObj = (Comparable) fieldSerdes[i].deserialize(dataIn);
-            checkTuple.appendField(fieldObj);
-        }
-        return checkTuple;
-    }
-
-    @SuppressWarnings("unchecked")
-    public void checkScan(IIndexTestContext ctx) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Testing Scan.");
-        }
-        ITreeIndexCursor scanCursor = (ITreeIndexCursor) ctx.getIndexAccessor().createSearchCursor();
-        ISearchPredicate nullPred = createNullSearchPredicate();
-        ctx.getIndexAccessor().search(scanCursor, nullPred);
-        Iterator<CheckTuple> checkIter = ctx.getCheckTuples().iterator();
-        checkExpectedResults(scanCursor, ctx.getCheckTuples(), ctx.getFieldSerdes(), ctx.getKeyFieldCount(), checkIter);
-    }
-
-    public void checkDiskOrderScan(IIndexTestContext ctx) throws Exception {
-        try {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Testing Disk-Order Scan.");
-            }
-            ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) ctx.getIndexAccessor();
-            ITreeIndexCursor diskOrderCursor = treeIndexAccessor.createDiskOrderScanCursor();
-            treeIndexAccessor.diskOrderScan(diskOrderCursor);
-            int actualCount = 0;
-            try {
-                while (diskOrderCursor.hasNext()) {
-                    diskOrderCursor.next();
-                    ITupleReference tuple = diskOrderCursor.getTuple();
-                    CheckTuple checkTuple = createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(),
-                            ctx.getKeyFieldCount());
-                    if (!checkDiskOrderScanResult(tuple, checkTuple, ctx)) {
-                        fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
-                    }
-                    actualCount++;
-                }
-                if (actualCount < ctx.getCheckTuples().size()) {
-                    fail("Disk-order scan returned fewer answers than expected.\nExpected: "
-                            + ctx.getCheckTuples().size() + "\nActual  : " + actualCount);
-                }
-                if (actualCount > ctx.getCheckTuples().size()) {
-                    fail("Disk-order scan returned more answers than expected.\nExpected: "
-                            + ctx.getCheckTuples().size() + "\nActual  : " + actualCount);
-                }
-            } finally {
-                diskOrderCursor.close();
-            }
-        } catch (UnsupportedOperationException e) {
-            // Ignore exception because some indexes, e.g. the LSMTrees, don't
-            // support disk-order scan.
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring disk-order scan since it's not supported.");
-            }
-        } catch (ClassCastException e) {
-            // Ignore exception because IIndexAccessor sometimes isn't
-            // an ITreeIndexAccessor, e.g., for the LSMBTree.
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring disk-order scan since it's not supported.");
-            }
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public void insertIntTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        int[] fieldValues = new int[ctx.getFieldCount()];
-        // Scale range of values according to number of keys.
-        // For example, for 2 keys we want the square root of numTuples, for 3
-        // keys the cube root of numTuples, etc.
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
-        for (int i = 0; i < numTuples; i++) {
-            // Set keys.
-            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
-            // Set values.
-            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
-            TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            try {
-                ctx.getIndexAccessor().insert(ctx.getTuple());
-                ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (TreeIndexException e) {
-                // We set expected values only after insertion succeeds because
-                // we ignore duplicate keys.
-            }
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public void upsertIntTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        int[] fieldValues = new int[ctx.getFieldCount()];
-        // Scale range of values according to number of keys.
-        // For example, for 2 keys we want the square root of numTuples, for 3
-        // keys the cube root of numTuples, etc.
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
-        for (int i = 0; i < numTuples; i++) {
-            // Set keys.
-            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
-            // Set values.
-            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
-            TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            try {
-                ctx.getIndexAccessor().upsert(ctx.getTuple());
-                ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (TreeIndexException e) {
-                // We set expected values only after insertion succeeds because
-                // we ignore duplicate keys.
-            }
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public void bulkLoadIntTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        int[] fieldValues = new int[ctx.getFieldCount()];
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
-        Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
-        for (int i = 0; i < numTuples; i++) {
-            // Set keys.
-            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
-            // Set values.
-            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
-
-            // Set expected values. (We also use these as the pre-sorted stream
-            // for ordered indexes bulk loading).
-            ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
-        }
-        bulkLoadCheckTuples(ctx, tmpCheckTuples);
-
-        // Add tmpCheckTuples to ctx check tuples for comparing searches.
-        for (CheckTuple checkTuple : tmpCheckTuples) {
-            ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
-        }
-    }
-
-    public static void bulkLoadCheckTuples(IIndexTestContext ctx, Collection<CheckTuple> checkTuples)
-            throws HyracksDataException, IndexException {
-        int fieldCount = ctx.getFieldCount();
-        int numTuples = checkTuples.size();
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        // Perform bulk load.
-        IIndexBulkLoader bulkLoader = ctx.getIndex().createBulkLoader(0.7f, false, numTuples);
-        int c = 1;
-        for (CheckTuple checkTuple : checkTuples) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (c % (numTuples / 10) == 0) {
-                    LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
-                }
-            }
-            createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, ctx.getFieldSerdes());
-            bulkLoader.add(tuple);
-            c++;
-        }
-        bulkLoader.end();
-    }
-
-    @SuppressWarnings("unchecked")
-    public void deleteTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        ArrayTupleBuilder deleteTupleBuilder = createDeleteTupleBuilder(ctx);
-        ArrayTupleReference deleteTuple = new ArrayTupleReference();
-        int numCheckTuples = ctx.getCheckTuples().size();
-        // Copy CheckTuple references into array, so we can randomly pick from
-        // there.
-        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
-        int idx = 0;
-        Iterator<CheckTuple> iter = ctx.getCheckTuples().iterator();
-        while (iter.hasNext()) {
-            CheckTuple checkTuple = iter.next();
-            checkTuples[idx++] = checkTuple;
-        }
-
-        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
-            CheckTuple checkTuple = checkTuples[checkTupleIdx];
-            createTupleFromCheckTuple(checkTuple, deleteTupleBuilder, deleteTuple, ctx.getFieldSerdes());
-            ctx.getIndexAccessor().delete(deleteTuple);
-
-            // Remove check tuple from expected results.
-            ctx.deleteCheckTuple(checkTuple, ctx.getCheckTuples());
-
-            // Swap with last "valid" CheckTuple.
-            CheckTuple tmp = checkTuples[numCheckTuples - 1];
-            checkTuples[numCheckTuples - 1] = checkTuple;
-            checkTuples[checkTupleIdx] = tmp;
-            numCheckTuples--;
-        }
-    }
-
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
deleted file mode 100644
index f962200..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.config;
-
-/**
- * Global parameters used for executing access method JUnit tests.
- */
-public class AccessMethodTestsConfig {
-    // Test params for RTree, LSMRTree and LSMRTreeWithAntiMatterTuples.
-    public static final int RTREE_NUM_TUPLES_TO_INSERT = 100;
-    public static final int RTREE_NUM_INSERT_ROUNDS = 2;
-    public static final int RTREE_NUM_DELETE_ROUNDS = 2;
-    public static final int RTREE_MULTITHREAD_NUM_OPERATIONS = 200;
-    public static final boolean RTREE_TEST_RSTAR_POLICY = true;
-    // Test params for LSMRTree and LSMRTreeWithAntiMatterTuples.
-    public static final int LSM_RTREE_BULKLOAD_ROUNDS = 5;
-    public static final int LSM_RTREE_MAX_TREES_TO_MERGE = 3;
-    public static final boolean LSM_RTREE_TEST_RSTAR_POLICY = false;
-
-    // Test params for BTree, LSMBTree.
-    public static final int BTREE_NUM_TUPLES_TO_INSERT = 100;
-    public static final int BTREE_NUM_INSERT_ROUNDS = 3;
-    public static final int BTREE_NUM_DELETE_ROUNDS = 3;
-    public static final int BTREE_NUM_UPDATE_ROUNDS = 3;
-    public static final int BTREE_MULTITHREAD_NUM_OPERATIONS = 200;
-    // Test params for LSMBTree only.
-    public static final int LSM_BTREE_BULKLOAD_ROUNDS = 5;
-    public static final int LSM_BTREE_MAX_TREES_TO_MERGE = 10;
-
-    // Mem configuration for RTree.
-    public static final int RTREE_PAGE_SIZE = 512;
-    public static final int RTREE_NUM_PAGES = 1000;
-    public static final int RTREE_MAX_OPEN_FILES = 10;
-    public static final int RTREE_HYRACKS_FRAME_SIZE = 128;
-
-    // Mem configuration for LSMRTree and LSMRTreeWithAntiMatterTuples.
-    public static final int LSM_RTREE_DISK_PAGE_SIZE = 512;
-    public static final int LSM_RTREE_DISK_NUM_PAGES = 1000;
-    public static final int LSM_RTREE_DISK_MAX_OPEN_FILES = 2000;
-    public static final int LSM_RTREE_MEM_PAGE_SIZE = 512;
-    public static final int LSM_RTREE_MEM_NUM_PAGES = 1000;
-    public static final int LSM_RTREE_HYRACKS_FRAME_SIZE = 128;
-
-    // Mem configuration for BTree.
-    public static final int BTREE_PAGE_SIZE = 256;
-    public static final int BTREE_NUM_PAGES = 100;
-    public static final int BTREE_MAX_OPEN_FILES = 10;
-    public static final int BTREE_HYRACKS_FRAME_SIZE = 128;
-
-    // Mem configuration for LSMBTree.
-    public static final int LSM_BTREE_DISK_PAGE_SIZE = 256;
-    public static final int LSM_BTREE_DISK_NUM_PAGES = 1000;
-    public static final int LSM_BTREE_DISK_MAX_OPEN_FILES = 200;
-    public static final int LSM_BTREE_MEM_PAGE_SIZE = 256;
-    public static final int LSM_BTREE_MEM_NUM_PAGES = 100;
-    public static final int LSM_BTREE_HYRACKS_FRAME_SIZE = 128;
-
-    // Mem configuration for Inverted Index.
-    public static final int LSM_INVINDEX_DISK_PAGE_SIZE = 1024;
-    public static final int LSM_INVINDEX_DISK_NUM_PAGES = 1000;
-    public static final int LSM_INVINDEX_DISK_MAX_OPEN_FILES = 1000;
-    public static final int LSM_INVINDEX_MEM_PAGE_SIZE = 1024;
-    public static final int LSM_INVINDEX_MEM_NUM_PAGES = 100;
-    public static final int LSM_INVINDEX_HYRACKS_FRAME_SIZE = 32768;
-    // Test parameters.
-    public static final int LSM_INVINDEX_NUM_DOCS_TO_INSERT = 100;
-    // Used for full-fledged search test.
-    public static final int LSM_INVINDEX_NUM_DOC_QUERIES = 1000;
-    public static final int LSM_INVINDEX_NUM_RANDOM_QUERIES = 1000;
-    // Used for non-search tests to sanity check index searches.
-    public static final int LSM_INVINDEX_TINY_NUM_DOC_QUERIES = 200;
-    public static final int LSM_INVINDEX_TINY_NUM_RANDOM_QUERIES = 200;
-    public static final int LSM_INVINDEX_NUM_BULKLOAD_ROUNDS = 5;
-    public static final int LSM_INVINDEX_MAX_TREES_TO_MERGE = 5;
-    public static final int LSM_INVINDEX_NUM_INSERT_ROUNDS = 3;
-    public static final int LSM_INVINDEX_NUM_DELETE_ROUNDS = 3;
-    // Allocate a generous size to make sure we have enough elements for all tests.
-    public static final int LSM_INVINDEX_SCAN_COUNT_ARRAY_SIZE = 1000000;
-    public static final int LSM_INVINDEX_MULTITHREAD_NUM_OPERATIONS = 200;
-
-    // Test params for BloomFilter
-    public static final int BLOOM_FILTER_NUM_TUPLES_TO_INSERT = 100;
-
-    // Mem configuration for BloomFilter.
-    public static final int BLOOM_FILTER_PAGE_SIZE = 256;
-    public static final int BLOOM_FILTER_NUM_PAGES = 1000;
-    public static final int BLOOM_FILTER_MAX_OPEN_FILES = 10;
-    public static final int BLOOM_FILTER_HYRACKS_FRAME_SIZE = 128;
-
-}
-
-/* ORIGINAL TEST PARAMETERS: DO NOT EDIT!
-// Test params for RTree, LSMRTree and LSMRTreeWithAntiMatterTuples.
-public static final int RTREE_NUM_TUPLES_TO_INSERT = 10000;
-public static final int RTREE_NUM_INSERT_ROUNDS = 2;
-public static final int RTREE_NUM_DELETE_ROUNDS = 2;
-public static final int RTREE_MULTITHREAD_NUM_OPERATIONS = 10000;
-// Test params for LSMRTree and LSMRTreeWithAntiMatterTuples.
-public static final int LSM_RTREE_BULKLOAD_ROUNDS = 5;
-public static final int LSM_RTREE_MAX_TREES_TO_MERGE = 3;	
-
-// Test params for BTree, LSMBTree.
-public static final int BTREE_NUM_TUPLES_TO_INSERT = 10000;
-public static final int BTREE_NUM_INSERT_ROUNDS = 3;
-public static final int BTREE_NUM_DELETE_ROUNDS = 3;
-public static final int BTREE_NUM_UPDATE_ROUNDS = 3;
-public static final int BTREE_MULTITHREAD_NUM_OPERATIONS = 10000;
-// Test params for LSMBTree only.
-public static final int LSM_BTREE_BULKLOAD_ROUNDS = 5;
-public static final int LSM_BTREE_MAX_TREES_TO_MERGE = 10;
-	
-	
-// Mem configuration for RTree.
-public static final int RTREE_PAGE_SIZE = 512;
-public static final int RTREE_NUM_PAGES = 1000;
-public static final int RTREE_MAX_OPEN_FILES = 10;
-public static final int RTREE_HYRACKS_FRAME_SIZE = 128;
-	
-// Mem configuration for LSMRTree and LSMRTreeWithAntiMatterTuples.
-public static final int LSM_RTREE_DISK_PAGE_SIZE = 256;
-public static final int LSM_RTREE_DISK_NUM_PAGES = 1000;
-public static final int LSM_RTREE_DISK_MAX_OPEN_FILES = 2000;
-public static final int LSM_RTREE_MEM_PAGE_SIZE = 256;
-public static final int LSM_RTREE_MEM_NUM_PAGES = 1000;
-public static final int LSM_RTREE_HYRACKS_FRAME_SIZE = 128;
-	
-// Mem configuration for BTree.
-public static final int BTREE_PAGE_SIZE = 256;
-public static final int BTREE_NUM_PAGES = 100;
-public static final int BTREE_MAX_OPEN_FILES = 10;
-public static final int BTREE_HYRACKS_FRAME_SIZE = 128;
-	
-// Mem configuration for LSMBTree.
-public static final int LSM_BTREE_DISK_PAGE_SIZE = 256;
-public static final int LSM_BTREE_DISK_NUM_PAGES = 1000;
-public static final int LSM_BTREE_DISK_MAX_OPEN_FILES = 200;
-public static final int LSM_BTREE_MEM_PAGE_SIZE = 256;
-public static final int LSM_BTREE_MEM_NUM_PAGES = 100;
-public static final int LSM_BTREE_HYRACKS_FRAME_SIZE = 128;
-
-// Mem configuration for Inverted Index.
-public static final int INVINDEX_PAGE_SIZE = 32768;
-public static final int INVINDEX_NUM_PAGES = 100;
-public static final int INVINDEX_MAX_OPEN_FILES = 10;
-public static final int INVINDEX_HYRACKS_FRAME_SIZE = 32768;
-
-// Mem configuration for Inverted Index.
-public static final int LSM_INVINDEX_DISK_PAGE_SIZE = 1024;
-public static final int LSM_INVINDEX_DISK_NUM_PAGES = 1000;
-public static final int LSM_INVINDEX_DISK_MAX_OPEN_FILES = 1000;
-public static final int LSM_INVINDEX_MEM_PAGE_SIZE = 1024;
-public static final int LSM_INVINDEX_MEM_NUM_PAGES = 100;
-public static final int LSM_INVINDEX_HYRACKS_FRAME_SIZE = 32768;
-// Test parameters.
-public static final int LSM_INVINDEX_NUM_DOCS_TO_INSERT = 10000;
-// Used for full-fledged search test.
-public static final int LSM_INVINDEX_NUM_DOC_QUERIES = 1000;
-public static final int LSM_INVINDEX_NUM_RANDOM_QUERIES = 1000;
-// Used for non-search tests to sanity check index searches.
-public static final int LSM_INVINDEX_TINY_NUM_DOC_QUERIES = 200;
-public static final int LSM_INVINDEX_TINY_NUM_RANDOM_QUERIES = 200;
-public static final int LSM_INVINDEX_NUM_BULKLOAD_ROUNDS = 5;
-public static final int LSM_INVINDEX_MAX_TREES_TO_MERGE = 5;
-public static final int LSM_INVINDEX_NUM_INSERT_ROUNDS = 3;
-public static final int LSM_INVINDEX_NUM_DELETE_ROUNDS = 3;
-// Allocate a generous size to make sure we have enough elements for all tests.
-public static final int LSM_INVINDEX_SCAN_COUNT_ARRAY_SIZE = 1000000;
-public static final int LSM_INVINDEX_MULTITHREAD_NUM_OPERATIONS = 10000;
-*/
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java
deleted file mode 100644
index 54ad1fe..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractRTreeBulkLoadTest extends AbstractRTreeTestDriver {
-
-    private final RTreeTestUtils rTreeTestUtils;
-    private final int bulkLoadRounds;
-
-    public AbstractRTreeBulkLoadTest(int bulkLoadRounds, boolean testRstarPolicy) {
-        super(testRstarPolicy);
-        this.bulkLoadRounds = bulkLoadRounds;
-        this.rTreeTestUtils = new RTreeTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key,
-            RTreePolicyType rtreePolicyType) throws Exception {
-        AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        for (int i = 0; i < bulkLoadRounds; i++) {
-            // We assume all fieldSerdes are of the same type. Check the first
-            // one to determine which field types to generate.
-            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-                rTreeTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
-            } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
-                rTreeTestUtils.bulkLoadDoubleTuples(ctx, numTuplesToInsert, getRandom());
-            }
-
-            rTreeTestUtils.checkScan(ctx);
-            rTreeTestUtils.checkDiskOrderScan(ctx);
-            rTreeTestUtils.checkRangeSearch(ctx, key);
-        }
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "BulkLoad";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
deleted file mode 100644
index 18d042b..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractRTreeDeleteTest extends AbstractRTreeTestDriver {
-
-    private final RTreeTestUtils rTreeTestUtils;
-
-    private static final int numInsertRounds = AccessMethodTestsConfig.RTREE_NUM_INSERT_ROUNDS;
-    private static final int numDeleteRounds = AccessMethodTestsConfig.RTREE_NUM_DELETE_ROUNDS;
-
-    public AbstractRTreeDeleteTest(boolean testRstarPolicy) {
-    	super(testRstarPolicy);
-        this.rTreeTestUtils = new RTreeTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key,
-            RTreePolicyType rtreePolicyType) throws Exception {
-        AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        for (int i = 0; i < numInsertRounds; i++) {
-            // We assume all fieldSerdes are of the same type. Check the first
-            // one to determine which field types to generate.
-            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-                rTreeTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
-            } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
-                rTreeTestUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
-            }
-            int numTuplesPerDeleteRound = (int) Math
-                    .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
-            for (int j = 0; j < numDeleteRounds; j++) {
-                rTreeTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
-                rTreeTestUtils.checkScan(ctx);
-                rTreeTestUtils.checkDiskOrderScan(ctx);
-                rTreeTestUtils.checkRangeSearch(ctx, key);
-            }
-        }
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "Delete";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
deleted file mode 100644
index f93e9b6..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
+++ /dev/null
@@ -1,809 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractRTreeExamplesTest {
-    protected static final Logger LOGGER = Logger.getLogger(AbstractRTreeExamplesTest.class.getName());
-    protected final Random rnd = new Random(50);
-
-    protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType)
-            throws TreeIndexException;
-
-    /**
-     * Two Dimensions Example. Create an RTree index of two dimensions, where
-     * they keys are of type integer, and the payload is two integer values.
-     * Fill index with random values using insertions (not bulk load). Perform
-     * scans and range search.
-     */
-    @Test
-    public void twoDimensionsExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Fixed-Length Key,Value Example.");
-        }
-
-        // Declare fields.
-        int fieldCount = 6;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[5] = IntegerPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-
-        // Declare RTree keys.
-        int rtreeKeyFieldCount = 4;
-        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
-        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // Declare BTree keys, this will only be used for LSMRTree
-        int btreeKeyFieldCount = 6;
-        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
-        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE);
-        treeIndex.create();
-        treeIndex.activate();
-
-        long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Inserting into tree...");
-        }
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-        int numInserts = 10000;
-        for (int i = 0; i < numInserts; i++) {
-            int p1x = rnd.nextInt();
-            int p1y = rnd.nextInt();
-            int p2x = rnd.nextInt();
-            int p2y = rnd.nextInt();
-
-            int pk1 = 5;
-            int pk2 = 10;
-
-            TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                    Math.max(p1y, p2y), pk1, pk2);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 1000 == 0) {
-                    LOGGER.info("Inserting " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
-                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + ", " + pk1 + ", " + pk2);
-                }
-            }
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            }
-        }
-        long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
-        }
-
-        scan(indexAccessor, fieldSerdes);
-        diskOrderScan(indexAccessor, fieldSerdes);
-
-        // Build key.
-        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
-        ArrayTupleReference key = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
-
-        rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
-
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * This test the rtree page split. Originally this test didn't pass since
-     * the rtree assumes always that there will be enough space for the new
-     * tuple after split. Now it passes since if there is not space in the
-     * designated page, then we will just insert it in the other split page.
-     */
-    @Test
-    public void rTreePageSplitTestExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree page split test.");
-        }
-
-        // Declare fields.
-        int fieldCount = 5;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Declare RTree keys.
-        int rtreeKeyFieldCount = 4;
-        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
-        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // Declare BTree keys, this will only be used for LSMRTree
-        int btreeKeyFieldCount = 5;
-        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
-        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE);
-
-        treeIndex.create();
-        treeIndex.activate();
-
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        int p1x = rnd.nextInt();
-        int p1y = rnd.nextInt();
-        int p2x = rnd.nextInt();
-        int p2y = rnd.nextInt();
-        String data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * This test the r*tree page split. Originally this test didn't pass since
-     * the r*tree assumes always that there will be enough space for the new
-     * tuple after split. Now it passes since if there is not space in the
-     * designated page, then we will just insert it in the other split page.
-     */
-    @Test
-    public void rStarTreePageSplitTestExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("R*Tree page split test.");
-        }
-
-        // Declare fields.
-        int fieldCount = 5;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-
-        // Declare RTree keys.
-        int rtreeKeyFieldCount = 4;
-        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
-        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // Declare BTree keys, this will only be used for LSMRTree
-        int btreeKeyFieldCount = 5;
-        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
-        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RSTARTREE);
-
-        treeIndex.create();
-        treeIndex.activate();
-
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        int p1x = rnd.nextInt();
-        int p1y = rnd.nextInt();
-        int p2x = rnd.nextInt();
-        int p2y = rnd.nextInt();
-        String data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        p1x = rnd.nextInt();
-        p1y = rnd.nextInt();
-        p2x = rnd.nextInt();
-        p2y = rnd.nextInt();
-        data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
-        TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                Math.max(p1y, p2y), data);
-        indexAccessor.insert(tuple);
-
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Two Dimensions Example. Create an RTree index of three dimensions, where
-     * they keys are of type double, and the payload is one double value. Fill
-     * index with random values using insertions (not bulk load). Perform scans
-     * and range search.
-     */
-    @Test
-    public void threeDimensionsExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Fixed-Length Key,Value Example.");
-        }
-
-        // Declare fields.
-        int fieldCount = 7;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = DoublePointable.TYPE_TRAITS;
-        typeTraits[1] = DoublePointable.TYPE_TRAITS;
-        typeTraits[2] = DoublePointable.TYPE_TRAITS;
-        typeTraits[3] = DoublePointable.TYPE_TRAITS;
-        typeTraits[4] = DoublePointable.TYPE_TRAITS;
-        typeTraits[5] = DoublePointable.TYPE_TRAITS;
-        typeTraits[6] = DoublePointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        // Declare RTree keys.
-        int rtreeKeyFieldCount = 6;
-        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
-        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        rtreeCmpFactories[4] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        rtreeCmpFactories[5] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-
-        // Declare RTree keys.
-        int btreeKeyFieldCount = 7;
-        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
-        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        btreeCmpFactories[6] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-
-        // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, DoublePointable.FACTORY);
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE);
-        treeIndex.create();
-        treeIndex.activate();
-
-        long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Inserting into tree...");
-        }
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-        int numInserts = 10000;
-        for (int i = 0; i < numInserts; i++) {
-            double p1x = rnd.nextDouble();
-            double p1y = rnd.nextDouble();
-            double p1z = rnd.nextDouble();
-            double p2x = rnd.nextDouble();
-            double p2y = rnd.nextDouble();
-            double p2z = rnd.nextDouble();
-
-            double pk = 5.0;
-
-            TupleUtils.createDoubleTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.min(p1z, p2z),
-                    Math.max(p1x, p2x), Math.max(p1y, p2y), Math.max(p1z, p2z), pk);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 1000 == 0) {
-                    LOGGER.info("Inserting " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
-                            + Math.min(p1z, p2z) + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " "
-                            + Math.max(p1z, p2z) + ", " + pk);
-                }
-            }
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            }
-        }
-        long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
-        }
-
-        scan(indexAccessor, fieldSerdes);
-        diskOrderScan(indexAccessor, fieldSerdes);
-
-        // Build key.
-        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
-        ArrayTupleReference key = new ArrayTupleReference();
-        TupleUtils.createDoubleTuple(keyTb, key, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0);
-
-        rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
-
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Deletion Example. Create an RTree index of two dimensions, where they
-     * keys are of type integer, and the payload is one integer value. Fill
-     * index with random values using insertions, then delete entries
-     * one-by-one. Repeat procedure a few times on same RTree.
-     */
-    @Test
-    public void deleteExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Deletion Example");
-        }
-
-        // Declare fields.
-        int fieldCount = 5;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
-
-        // Declare RTree keys.
-        int rtreeKeyFieldCount = 4;
-        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
-        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // Declare BTree keys.
-        int btreeKeyFieldCount = 5;
-        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
-        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE);
-        treeIndex.create();
-        treeIndex.activate();
-
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-
-        int runs = 3;
-        for (int run = 0; run < runs; run++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
-                LOGGER.info("Inserting into tree...");
-            }
-
-            int numInserts = 10000;
-            int[] p1xs = new int[numInserts];
-            int[] p1ys = new int[numInserts];
-            int[] p2xs = new int[numInserts];
-            int[] p2ys = new int[numInserts];
-            int[] pks = new int[numInserts];
-            int insDone = 0;
-
-            int[] insDoneCmp = new int[numInserts];
-            for (int i = 0; i < numInserts; i++) {
-                int p1x = rnd.nextInt();
-                int p1y = rnd.nextInt();
-                int p2x = rnd.nextInt();
-                int p2y = rnd.nextInt();
-                int pk = 5;
-
-                p1xs[i] = Math.min(p1x, p2x);
-                p1ys[i] = Math.min(p1y, p2y);
-                p2xs[i] = Math.max(p1x, p2x);
-                p2ys[i] = Math.max(p1y, p2y);
-                pks[i] = pk;
-
-                TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                        Math.max(p1y, p2y), pk);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    if (i % 1000 == 0) {
-                        LOGGER.info("Inserting " + i);
-                    }
-                }
-                try {
-                    indexAccessor.insert(tuple);
-                } catch (TreeIndexException e) {
-                }
-                insDoneCmp[i] = insDone;
-            }
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Deleting from tree...");
-            }
-            int delDone = 0;
-            for (int i = 0; i < numInserts; i++) {
-                TupleUtils.createIntegerTuple(tb, tuple, p1xs[i], p1ys[i], p2xs[i], p2ys[i], pks[i]);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    if (i % 1000 == 0) {
-                        LOGGER.info("Deleting " + i);
-                    }
-                }
-                try {
-                    indexAccessor.delete(tuple);
-                    delDone++;
-                } catch (TreeIndexException e) {
-                }
-                if (insDoneCmp[i] != delDone) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
-                        LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
-                    }
-                    break;
-                }
-            }
-            if (insDone != delDone) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
-                }
-                break;
-            }
-        }
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    /**
-     * Bulk load example. Load a tree with 10,000 tuples.
-     */
-    @Test
-    public void bulkLoadExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Bulk load example");
-        }
-        // Declare fields.
-        int fieldCount = 5;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
-
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        // Declare RTree keys.
-        int rtreeKeyFieldCount = 4;
-        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
-        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // Declare BTree keys.
-        int btreeKeyFieldCount = 5;
-        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
-        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
-
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE);
-        treeIndex.create();
-        treeIndex.activate();
-
-        // Load records.
-        int numInserts = 10000;
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Bulk loading " + numInserts + " tuples");
-        }
-        long start = System.currentTimeMillis();
-        IIndexBulkLoader bulkLoader = treeIndex.createBulkLoader(0.7f, false, numInserts);
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-
-        for (int i = 0; i < numInserts; i++) {
-            int p1x = rnd.nextInt();
-            int p1y = rnd.nextInt();
-            int p2x = rnd.nextInt();
-            int p2y = rnd.nextInt();
-
-            int pk = 5;
-
-            TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                    Math.max(p1y, p2y), pk);
-            bulkLoader.add(tuple);
-        }
-
-        bulkLoader.end();
-        long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(numInserts + " tuples loaded in " + (end - start) + "ms");
-        }
-
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-
-        // Build key.
-        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
-        ArrayTupleReference key = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
-
-        rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
-
-        treeIndex.deactivate();
-        treeIndex.destroy();
-    }
-
-    private void scan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Scan:");
-        }
-        ITreeIndexCursor scanCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor();
-        SearchPredicate nullPred = new SearchPredicate(null, null);
-        indexAccessor.search(scanCursor, nullPred);
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(rec);
-                }
-            }
-        } finally {
-            scanCursor.close();
-        }
-    }
-
-    private void diskOrderScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
-        try {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Disk-Order Scan:");
-            }
-            ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
-            TreeIndexDiskOrderScanCursor diskOrderCursor = (TreeIndexDiskOrderScanCursor) treeIndexAccessor
-                    .createDiskOrderScanCursor();
-            treeIndexAccessor.diskOrderScan(diskOrderCursor);
-            try {
-                while (diskOrderCursor.hasNext()) {
-                    diskOrderCursor.next();
-                    ITupleReference frameTuple = diskOrderCursor.getTuple();
-                    String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info(rec);
-                    }
-                }
-            } finally {
-                diskOrderCursor.close();
-            }
-        } catch (UnsupportedOperationException e) {
-            // Ignore exception because some indexes, e.g. the LSMRTree, don't
-            // support disk-order scan.
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring disk-order scan since it's not supported.");
-            }
-        } catch (ClassCastException e) {
-            // Ignore exception because IIndexAccessor sometimes isn't
-            // an ITreeIndexAccessor, e.g., for the LSMRTree.
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring disk-order scan since it's not supported.");
-            }
-        }
-    }
-
-    private void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
-            ISerializerDeserializer[] fieldSerdes, ITupleReference key) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            String kString = TupleUtils.printTuple(key, fieldSerdes);
-            LOGGER.info("Range-Search using key: " + kString);
-        }
-        ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor();
-        MultiComparator cmp = RTreeUtils.getSearchMultiComparator(cmpFactories, key);
-        SearchPredicate rangePred = new SearchPredicate(key, cmp);
-        indexAccessor.search(rangeCursor, rangePred);
-        try {
-            while (rangeCursor.hasNext()) {
-                rangeCursor.next();
-                ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(rec);
-                }
-            }
-        } finally {
-            rangeCursor.close();
-        }
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java
deleted file mode 100644
index eb90989..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-/**
- * Tests the RTree insert operation with integer and double fields using various
- * numbers of dimensions and payload fields.
- * Each tests first fills an RTree with randomly generated tuples. We compare
- * the following operations against expected results: 1. RTree scan. 3.
- * Disk-order scan. 4. Range search.
- */
-@SuppressWarnings("rawtypes")
-public abstract class AbstractRTreeInsertTest extends AbstractRTreeTestDriver {
-
-    private final RTreeTestUtils rTreeTestUtils;
-
-    public AbstractRTreeInsertTest(boolean testRstarPolicy) {
-    	super(testRstarPolicy);
-        this.rTreeTestUtils = new RTreeTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key,
-            RTreePolicyType rtreePolicyType) throws Exception {
-        AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        // We assume all fieldSerdes are of the same type. Check the first one
-        // to determine which field types to generate.
-        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-            rTreeTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
-        } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
-            rTreeTestUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
-        }
-
-        rTreeTestUtils.checkScan(ctx);
-        rTreeTestUtils.checkDiskOrderScan(ctx);
-        rTreeTestUtils.checkRangeSearch(ctx, key);
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "Insert";
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
deleted file mode 100644
index bb0e91d..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.IndexMultiThreadTestDriver;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractRTreeMultiThreadTest {
-
-	protected final boolean testRstarPolicy;
-	
-	public AbstractRTreeMultiThreadTest(boolean testRstarPolicy) {
-		this.testRstarPolicy = testRstarPolicy;
-	}
-	
-    protected final Logger LOGGER = Logger.getLogger(AbstractRTreeMultiThreadTest.class.getName());
-
-    // Machine-specific number of threads to use for testing.
-    protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
-    // Excessive number of threads for testing.
-    protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
-    protected final int NUM_OPERATIONS = AccessMethodTestsConfig.RTREE_MULTITHREAD_NUM_OPERATIONS;
-
-    protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
-
-    protected abstract void setUp() throws HyracksException;
-
-    protected abstract void tearDown() throws HyracksDataException;
-
-    protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType)
-            throws TreeIndexException;
-
-    protected abstract IIndexTestWorkerFactory getWorkerFactory();
-
-    protected abstract ArrayList<TestWorkloadConf> getTestWorkloadConf();
-
-    protected abstract String getIndexTypeName();
-
-    protected void runTest(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType,
-            int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksException, InterruptedException,
-            TreeIndexException {
-        setUp();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            String indexTypeName = getIndexTypeName();
-            LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
-                    + "; Workload: " + conf.toString() + ".");
-        }
-
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);
-        IBinaryComparatorFactory[] btreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
-                fieldSerdes.length);
-
-        ITreeIndex index = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
-                rtreePolicyType);
-        IIndexTestWorkerFactory workerFactory = getWorkerFactory();
-
-        // 4 batches per thread.
-        int batchSize = (NUM_OPERATIONS / numThreads) / 4;
-
-        IndexMultiThreadTestDriver driver = new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes,
-                conf.ops, conf.opProbs);
-        driver.init();
-        long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
-        driver.deinit();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree MultiThread Test Time: " + times[0] + "ms");
-        }
-
-        tearDown();
-    }
-
-    @Test
-    public void rtreeTwoDimensionsInt() throws InterruptedException, HyracksException, TreeIndexException {
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, IntegerPointable.FACTORY);
-
-        String dataMsg = "Two Dimensions Of Integer Values";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE, REGULAR_NUM_THREADS, conf,
-                    dataMsg);
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE, EXCESSIVE_NUM_THREADS, conf,
-                    dataMsg);
-        }
-    }
-
-    @Test
-    public void rtreeTwoDimensionsDouble() throws Exception {
-        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-
-        String dataMsg = "Two Dimensions Of Double Values";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE, REGULAR_NUM_THREADS, conf,
-                    dataMsg);
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE, EXCESSIVE_NUM_THREADS, conf,
-                    dataMsg);
-        }
-
-    }
-
-    @Test
-    public void rtreeFourDimensionsDouble() throws InterruptedException, HyracksException, TreeIndexException {
-    	ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 8;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-
-        String dataMsg = "Four Dimensions Of Double Values";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE, REGULAR_NUM_THREADS, conf,
-                    dataMsg);
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE, EXCESSIVE_NUM_THREADS, conf,
-                    dataMsg);
-        }
-    }
-
-    @Test
-    public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksException, TreeIndexException {
-    	if (!testRstarPolicy) {
-    		if (LOGGER.isLoggable(Level.INFO)) {
-    			LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Integer Keys.");
-    		}
-    		return;
-    	}
-    	
-    	ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, IntegerPointable.FACTORY);
-
-        String dataMsg = "Two Dimensions Of Integer Values";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RSTARTREE, REGULAR_NUM_THREADS, conf,
-                    dataMsg);
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RSTARTREE, EXCESSIVE_NUM_THREADS,
-                    conf, dataMsg);
-        }
-    }
-
-    @Test
-    public void rstartreeTwoDimensionsDouble() throws Exception {
-    	if (!testRstarPolicy) {
-    		if (LOGGER.isLoggable(Level.INFO)) {
-    			LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Double Keys.");
-    		}
-    		return;
-    	}
-    	
-    	ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-
-        String dataMsg = "Two Dimensions Of Double Values";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RSTARTREE, REGULAR_NUM_THREADS, conf,
-                    dataMsg);
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RSTARTREE, EXCESSIVE_NUM_THREADS,
-                    conf, dataMsg);
-        }
-
-    }
-
-    @Test
-    public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksException, TreeIndexException {
-    	if (!testRstarPolicy) {
-    		if (LOGGER.isLoggable(Level.INFO)) {
-    			LOGGER.info("Ignoring RTree Multithread Test With Four Dimensions With Double Keys.");
-    		}
-    		return;
-    	}
-    	
-    	ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 8;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-
-        String dataMsg = "Four Dimensions Of Double Values";
-
-        for (TestWorkloadConf conf : workloadConfs) {
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RSTARTREE, REGULAR_NUM_THREADS, conf,
-                    dataMsg);
-            runTest(fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RSTARTREE, EXCESSIVE_NUM_THREADS,
-                    conf, dataMsg);
-        }
-    }
-
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java
deleted file mode 100644
index eed8df6..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.Collection;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.IndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.util.HashMultiSet;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractRTreeTestContext extends IndexTestContext<RTreeCheckTuple> {
-    private final HashMultiSet<RTreeCheckTuple> checkTuples = new HashMultiSet<RTreeCheckTuple>();
-	
-    public AbstractRTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
-        super(fieldSerdes, treeIndex);
-    }
-
-    @Override
-    public Collection<RTreeCheckTuple> getCheckTuples() {
-        return checkTuples;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
deleted file mode 100644
index a93dcc1..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractRTreeTestDriver {
-	protected final boolean testRstarPolicy;
-	
-	public AbstractRTreeTestDriver(boolean testRstarPolicy) {
-		this.testRstarPolicy = testRstarPolicy;
-	}
-	
-    protected final Logger LOGGER = Logger.getLogger(AbstractRTreeTestDriver.class.getName());
-
-    protected static final int numTuplesToInsert = AccessMethodTestsConfig.RTREE_NUM_TUPLES_TO_INSERT;
-
-    protected abstract AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception;
-
-    protected abstract Random getRandom();
-
-    protected abstract void runTest(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key,
-            RTreePolicyType rtreePolicyType) throws Exception;
-
-    protected abstract String getTestOpName();
-
-    @Test
-    public void rtreeTwoDimensionsInt() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, IntegerPointable.FACTORY);
-        // Range search, the rectangle bottom left coordinates are -1000, -1000
-        // and the top right coordinates are 1000, 1000
-        ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
-
-        runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RTREE);
-
-    }
-
-    @Test
-    public void rtreeTwoDimensionsDouble() throws Exception {
-    	if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-        // Range search, the rectangle bottom left coordinates are -1000.0,
-        // -1000.0 and the top right coordinates are 1000.0, 1000.0
-        ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
-
-        runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RTREE);
-
-    }
-
-    @Test
-    public void rtreeFourDimensionsDouble() throws Exception {
-    	if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 8;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-        // Range search, the rectangle bottom left coordinates are -1000.0,
-        // -1000.0, -1000.0, -1000.0 and the top right coordinates are 1000.0,
-        // 1000.0, 1000.0, 1000.0
-        ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
-                1000.0);
-
-        runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RTREE);
-    }
-
-    @Test
-    public void rstartreeTwoDimensionsInt() throws Exception {
-    	if (!testRstarPolicy) {
-    		if (LOGGER.isLoggable(Level.INFO)) {
-    			LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
-            }
-    		return;
-    	}
-    	if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, IntegerPointable.FACTORY);
-        // Range search, the rectangle bottom left coordinates are -1000, -1000
-        // and the top right coordinates are 1000, 1000
-        ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
-
-        runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RSTARTREE);
-
-    }
-
-    @Test
-    public void rstartreeTwoDimensionsDouble() throws Exception {
-    	if (!testRstarPolicy) {
-    		if (LOGGER.isLoggable(Level.INFO)) {
-    			LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
-            }
-    		return;
-    	}
-    	if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-        // Range search, the rectangle bottom left coordinates are -1000.0,
-        // -1000.0 and the top right coordinates are 1000.0, 1000.0
-        ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
-
-        runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RSTARTREE);
-
-    }
-
-    @Test
-    public void rstartreeFourDimensionsDouble() throws Exception {
-    	if (!testRstarPolicy) {
-    		if (LOGGER.isLoggable(Level.INFO)) {
-    			LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
-            }
-    		return;
-    	}
-    	if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
-        }
-
-        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
-
-        int numKeys = 8;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
-        // Range search, the rectangle bottom left coordinates are -1000.0,
-        // -1000.0, -1000.0, -1000.0 and the top right coordinates are 1000.0,
-        // 1000.0, 1000.0, 1000.0
-        ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
-                1000.0);
-
-        runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RSTARTREE);
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java
deleted file mode 100644
index c498136..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-
-@SuppressWarnings({ "rawtypes", "unchecked" })
-public class RTreeCheckTuple<T> extends CheckTuple {
-
-    public RTreeCheckTuple(int numFields, int numKeys) {
-        super(numFields, numKeys);
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        RTreeCheckTuple<T> other = (RTreeCheckTuple<T>) o;
-        for (int i = 0; i < fields.length; i++) {
-            int cmp = fields[i].compareTo(other.getField(i));
-            if (cmp != 0) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    public boolean intersect(T o) {
-        RTreeCheckTuple<T> other = (RTreeCheckTuple<T>) o;
-        int maxFieldPos = numKeys / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int cmp = fields[i].compareTo(other.getField(j));
-            if (cmp > 0) {
-                return false;
-            }
-            cmp = fields[j].compareTo(other.getField(i));
-            if (cmp < 0) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java
deleted file mode 100644
index 067c6cb..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java
+++ /dev/null
@@ -1,240 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import static org.junit.Assert.fail;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.util.HashMultiSet;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-@SuppressWarnings("rawtypes")
-public class RTreeTestUtils extends TreeIndexTestUtils {
-    private static final Logger LOGGER = Logger.getLogger(RTreeTestUtils.class.getName());
-    private int intPayloadValue = 0;
-    private double doublePayloadValue = 0.0;
-
-    @SuppressWarnings("unchecked")
-    // Create a new ArrayList containing the elements satisfying the search key
-    public HashMultiSet<RTreeCheckTuple> getRangeSearchExpectedResults(Collection<RTreeCheckTuple> checkTuples,
-            RTreeCheckTuple key) {
-        HashMultiSet<RTreeCheckTuple> expectedResult = new HashMultiSet<RTreeCheckTuple>();
-        Iterator<RTreeCheckTuple> iter = checkTuples.iterator();
-        while (iter.hasNext()) {
-            RTreeCheckTuple t = iter.next();
-            if (t.intersect(key)) {
-                expectedResult.add(t);
-            }
-        }
-        return expectedResult;
-    }
-
-    public void checkRangeSearch(IIndexTestContext ictx, ITupleReference key) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Testing Range Search.");
-        }
-        AbstractRTreeTestContext ctx = (AbstractRTreeTestContext) ictx;
-        MultiComparator cmp = RTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), key);
-
-        ITreeIndexCursor searchCursor = (ITreeIndexCursor) ctx.getIndexAccessor().createSearchCursor();
-        SearchPredicate searchPred = new SearchPredicate(key, cmp);
-        ctx.getIndexAccessor().search(searchCursor, searchPred);
-
-        // Get the subset of elements from the expected set within given key
-        // range.
-        RTreeCheckTuple keyCheck = (RTreeCheckTuple) createCheckTupleFromTuple(key, ctx.getFieldSerdes(),
-                cmp.getKeyFieldCount());
-
-        HashMultiSet<RTreeCheckTuple> expectedResult = null;
-
-        expectedResult = getRangeSearchExpectedResults(ctx.getCheckTuples(), keyCheck);
-        checkExpectedResults(searchCursor, expectedResult, ctx.getFieldSerdes(), ctx.getKeyFieldCount(), null);
-    }
-
-    @SuppressWarnings("unchecked")
-    public void insertDoubleTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        double[] fieldValues = new double[ctx.getFieldCount()];
-        // Scale range of values according to number of keys.
-        // For example, for 2 keys we want the square root of numTuples, for 3
-        // keys the cube root of numTuples, etc.
-        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
-        for (int i = 0; i < numTuples; i++) {
-            // Set keys.
-            setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
-            // Set values.
-            setDoublePayloadFields(fieldValues, numKeyFields, fieldCount);
-            TupleUtils.createDoubleTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
-                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
-                }
-            }
-            try {
-                ctx.getIndexAccessor().insert(ctx.getTuple());
-                ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (TreeIndexException e) {
-                // We set expected values only after insertion succeeds because
-                // we
-                // ignore duplicate keys.
-            }
-        }
-    }
-
-    private void setDoubleKeyFields(double[] fieldValues, int numKeyFields, double maxValue, Random rnd) {
-        int maxFieldPos = numKeyFields / 2;
-        for (int j = 0; j < maxFieldPos; j++) {
-            int k = maxFieldPos + j;
-            double firstValue = rnd.nextDouble() % maxValue;
-            double secondValue;
-            do {
-                secondValue = rnd.nextDouble() % maxValue;
-            } while (secondValue < firstValue);
-            fieldValues[j] = firstValue;
-            fieldValues[k] = secondValue;
-        }
-    }
-
-    private void setDoublePayloadFields(double[] fieldValues, int numKeyFields, int numFields) {
-        for (int j = numKeyFields; j < numFields; j++) {
-            fieldValues[j] = doublePayloadValue++;
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    protected CheckTuple createDoubleCheckTuple(double[] fieldValues, int numKeyFields) {
-        RTreeCheckTuple<Double> checkTuple = new RTreeCheckTuple<Double>(fieldValues.length, numKeyFields);
-        for (double v : fieldValues) {
-            checkTuple.appendField(v);
-        }
-        return checkTuple;
-    }
-
-    @SuppressWarnings("unchecked")
-    public void bulkLoadDoubleTuples(IIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
-        int fieldCount = ctx.getFieldCount();
-        int numKeyFields = ctx.getKeyFieldCount();
-        double[] fieldValues = new double[ctx.getFieldCount()];
-        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
-        Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
-        for (int i = 0; i < numTuples; i++) {
-            // Set keys.
-            setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
-            // Set values.
-            setDoublePayloadFields(fieldValues, numKeyFields, fieldCount);
-
-            // Set expected values.
-            ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
-        }
-        bulkLoadCheckTuples(ctx, tmpCheckTuples);
-
-        // Add tmpCheckTuples to ctx check tuples for comparing searches.
-        for (CheckTuple checkTuple : tmpCheckTuples) {
-            ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
-        }
-    }
-
-    @Override
-    public void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
-            ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
-        int actualCount = 0;
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-                ITupleReference tuple = cursor.getTuple();
-                RTreeCheckTuple checkTuple = (RTreeCheckTuple) createCheckTupleFromTuple(tuple, fieldSerdes,
-                        keyFieldCount);
-                if (!checkTuples.contains(checkTuple)) {
-                    fail("Scan or range search returned unexpected answer: " + checkTuple.toString());
-                }
-                actualCount++;
-            }
-            if (actualCount < checkTuples.size()) {
-                fail("Scan or range search returned fewer answers than expected.\nExpected: " + checkTuples.size()
-                        + "\nActual  : " + actualCount);
-            }
-            if (actualCount > checkTuples.size()) {
-                fail("Scan or range search returned more answers than expected.\nExpected: " + checkTuples.size()
-                        + "\nActual  : " + actualCount);
-            }
-        } finally {
-            cursor.close();
-        }
-    }
-
-    @Override
-    protected CheckTuple createCheckTuple(int numFields, int numKeyFields) {
-        return new RTreeCheckTuple(numFields, numKeyFields);
-    }
-
-    @Override
-    protected ISearchPredicate createNullSearchPredicate() {
-        return new SearchPredicate(null, null);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
-        RTreeCheckTuple<Integer> checkTuple = new RTreeCheckTuple<Integer>(fieldValues.length, numKeyFields);
-        for (int v : fieldValues) {
-            checkTuple.appendField(v);
-        }
-        return checkTuple;
-    }
-
-    @Override
-    protected void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd) {
-        int maxFieldPos = numKeyFields / 2;
-        for (int j = 0; j < maxFieldPos; j++) {
-            int k = maxFieldPos + j;
-            int firstValue = rnd.nextInt() % maxValue;
-            int secondValue;
-            do {
-                secondValue = rnd.nextInt() % maxValue;
-            } while (secondValue < firstValue);
-            fieldValues[j] = firstValue;
-            fieldValues[k] = secondValue;
-        }
-    }
-
-    @Override
-    protected void setIntPayloadFields(int[] fieldValues, int numKeyFields, int numFields) {
-        for (int j = numKeyFields; j < numFields; j++) {
-            fieldValues[j] = intPayloadValue++;
-        }
-    }
-
-    @Override
-    protected Collection createCheckTuplesCollection() {
-        return new ArrayList<RTreeCheckTuple>();
-    }
-
-    @Override
-    protected ArrayTupleBuilder createDeleteTupleBuilder(IIndexTestContext ctx) {
-        return new ArrayTupleBuilder(ctx.getFieldCount());
-    }
-
-    @Override
-    protected boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, IIndexTestContext ctx)
-            throws HyracksDataException {
-        return ctx.getCheckTuples().contains(checkTuple);
-    }
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestIndexLifecycleManagerProvider.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestIndexLifecycleManagerProvider.java
deleted file mode 100644
index 3a4d3e7..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestIndexLifecycleManagerProvider.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package edu.uci.ics.hyracks.test.support;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-
-public class TestIndexLifecycleManagerProvider implements IIndexLifecycleManagerProvider {
-
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public IIndexLifecycleManager getLifecycleManager(IHyracksTaskContext ctx) {
-        return TestStorageManagerComponentHolder.getIndexLifecycleManager(ctx);
-    }
-
-}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestRootContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestRootContext.java
deleted file mode 100644
index e195036..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestRootContext.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.test.support;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Executors;
-
-import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-
-public class TestRootContext implements IHyracksRootContext {
-    private IOManager ioManager;
-
-    public TestRootContext() throws HyracksException {
-        List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
-        devices.add(new IODeviceHandle(new File(System.getProperty("java.io.tmpdir")), "."));
-        ioManager = new IOManager(devices, Executors.newCachedThreadPool());
-    }
-
-    @Override
-    public IIOManager getIOManager() {
-        return ioManager;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java
deleted file mode 100644
index b704bf5..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.test.support;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Executors;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceRepositoryFactory;
-
-public class TestStorageManagerComponentHolder {
-    private static IBufferCache bufferCache;
-    private static IFileMapProvider fileMapProvider;
-    private static IOManager ioManager;
-    private static ILocalResourceRepository localResourceRepository;
-    private static IIndexLifecycleManager lcManager;
-    private static ResourceIdFactory resourceIdFactory;
-
-    private static int pageSize;
-    private static int numPages;
-    private static int maxOpenFiles;
-
-    public static void init(int pageSize, int numPages, int maxOpenFiles) {
-        TestStorageManagerComponentHolder.pageSize = pageSize;
-        TestStorageManagerComponentHolder.numPages = numPages;
-        TestStorageManagerComponentHolder.maxOpenFiles = maxOpenFiles;
-        bufferCache = null;
-        fileMapProvider = null;
-        localResourceRepository = null;
-        lcManager = null;
-    }
-
-    public synchronized static IIndexLifecycleManager getIndexLifecycleManager(IHyracksTaskContext ctx) {
-        if (lcManager == null) {
-            lcManager = new IndexLifecycleManager();
-        }
-        return lcManager;
-    }
-
-    public synchronized static IBufferCache getBufferCache(IHyracksTaskContext ctx) {
-        if (bufferCache == null) {
-            ICacheMemoryAllocator allocator = new HeapBufferAllocator();
-            IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
-            IFileMapProvider fileMapProvider = getFileMapProvider(ctx);
-            bufferCache = new BufferCache(ctx.getIOManager(), allocator, prs, (IFileMapManager) fileMapProvider,
-                    pageSize, numPages, maxOpenFiles);
-        }
-        return bufferCache;
-    }
-
-    public synchronized static IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
-        if (fileMapProvider == null) {
-            fileMapProvider = new TransientFileMapManager();
-        }
-        return fileMapProvider;
-    }
-
-    public synchronized static IOManager getIOManager() throws HyracksException {
-        if (ioManager == null) {
-            List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
-            devices.add(new IODeviceHandle(new File(System.getProperty("java.io.tmpdir")), "iodev_test_wa"));
-            ioManager = new IOManager(devices, Executors.newCachedThreadPool());
-        }
-        return ioManager;
-    }
-
-    public synchronized static ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx) {
-        if (localResourceRepository == null) {
-            try {
-                ILocalResourceRepositoryFactory localResourceRepositoryFactory = new TransientLocalResourceRepositoryFactory();
-                localResourceRepository = localResourceRepositoryFactory.createRepository();
-            } catch (HyracksException e) {
-                //In order not to change the IStorageManagerInterface due to the test code, throw runtime exception.
-                throw new IllegalArgumentException();
-            }
-        }
-        return localResourceRepository;
-    }
-
-    public synchronized static ResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx) {
-        if (resourceIdFactory == null) {
-            try {
-                ResourceIdFactoryProvider resourceIdFactoryFactory = new ResourceIdFactoryProvider(
-                        getLocalResourceRepository(ctx));
-                resourceIdFactory = resourceIdFactoryFactory.createResourceIdFactory();
-            } catch (HyracksException e) {
-                //In order not to change the IStorageManagerInterface due to the test code, throw runtime exception.
-                throw new IllegalArgumentException();
-            }
-        }
-        return resourceIdFactory;
-    }
-}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerInterface.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerInterface.java
deleted file mode 100644
index 26c7861..0000000
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerInterface.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.test.support;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-
-public class TestStorageManagerInterface implements IStorageManagerInterface {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
-        return TestStorageManagerComponentHolder.getBufferCache(ctx);
-    }
-
-    @Override
-    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
-        return TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-    }
-
-    @Override
-    public ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx) {
-        return TestStorageManagerComponentHolder.getLocalResourceRepository(ctx);
-    }
-
-	@Override
-	public ResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx) {
-		return TestStorageManagerComponentHolder.getResourceIdFactory(ctx);
-	}
-}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml b/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
deleted file mode 100644
index 3b15677..0000000
--- a/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-bloomfilter-test</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-tests</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-bloomfilter</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/BloomFilterTest.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/BloomFilterTest.java
deleted file mode 100644
index 6dab32c..0000000
--- a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/BloomFilterTest.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter;
-
-import java.util.ArrayList;
-import java.util.Random;
-import java.util.TreeSet;
-import java.util.logging.Level;
-
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomCalculations;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilter;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterSpecification;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.util.AbstractBloomFilterTest;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-@SuppressWarnings("rawtypes")
-public class BloomFilterTest extends AbstractBloomFilterTest {
-    private final Random rnd = new Random(50);
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        super.setUp();
-    }
-
-    @Test
-    public void singleFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING BLOOM FILTER");
-        }
-
-        IBufferCache bufferCache = harness.getBufferCache();
-
-        int numElements = 100;
-        int[] keyFields = { 0 };
-
-        BloomFilter bf = new BloomFilter(bufferCache, harness.getFileMapProvider(), harness.getFileReference(),
-                keyFields);
-
-        double acceptanleFalsePositiveRate = 0.1;
-        int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
-        BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
-                acceptanleFalsePositiveRate);
-
-        bf.create();
-        bf.activate();
-        IIndexBulkLoader builder = bf.createBuilder(numElements, bloomFilterSpec.getNumHashes(),
-                bloomFilterSpec.getNumBucketsPerElements());
-
-        int fieldCount = 2;
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-
-        // generate keys
-        int maxKey = 1000;
-        TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
-        ArrayList<Integer> keys = new ArrayList<Integer>();
-        while (uniqueKeys.size() < numElements) {
-            int key = rnd.nextInt() % maxKey;
-            uniqueKeys.add(key);
-        }
-        for (Integer i : uniqueKeys) {
-            keys.add(i);
-        }
-
-        // Insert tuples in the bloom filter
-        for (int i = 0; i < keys.size(); ++i) {
-            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
-            builder.add(tuple);
-        }
-        builder.end();
-
-        // Check all the inserted tuples can be found.
-
-        long[] hashes = new long[2];
-        for (int i = 0; i < keys.size(); ++i) {
-            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
-            Assert.assertTrue(bf.contains(tuple, hashes));
-        }
-
-        bf.deactivate();
-        bf.destroy();
-    }
-
-    @Test
-    public void multiFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING BLOOM FILTER");
-        }
-
-        IBufferCache bufferCache = harness.getBufferCache();
-
-        int numElements = 10000;
-        int[] keyFields = { 2, 4, 1 };
-
-        BloomFilter bf = new BloomFilter(bufferCache, harness.getFileMapProvider(), harness.getFileReference(),
-                keyFields);
-
-        double acceptanleFalsePositiveRate = 0.1;
-        int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
-        BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
-                acceptanleFalsePositiveRate);
-
-        bf.create();
-        bf.activate();
-        IIndexBulkLoader builder = bf.createBuilder(numElements, bloomFilterSpec.getNumHashes(),
-                bloomFilterSpec.getNumBucketsPerElements());
-
-        int fieldCount = 5;
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-
-        int maxLength = 20;
-        ArrayList<String> s1 = new ArrayList<String>();
-        ArrayList<String> s2 = new ArrayList<String>();
-        ArrayList<String> s3 = new ArrayList<String>();
-        ArrayList<String> s4 = new ArrayList<String>();
-        for (int i = 0; i < numElements; ++i) {
-            s1.add(randomString(rnd.nextInt() % maxLength, rnd));
-            s2.add(randomString(rnd.nextInt() % maxLength, rnd));
-            s3.add(randomString(rnd.nextInt() % maxLength, rnd));
-            s4.add(randomString(rnd.nextInt() % maxLength, rnd));
-        }
-
-        for (int i = 0; i < numElements; ++i) {
-            TupleUtils.createTuple(tupleBuilder, tuple, fieldSerdes, s1.get(i), s2.get(i), i, s3.get(i), s4.get(i));
-            builder.add(tuple);
-        }
-        builder.end();
-
-        long[] hashes = new long[2];
-        for (int i = 0; i < numElements; ++i) {
-            TupleUtils.createTuple(tupleBuilder, tuple, fieldSerdes, s1.get(i), s2.get(i), i, s3.get(i), s4.get(i));
-            Assert.assertTrue(bf.contains(tuple, hashes));
-        }
-
-        bf.deactivate();
-        bf.destroy();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
deleted file mode 100644
index 284a6cb..0000000
--- a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
+++ /dev/null
@@ -1,296 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter;
-
-import java.nio.ByteBuffer;
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.MurmurHash128Bit;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.util.AbstractBloomFilterTest;
-
-@SuppressWarnings("rawtypes")
-public class MurmurHashForITupleReferenceTest extends AbstractBloomFilterTest {
-    private final static int NUM_LONG_VARS_FOR_128_BIT_HASH = 2;
-    private final static int DUMMY_FIELD = 0;
-    private final Random rnd = new Random(50);
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        super.setUp();
-    }
-
-    @Test
-    public void murmurhashONEIntegerFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING MURMUR HASH ONE INTEGER FIELD");
-        }
-
-        int fieldCount = 2;
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(tupleBuilder, tuple, rnd.nextInt());
-        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-
-        int keyFields[] = { 0 };
-        int length = getTupleSize(tuple, keyFields);
-
-        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
-        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
-
-        ByteBuffer buffer;
-        byte[] array = new byte[length];
-        fillArrayWithData(array, keyFields, tuple, length);
-        buffer = ByteBuffer.wrap(array);
-
-        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
-        Assert.assertArrayEquals(expecteds, actuals);
-    }
-
-    @Test
-    public void murmurhashTwoIntegerFieldsTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING MURMUR HASH TWO INTEGER FIELDS");
-        }
-
-        int fieldCount = 2;
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(tupleBuilder, tuple, rnd.nextInt(), rnd.nextInt());
-        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-
-        int keyFields[] = { 0, 1 };
-        int length = getTupleSize(tuple, keyFields);
-
-        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
-        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
-
-        ByteBuffer buffer;
-        byte[] array = new byte[length];
-        fillArrayWithData(array, keyFields, tuple, length);
-        buffer = ByteBuffer.wrap(array);
-
-        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
-        Assert.assertArrayEquals(expecteds, actuals);
-    }
-
-    @Test
-    public void murmurhashOneStringFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING MURMUR HASH ONE STRING FIELD");
-        }
-
-        int fieldCount = 2;
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE };
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        String s = randomString(100, rnd);
-        TupleUtils.createTuple(tupleBuilder, tuple, fieldSerdes, s);
-
-        int keyFields[] = { 0 };
-        int length = getTupleSize(tuple, keyFields);
-
-        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
-        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
-
-        byte[] array = new byte[length];
-        ByteBuffer buffer;
-        fillArrayWithData(array, keyFields, tuple, length);
-        buffer = ByteBuffer.wrap(array);
-
-        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
-        Assert.assertArrayEquals(expecteds, actuals);
-    }
-
-    @Test
-    public void murmurhashThreeStringFieldsTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING MURMUR HASH THREE STRING FIELDS");
-        }
-
-        int fieldCount = 3;
-        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        String s1 = randomString(40, rnd);
-        String s2 = randomString(60, rnd);
-        String s3 = randomString(20, rnd);
-        TupleUtils.createTuple(tupleBuilder, tuple, fieldSerdes, s1, s2, s3);
-
-        int keyFields[] = { 2, 0, 1 };
-        int length = getTupleSize(tuple, keyFields);
-
-        long actuals[] = new long[NUM_LONG_VARS_FOR_128_BIT_HASH];
-        MurmurHash128Bit.hash3_x64_128(tuple, keyFields, 0L, actuals);
-
-        byte[] array = new byte[length];
-        ByteBuffer buffer;
-        fillArrayWithData(array, keyFields, tuple, length);
-        buffer = ByteBuffer.wrap(array);
-
-        long[] expecteds = hash3_x64_128(buffer, 0, length, 0L);
-        Assert.assertArrayEquals(expecteds, actuals);
-    }
-
-    private void fillArrayWithData(byte[] array, int[] keyFields, ITupleReference tuple, int length) {
-        int currentFieldIndex = 0;
-        int bytePos = 0;
-        for (int i = 0; i < length; ++i) {
-            array[i] = tuple.getFieldData(DUMMY_FIELD)[tuple.getFieldStart(keyFields[currentFieldIndex]) + bytePos];
-            ++bytePos;
-            if (tuple.getFieldLength(keyFields[currentFieldIndex]) == bytePos) {
-                ++currentFieldIndex;
-                bytePos = 0;
-            }
-        }
-    }
-
-    private int getTupleSize(ITupleReference tuple, int[] keyFields) {
-        int length = 0;
-        for (int i = 0; i < keyFields.length; ++i) {
-            length += tuple.getFieldLength(keyFields[i]);
-        }
-        return length;
-    }
-
-    /**
-     * The hash3_x64_128 and getblock functions are borrowed from cassandra source code for testing purpose
-     **/
-    protected static long getblock(ByteBuffer key, int offset, int index) {
-        int i_8 = index << 3;
-        int blockOffset = offset + i_8;
-        return ((long) key.get(blockOffset + 0) & 0xff) + (((long) key.get(blockOffset + 1) & 0xff) << 8)
-                + (((long) key.get(blockOffset + 2) & 0xff) << 16) + (((long) key.get(blockOffset + 3) & 0xff) << 24)
-                + (((long) key.get(blockOffset + 4) & 0xff) << 32) + (((long) key.get(blockOffset + 5) & 0xff) << 40)
-                + (((long) key.get(blockOffset + 6) & 0xff) << 48) + (((long) key.get(blockOffset + 7) & 0xff) << 56);
-    }
-
-    public static long[] hash3_x64_128(ByteBuffer key, int offset, int length, long seed) {
-        final int nblocks = length >> 4; // Process as 128-bit blocks.
-
-        long h1 = seed;
-        long h2 = seed;
-
-        long c1 = 0x87c37b91114253d5L;
-        long c2 = 0x4cf5ad432745937fL;
-
-        //----------
-        // body
-
-        for (int i = 0; i < nblocks; i++) {
-            long k1 = getblock(key, offset, i * 2 + 0);
-            long k2 = getblock(key, offset, i * 2 + 1);
-
-            k1 *= c1;
-            k1 = MurmurHash128Bit.rotl64(k1, 31);
-            k1 *= c2;
-            h1 ^= k1;
-
-            h1 = MurmurHash128Bit.rotl64(h1, 27);
-            h1 += h2;
-            h1 = h1 * 5 + 0x52dce729;
-
-            k2 *= c2;
-            k2 = MurmurHash128Bit.rotl64(k2, 33);
-            k2 *= c1;
-            h2 ^= k2;
-
-            h2 = MurmurHash128Bit.rotl64(h2, 31);
-            h2 += h1;
-            h2 = h2 * 5 + 0x38495ab5;
-        }
-
-        //----------
-        // tail
-
-        // Advance offset to the unprocessed tail of the data.
-        offset += nblocks * 16;
-
-        long k1 = 0;
-        long k2 = 0;
-
-        switch (length & 15) {
-            case 15:
-                k2 ^= ((long) key.get(offset + 14)) << 48;
-            case 14:
-                k2 ^= ((long) key.get(offset + 13)) << 40;
-            case 13:
-                k2 ^= ((long) key.get(offset + 12)) << 32;
-            case 12:
-                k2 ^= ((long) key.get(offset + 11)) << 24;
-            case 11:
-                k2 ^= ((long) key.get(offset + 10)) << 16;
-            case 10:
-                k2 ^= ((long) key.get(offset + 9)) << 8;
-            case 9:
-                k2 ^= ((long) key.get(offset + 8)) << 0;
-                k2 *= c2;
-                k2 = MurmurHash128Bit.rotl64(k2, 33);
-                k2 *= c1;
-                h2 ^= k2;
-
-            case 8:
-                k1 ^= ((long) key.get(offset + 7)) << 56;
-            case 7:
-                k1 ^= ((long) key.get(offset + 6)) << 48;
-            case 6:
-                k1 ^= ((long) key.get(offset + 5)) << 40;
-            case 5:
-                k1 ^= ((long) key.get(offset + 4)) << 32;
-            case 4:
-                k1 ^= ((long) key.get(offset + 3)) << 24;
-            case 3:
-                k1 ^= ((long) key.get(offset + 2)) << 16;
-            case 2:
-                k1 ^= ((long) key.get(offset + 1)) << 8;
-            case 1:
-                k1 ^= ((long) key.get(offset));
-                k1 *= c1;
-                k1 = MurmurHash128Bit.rotl64(k1, 31);
-                k1 *= c2;
-                h1 ^= k1;
-        };
-
-        //----------
-        // finalization
-
-        h1 ^= length;
-        h2 ^= length;
-
-        h1 += h2;
-        h2 += h1;
-
-        h1 = MurmurHash128Bit.fmix(h1);
-        h2 = MurmurHash128Bit.fmix(h2);
-
-        h1 += h2;
-        h2 += h1;
-
-        return (new long[] { h1, h2 });
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
deleted file mode 100644
index 9b857a6..0000000
--- a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter.util;
-
-import java.util.Random;
-import java.util.logging.Logger;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public abstract class AbstractBloomFilterTest {
-    protected final Logger LOGGER = Logger.getLogger(BloomFilterTestHarness.class.getName());
-
-    protected final BloomFilterTestHarness harness;
-
-    public AbstractBloomFilterTest() {
-        harness = new BloomFilterTestHarness();
-    }
-
-    public AbstractBloomFilterTest(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
-        harness = new BloomFilterTestHarness(pageSize, numPages, maxOpenFiles, hyracksFrameSize);
-    }
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    public static String randomString(int length, Random random) {
-        char[] chars = "abcdefghijklmnopqrstuvwxyz".toCharArray();
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < length; ++i) {
-            char c = chars[random.nextInt(chars.length)];
-            strBuilder.append(c);
-        }
-        return strBuilder.toString();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java b/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java
deleted file mode 100644
index 8fac122..0000000
--- a/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/edu/uci/ics/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.bloomfilter.util;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class BloomFilterTestHarness {
-
-    private static final long RANDOM_SEED = 50;
-
-    protected final int pageSize;
-    protected final int numPages;
-    protected final int maxOpenFiles;
-    protected final int hyracksFrameSize;
-
-    protected IHyracksTaskContext ctx;
-    protected IBufferCache bufferCache;
-    protected IFileMapProvider fileMapProvider;
-    protected FileReference file;
-
-    protected final Random rnd = new Random();
-    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final String sep = System.getProperty("file.separator");
-    protected String fileName;
-
-    public BloomFilterTestHarness() {
-        this.pageSize = AccessMethodTestsConfig.BLOOM_FILTER_PAGE_SIZE;
-        this.numPages = AccessMethodTestsConfig.BLOOM_FILTER_NUM_PAGES;
-        this.maxOpenFiles = AccessMethodTestsConfig.BLOOM_FILTER_MAX_OPEN_FILES;
-        this.hyracksFrameSize = AccessMethodTestsConfig.BLOOM_FILTER_HYRACKS_FRAME_SIZE;
-    }
-
-    public BloomFilterTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
-        this.pageSize = pageSize;
-        this.numPages = numPages;
-        this.maxOpenFiles = maxOpenFiles;
-        this.hyracksFrameSize = hyracksFrameSize;
-    }
-
-    public void setUp() throws HyracksDataException {
-        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-        ctx = TestUtils.create(getHyracksFrameSize());
-        TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
-        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        file = new FileReference(new File(fileName));
-        rnd.setSeed(RANDOM_SEED);
-    }
-
-    public void tearDown() throws HyracksDataException {
-        bufferCache.close();
-        file.delete();
-    }
-
-    public IHyracksTaskContext getHyracksTaskContext() {
-        return ctx;
-    }
-
-    public IBufferCache getBufferCache() {
-        return bufferCache;
-    }
-
-    public IFileMapProvider getFileMapProvider() {
-        return fileMapProvider;
-    }
-
-    public FileReference getFileReference() {
-        return file;
-    }
-
-    public String getFileName() {
-        return fileName;
-    }
-
-    public Random getRandom() {
-        return rnd;
-    }
-
-    public int getPageSize() {
-        return pageSize;
-    }
-
-    public int getNumPages() {
-        return numPages;
-    }
-
-    public int getHyracksFrameSize() {
-        return hyracksFrameSize;
-    }
-
-    public int getMaxOpenFiles() {
-        return maxOpenFiles;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
deleted file mode 100644
index 5d80261..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-btree-test</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-tests</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java
deleted file mode 100644
index 5417ca1..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-
-public class BTreeBulkLoadTest extends OrderedIndexBulkLoadTest {
-
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-
-    public BTreeBulkLoadTest() {
-        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST, 1);
-    }
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return BTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, numKeys, leafType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java
deleted file mode 100644
index 10cd59f..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-
-public class BTreeDeleteTest extends OrderedIndexDeleteTest {
-
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-
-    public BTreeDeleteTest() {
-        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return BTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, numKeys, leafType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
deleted file mode 100644
index c02d53d..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-
-public class BTreeExamplesTest extends OrderedIndexExamplesTest {
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException {
-        return BTreeUtils.createBTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits, cmpFactories,
-                BTreeLeafFrameType.REGULAR_NSM, harness.getFileReference());
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java
deleted file mode 100644
index efe7579..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-
-/**
- * Tests the BTree insert operation with strings and integer fields using 
- * various numbers of key and payload fields. Each tests first fills a BTree with 
- * randomly generated tuples. We compare the following operations against expected results: 
- *      1) Point searches for all tuples 
- *      2) Ordered scan
- *      3) Disk-order scan
- *      4) Range search (and prefix search for composite keys)
- */
-public class BTreeInsertTest extends OrderedIndexInsertTest {
-
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-
-    public BTreeInsertTest() {
-        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return BTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, numKeys, leafType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeLifecycleTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeLifecycleTest.java
deleted file mode 100644
index 1445d2c..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeLifecycleTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexLifecycleTest;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-
-public class BTreeLifecycleTest extends AbstractIndexLifecycleTest {
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-    private final TreeIndexTestUtils titu = new OrderedIndexTestUtils();
-
-    @SuppressWarnings("rawtypes")
-    private final ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
-
-    private ITreeIndexFrame frame = null;
-
-    @SuppressWarnings("rawtypes")
-    private IIndexTestContext<? extends CheckTuple> testCtx;
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        testCtx = BTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, fieldSerdes.length, BTreeLeafFrameType.REGULAR_NSM);
-        index = testCtx.getIndex();
-    }
-
-    @Override
-    public void tearDown() throws HyracksDataException {
-        testCtx.getIndex().deactivate();
-        testCtx.getIndex().destroy();
-        harness.tearDown();
-    }
-
-    @Override
-    protected boolean persistentStateExists() {
-        return harness.getFileReference().getFile().exists();
-    }
-
-    @Override
-    protected boolean isEmptyIndex() throws HyracksDataException {
-        BTree btree = (BTree) testCtx.getIndex();
-        if (frame == null) {
-            frame = btree.getInteriorFrameFactory().createFrame();
-        }
-        return btree.isEmptyTree(frame);
-    }
-
-    @Override
-    protected void performInsertions() throws Exception {
-        titu.insertIntTuples(testCtx, 10, harness.getRandom());
-    }
-
-    @Override
-    protected void checkInsertions() throws Exception {
-        titu.checkScan(testCtx);
-    }
-
-    @Override
-    protected void clearCheckableInsertions() throws Exception {
-        testCtx.getCheckTuples().clear();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeModificationOperationCallbackTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeModificationOperationCallbackTest.java
deleted file mode 100644
index b5cbca3..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeModificationOperationCallbackTest.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-
-public class BTreeModificationOperationCallbackTest extends AbstractModificationOperationCallbackTest {
-    private final BTreeTestHarness harness;
-
-    public BTreeModificationOperationCallbackTest() {
-        harness = new BTreeTestHarness();
-    }
-
-    @Override
-    protected void createIndexInstance() throws Exception {
-        index = BTreeUtils.createBTree(harness.getBufferCache(), harness.getFileMapProvider(),
-                SerdeUtils.serdesToTypeTraits(keySerdes),
-                SerdeUtils.serdesToComparatorFactories(keySerdes, keySerdes.length), BTreeLeafFrameType.REGULAR_NSM,
-                harness.getFileReference());
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        super.setup();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        harness.tearDown();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java
deleted file mode 100644
index ad0b21e..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java
+++ /dev/null
@@ -1,429 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Random;
-import java.util.TreeSet;
-import java.util.logging.Level;
-
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class BTreeSearchCursorTest extends AbstractBTreeTest {
-    private final int fieldCount = 2;
-    private final ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-    private final TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-    private final ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-    private final Random rnd = new Random(50);
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        super.setUp();
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-    }
-
-    @Test
-    public void uniqueIndexTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
-        }
-
-        IBufferCache bufferCache = harness.getBufferCache();
-
-        // declare keys
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
-                leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
-        btree.create();
-        btree.activate();
-
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        // generate keys
-        int numKeys = 50;
-        int maxKey = 1000;
-        TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
-        ArrayList<Integer> keys = new ArrayList<Integer>();
-        while (uniqueKeys.size() < numKeys) {
-            int key = rnd.nextInt() % maxKey;
-            uniqueKeys.add(key);
-        }
-        for (Integer i : uniqueKeys) {
-            keys.add(i);
-        }
-
-        // insert keys into btree
-        for (int i = 0; i < keys.size(); i++) {
-
-            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
-            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-
-            try {
-                indexAccessor.insert(tuple);
-            } catch (BTreeException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        int minSearchKey = -100;
-        int maxSearchKey = 100;
-
-        // forward searches
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                false, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-
-        btree.deactivate();
-        btree.destroy();
-    }
-
-    @Test
-    public void nonUniqueIndexTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
-        }
-
-        IBufferCache bufferCache = harness.getBufferCache();
-
-        // declare keys
-        int keyFieldCount = 2;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
-                leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
-        btree.create();
-        btree.activate();
-
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        // generate keys
-        int numKeys = 50;
-        int maxKey = 10;
-        ArrayList<Integer> keys = new ArrayList<Integer>();
-        for (int i = 0; i < numKeys; i++) {
-            int k = rnd.nextInt() % maxKey;
-            keys.add(k);
-        }
-        Collections.sort(keys);
-
-        // insert keys into btree
-        for (int i = 0; i < keys.size(); i++) {
-
-            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
-            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-
-            try {
-                indexAccessor.insert(tuple);
-            } catch (BTreeException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        int minSearchKey = -100;
-        int maxSearchKey = 100;
-
-        // forward searches
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                false, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-
-        btree.deactivate();
-        btree.destroy();
-    }
-
-    @Test
-    public void nonUniqueFieldPrefixIndexTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
-        }
-
-        IBufferCache bufferCache = harness.getBufferCache();
-
-        // declare keys
-        int keyFieldCount = 2;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
-                leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
-        btree.create();
-        btree.activate();
-
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        // generate keys
-        int numKeys = 50;
-        int maxKey = 10;
-        ArrayList<Integer> keys = new ArrayList<Integer>();
-        for (int i = 0; i < numKeys; i++) {
-            int k = rnd.nextInt() % maxKey;
-            keys.add(k);
-        }
-        Collections.sort(keys);
-
-        // insert keys into btree
-        for (int i = 0; i < keys.size(); i++) {
-
-            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
-            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
-
-            try {
-                indexAccessor.insert(tuple);
-            } catch (BTreeException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        int minSearchKey = -100;
-        int maxSearchKey = 100;
-
-        // forward searches
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                false, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-
-        btree.deactivate();
-        btree.destroy();
-    }
-
-    public RangePredicate createRangePredicate(int lk, int hk, boolean lowKeyInclusive, boolean highKeyInclusive)
-            throws HyracksDataException {
-
-        // create tuplereferences for search keys
-        ITupleReference lowKey = TupleUtils.createIntegerTuple(lk);
-        ITupleReference highKey = TupleUtils.createIntegerTuple(hk);
-
-        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
-        searchCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(searchCmps);
-
-        RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, searchCmp,
-                searchCmp);
-        return rangePred;
-    }
-
-    public void getExpectedResults(ArrayList<Integer> expectedResults, ArrayList<Integer> keys, int lk, int hk,
-            boolean lowKeyInclusive, boolean highKeyInclusive) {
-
-        // special cases
-        if (lk == hk && (!lowKeyInclusive || !highKeyInclusive))
-            return;
-        if (lk > hk)
-            return;
-
-        for (int i = 0; i < keys.size(); i++) {
-            if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
-                expectedResults.add(keys.get(i));
-                continue;
-            }
-
-            if (lk < keys.get(i) && hk > keys.get(i)) {
-                expectedResults.add(keys.get(i));
-                continue;
-            }
-        }
-    }
-
-    public boolean performSearches(ArrayList<Integer> keys, BTree btree, IBTreeLeafFrame leafFrame,
-            IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean lowKeyInclusive,
-            boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
-
-        ArrayList<Integer> results = new ArrayList<Integer>();
-        ArrayList<Integer> expectedResults = new ArrayList<Integer>();
-
-        for (int i = minKey; i < maxKey; i++) {
-            for (int j = minKey; j < maxKey; j++) {
-
-                results.clear();
-                expectedResults.clear();
-
-                int lowKey = i;
-                int highKey = j;
-
-                ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
-                RangePredicate rangePred = createRangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive);
-                ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                        TestOperationCallback.INSTANCE);
-                indexAccessor.search(rangeCursor, rangePred);
-
-                try {
-                    while (rangeCursor.hasNext()) {
-                        rangeCursor.next();
-                        ITupleReference frameTuple = rangeCursor.getTuple();
-                        ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(0),
-                                frameTuple.getFieldStart(0), frameTuple.getFieldLength(0));
-                        DataInput dataIn = new DataInputStream(inStream);
-                        Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
-                        results.add(res);
-                    }
-                } catch (Exception e) {
-                    e.printStackTrace();
-                } finally {
-                    rangeCursor.close();
-                }
-
-                getExpectedResults(expectedResults, keys, lowKey, highKey, lowKeyInclusive, highKeyInclusive);
-
-                if (printExpectedResults) {
-                    if (expectedResults.size() > 0) {
-                        char l, u;
-
-                        if (lowKeyInclusive)
-                            l = '[';
-                        else
-                            l = '(';
-
-                        if (highKeyInclusive)
-                            u = ']';
-                        else
-                            u = ')';
-
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
-                        }
-                        StringBuilder strBuilder = new StringBuilder();
-                        for (Integer r : expectedResults) {
-                            strBuilder.append(r + " ");
-                        }
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info(strBuilder.toString());
-                        }
-                    }
-                }
-
-                if (results.size() == expectedResults.size()) {
-                    for (int k = 0; k < results.size(); k++) {
-                        if (!results.get(k).equals(expectedResults.get(k))) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
-                                LOGGER.info(results.get(k) + " " + expectedResults.get(k));
-                            }
-                            return false;
-                        }
-                    }
-                } else {
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
-                        LOGGER.info("RESULTS: " + results.size());
-                        LOGGER.info("EXPECTED RESULTS: " + expectedResults.size());
-                    }
-                    return false;
-                }
-            }
-        }
-
-        return true;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchOperationCallbackTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchOperationCallbackTest.java
deleted file mode 100644
index 037d992..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchOperationCallbackTest.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-
-public class BTreeSearchOperationCallbackTest extends AbstractSearchOperationCallbackTest {
-    private final BTreeTestHarness harness;
-
-    public BTreeSearchOperationCallbackTest() {
-        harness = new BTreeTestHarness();
-    }
-
-    @Override
-    protected void createIndexInstance() throws Exception {
-        index = BTreeUtils.createBTree(harness.getBufferCache(), harness.getFileMapProvider(),
-                SerdeUtils.serdesToTypeTraits(keySerdes),
-                SerdeUtils.serdesToComparatorFactories(keySerdes, keySerdes.length), BTreeLeafFrameType.REGULAR_NSM,
-                harness.getFileReference());
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        super.setup();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        harness.tearDown();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
deleted file mode 100644
index 57535b2..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
+++ /dev/null
@@ -1,161 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexBufferCacheWarmup;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-@SuppressWarnings("rawtypes")
-public class BTreeStatsTest extends AbstractBTreeTest {
-    private static final int PAGE_SIZE = 4096;
-    private static final int NUM_PAGES = 1000;
-    private static final int MAX_OPEN_FILES = 10;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-
-    private final IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
-    @Test
-    public void test01() throws Exception {
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = harness.getBufferCache();
-        IFileMapProvider fmp = harness.getFileMapProvider();
-
-        // declare fields
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-
-        // declare keys
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory,
-                cmpFactories, fieldCount, harness.getFileReference());
-        btree.create();
-        btree.activate();
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        long start = System.currentTimeMillis();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("INSERTING INTO TREE");
-        }
-
-        ByteBuffer frame = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(frame);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-        // 10000
-        for (int i = 0; i < 100000; i++) {
-
-            int f0 = rnd.nextInt() % 100000;
-            int f1 = 5;
-
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 10000 == 0) {
-                    long end = System.currentTimeMillis();
-                    LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
-                }
-            }
-
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        int fileId = fmp.lookupFileId(harness.getFileReference());
-        TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
-                btree.getRootPageId());
-        TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("\n" + stats.toString());
-        }
-
-        TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager,
-                fileId);
-        bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });
-
-        btree.deactivate();
-        btree.destroy();
-        bufferCache.close();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
deleted file mode 100644
index f01799d..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
+++ /dev/null
@@ -1,156 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class BTreeUpdateSearchTest extends AbstractBTreeTest {
-
-    // Update scan test on fixed-length tuples.
-    @Test
-    public void test01() throws Exception {
-        IBufferCache bufferCache = harness.getBufferCache();
-
-        // declare fields
-        int fieldCount = 2;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-
-        // declare keys
-        int keyFieldCount = 1;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        @SuppressWarnings("rawtypes")
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-        BTree btree = new BTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
-                leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
-        btree.create();
-        btree.activate();
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        long start = System.currentTimeMillis();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("INSERTING INTO TREE");
-        }
-
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference insertTuple = new ArrayTupleReference();
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
-
-        int numInserts = 10000;
-        for (int i = 0; i < numInserts; i++) {
-            int f0 = rnd.nextInt() % 10000;
-            int f1 = 5;
-            TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                if (i % 10000 == 0) {
-                    long end = System.currentTimeMillis();
-                    LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
-                }
-            }
-
-            try {
-                indexAccessor.insert(insertTuple);
-            } catch (TreeIndexException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        long end = System.currentTimeMillis();
-        long duration = end - start;
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("DURATION: " + duration);
-        }
-
-        // Update scan.
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("UPDATE SCAN:");
-        }
-        // Set the cursor to X latch nodes.
-        ITreeIndexCursor updateScanCursor = new BTreeRangeSearchCursor(leafFrame, true);
-        RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
-        indexAccessor.search(updateScanCursor, nullPred);
-        try {
-            while (updateScanCursor.hasNext()) {
-                updateScanCursor.next();
-                ITupleReference tuple = updateScanCursor.getTuple();
-                // Change the value field.
-                IntegerSerializerDeserializer.putInt(10, tuple.getFieldData(1), tuple.getFieldStart(1));
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            updateScanCursor.close();
-        }
-
-        // Ordered scan to verify the values.
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("ORDERED SCAN:");
-        }
-        // Set the cursor to X latch nodes.
-        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame, true);
-        indexAccessor.search(scanCursor, nullPred);
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                ITupleReference tuple = scanCursor.getTuple();
-                String rec = TupleUtils.printTuple(tuple, recDescSers);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(rec);
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            scanCursor.close();
-        }
-        btree.deactivate();
-        btree.destroy();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java
deleted file mode 100644
index 89bb50e..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-
-public class BTreeUpdateTest extends OrderedIndexUpdateTest {
-
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-
-    public BTreeUpdateTest() {
-        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return BTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, numKeys, leafType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java
deleted file mode 100644
index ab32156..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-
-/**
- * Tests the BTree insert operation with strings and integer fields using 
- * various numbers of key and payload fields. Each tests first fills a BTree with 
- * randomly generated tuples. We compare the following operations against expected results: 
- *      1) Point searches for all tuples 
- *      2) Ordered scan
- *      3) Disk-order scan
- *      4) Range search (and prefix search for composite keys)
- */
-public class BTreeUpsertTest extends OrderedIndexUpsertTest {
-
-    public BTreeUpsertTest() {
-        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return BTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, numKeys, leafType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
deleted file mode 100644
index 1bf511e..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class FieldPrefixNSMTest extends AbstractBTreeTest {
-
-    private static final int PAGE_SIZE = 32768;
-    private static final int NUM_PAGES = 40;
-    private static final int MAX_OPEN_FILES = 10;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-
-    public FieldPrefixNSMTest() {
-        super(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES, HYRACKS_FRAME_SIZE);
-    }
-
-    private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
-            throws HyracksDataException {
-        if (print) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
-            }
-        }
-
-        ByteBuffer buf = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
-        DataOutput dos = tb.getDataOutput();
-
-        @SuppressWarnings("rawtypes")
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(buf);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        tb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
-        tb.addFieldEndOffset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
-        tb.addFieldEndOffset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
-        tb.addFieldEndOffset();
-
-        appender.reset(buf, true);
-        appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-        tuple.reset(accessor, 0);
-
-        return tuple;
-    }
-
-    @Test
-    public void test01() throws Exception {
-
-        // declare fields
-        int fieldCount = 3;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-
-        // declare keys
-        int keyFieldCount = 3;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
-        cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
-        cmps[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
-        MultiComparator cmp = new MultiComparator(cmps);
-
-        // just for printing
-        @SuppressWarnings("rawtypes")
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        IBufferCache bufferCache = harness.getBufferCache();
-        IFileMapProvider fileMapProvider = harness.getFileMapProvider();
-        bufferCache.createFile(harness.getFileReference());
-        int btreeFileId = fileMapProvider.lookupFileId(harness.getFileReference());
-        bufferCache.openFile(btreeFileId);
-        IHyracksTaskContext ctx = harness.getHyracksTaskContext();
-        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), false);
-        try {
-
-            ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
-            BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);
-            frame.setPage(page);
-            frame.initBuffer((byte) 0);
-            frame.setMultiComparator(cmp);
-            frame.setPrefixTupleCount(0);
-
-            String before = new String();
-            String after = new String();
-
-            int compactFreq = 5;
-            int compressFreq = 5;
-            int smallMax = 10;
-            int numRecords = 1000;
-
-            int[][] savedFields = new int[numRecords][3];
-
-            // insert records with random calls to compact and compress
-            for (int i = 0; i < numRecords; i++) {
-
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    if ((i + 1) % 100 == 0) {
-                        LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
-                    }
-                }
-
-                int a = rnd.nextInt() % smallMax;
-                int b = rnd.nextInt() % smallMax;
-                int c = i;
-
-                ITupleReference tuple = createTuple(ctx, a, b, c, false);
-                try {
-                    int targetTupleIndex = frame.findInsertTupleIndex(tuple);
-                    frame.insert(tuple, targetTupleIndex);
-                } catch (BTreeException e) {
-                    e.printStackTrace();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-
-                savedFields[i][0] = a;
-                savedFields[i][1] = b;
-                savedFields[i][2] = c;
-
-                if (rnd.nextInt() % compactFreq == 0) {
-                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    frame.compact();
-                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    Assert.assertEquals(before, after);
-                }
-
-                if (rnd.nextInt() % compressFreq == 0) {
-                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    frame.compress();
-                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    Assert.assertEquals(before, after);
-                }
-
-            }
-
-            // delete records with random calls to compact and compress
-            for (int i = 0; i < numRecords; i++) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    if ((i + 1) % 100 == 0) {
-                        LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
-                    }
-                }
-
-                ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
-                try {
-                    int tupleIndex = frame.findDeleteTupleIndex(tuple);
-                    frame.delete(tuple, tupleIndex);
-                } catch (Exception e) {
-                }
-
-                if (rnd.nextInt() % compactFreq == 0) {
-                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    frame.compact();
-                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    Assert.assertEquals(before, after);
-                }
-
-                if (rnd.nextInt() % compressFreq == 0) {
-                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    frame.compress();
-                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
-                    Assert.assertEquals(before, after);
-                }
-            }
-
-        } finally {
-            bufferCache.unpin(page);
-            bufferCache.closeFile(btreeFileId);
-            bufferCache.close();
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
deleted file mode 100644
index d273a12..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
+++ /dev/null
@@ -1,267 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.sync.LatchType;
-
-public class StorageManagerTest extends AbstractBTreeTest {
-    public class PinnedLatchedPage {
-        public final ICachedPage page;
-        public final LatchType latch;
-        public final int pageId;
-
-        public PinnedLatchedPage(ICachedPage page, int pageId, LatchType latch) {
-            this.page = page;
-            this.pageId = pageId;
-            this.latch = latch;
-        }
-    }
-
-    public enum FileAccessType {
-        FTA_READONLY,
-        FTA_WRITEONLY,
-        FTA_MIXED,
-        FTA_UNLATCHED
-    }
-
-    public class FileAccessWorker implements Runnable {
-        private int workerId;
-        private final IBufferCache bufferCache;
-        private final int maxPages;
-        private final int fileId;
-        private final long thinkTime;
-        private final int maxLoopCount;
-        private final int maxPinnedPages;
-        private final int closeFileChance;
-        private final FileAccessType fta;
-        private int loopCount = 0;
-        private boolean fileIsOpen = false;
-        private Random rnd = new Random(50);
-        private List<PinnedLatchedPage> pinnedPages = new LinkedList<PinnedLatchedPage>();
-
-        public FileAccessWorker(int workerId, IBufferCache bufferCache, FileAccessType fta, int fileId, int maxPages,
-                int maxPinnedPages, int maxLoopCount, int closeFileChance, long thinkTime) {
-            this.bufferCache = bufferCache;
-            this.fileId = fileId;
-            this.maxPages = maxPages;
-            this.maxLoopCount = maxLoopCount;
-            this.maxPinnedPages = maxPinnedPages;
-            this.thinkTime = thinkTime;
-            this.closeFileChance = closeFileChance;
-            this.workerId = workerId;
-            this.fta = fta;
-        }
-
-        private void pinRandomPage() {
-            int pageId = Math.abs(rnd.nextInt() % maxPages);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(workerId + " PINNING PAGE: " + pageId);
-            }
-
-            try {
-                ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                LatchType latch = null;
-
-                switch (fta) {
-
-                    case FTA_UNLATCHED: {
-                        latch = null;
-                    }
-                        break;
-
-                    case FTA_READONLY: {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info(workerId + " S LATCHING: " + pageId);
-                        }
-                        page.acquireReadLatch();
-                        latch = LatchType.LATCH_S;
-                    }
-                        break;
-
-                    case FTA_WRITEONLY: {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info(workerId + " X LATCHING: " + pageId);
-                        }
-                        page.acquireWriteLatch();
-                        latch = LatchType.LATCH_X;
-                    }
-                        break;
-
-                    case FTA_MIXED: {
-                        if (rnd.nextInt() % 2 == 0) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info(workerId + " S LATCHING: " + pageId);
-                            }
-                            page.acquireReadLatch();
-                            latch = LatchType.LATCH_S;
-                        } else {
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info(workerId + " X LATCHING: " + pageId);
-                            }
-                            page.acquireWriteLatch();
-                            latch = LatchType.LATCH_X;
-                        }
-                    }
-                        break;
-
-                }
-
-                PinnedLatchedPage plPage = new PinnedLatchedPage(page, pageId, latch);
-                pinnedPages.add(plPage);
-            } catch (HyracksDataException e) {
-                e.printStackTrace();
-            }
-        }
-
-        private void unpinRandomPage() {
-            int index = Math.abs(rnd.nextInt() % pinnedPages.size());
-            try {
-                PinnedLatchedPage plPage = pinnedPages.get(index);
-
-                if (plPage.latch != null) {
-                    if (plPage.latch == LatchType.LATCH_S) {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info(workerId + " S UNLATCHING: " + plPage.pageId);
-                        }
-                        plPage.page.releaseReadLatch();
-                    } else {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info(workerId + " X UNLATCHING: " + plPage.pageId);
-                        }
-                        plPage.page.releaseWriteLatch();
-                    }
-                }
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(workerId + " UNPINNING PAGE: " + plPage.pageId);
-                }
-
-                bufferCache.unpin(plPage.page);
-                pinnedPages.remove(index);
-            } catch (HyracksDataException e) {
-                e.printStackTrace();
-            }
-        }
-
-        private void openFile() {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(workerId + " OPENING FILE: " + fileId);
-            }
-            try {
-                bufferCache.openFile(fileId);
-                fileIsOpen = true;
-            } catch (HyracksDataException e) {
-                e.printStackTrace();
-            }
-        }
-
-        private void closeFile() {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(workerId + " CLOSING FILE: " + fileId);
-            }
-            try {
-                bufferCache.closeFile(fileId);
-                fileIsOpen = false;
-            } catch (HyracksDataException e) {
-                e.printStackTrace();
-            }
-        }
-
-        @Override
-        public void run() {
-
-            openFile();
-
-            while (loopCount < maxLoopCount) {
-                loopCount++;
-
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
-                }
-
-                if (fileIsOpen) {
-
-                    // pin some pages
-                    int pagesToPin = Math.abs(rnd.nextInt()) % (maxPinnedPages - pinnedPages.size());
-                    for (int i = 0; i < pagesToPin; i++) {
-                        pinRandomPage();
-                    }
-
-                    // do some thinking
-                    try {
-                        Thread.sleep(thinkTime);
-                    } catch (InterruptedException e) {
-                        e.printStackTrace();
-                    }
-
-                    // unpin some pages
-                    if (!pinnedPages.isEmpty()) {
-                        int pagesToUnpin = Math.abs(rnd.nextInt()) % pinnedPages.size();
-                        for (int i = 0; i < pagesToUnpin; i++) {
-                            unpinRandomPage();
-                        }
-                    }
-
-                    // possibly close file
-                    int closeFileCheck = Math.abs(rnd.nextInt()) % closeFileChance;
-                    if (pinnedPages.isEmpty() || closeFileCheck == 0) {
-                        int numPinnedPages = pinnedPages.size();
-                        for (int i = 0; i < numPinnedPages; i++) {
-                            unpinRandomPage();
-                        }
-                        closeFile();
-                    }
-                } else {
-                    openFile();
-                }
-            }
-
-            if (fileIsOpen) {
-                int numPinnedPages = pinnedPages.size();
-                for (int i = 0; i < numPinnedPages; i++) {
-                    unpinRandomPage();
-                }
-                closeFile();
-            }
-        }
-    }
-
-    @Test
-    public void oneThreadOneFileTest() throws Exception {
-        IBufferCache bufferCache = harness.getBufferCache();
-        bufferCache.createFile(harness.getFileReference());
-        int btreeFileId = harness.getFileMapProvider().lookupFileId(harness.getFileReference());
-        bufferCache.openFile(btreeFileId);
-        Thread worker = new Thread(new FileAccessWorker(0, harness.getBufferCache(), FileAccessType.FTA_UNLATCHED,
-                btreeFileId, 10, 10, 100, 10, 0));
-        worker.start();
-        worker.join();
-        bufferCache.closeFile(btreeFileId);
-        bufferCache.close();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
deleted file mode 100644
index 3f38c05..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.multithread;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexMultiThreadTest;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.ProbabilityHelper;
-
-public class BTreeMultiThreadTest extends OrderedIndexMultiThreadTest {
-
-    private final BTreeTestHarness harness = new BTreeTestHarness();
-    private final BTreeTestWorkerFactory workerFactory = new BTreeTestWorkerFactory();
-
-    @Override
-    protected void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @Override
-    protected void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected ITreeIndex createIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException {
-        return BTreeUtils.createBTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits, cmpFactories,
-                BTreeLeafFrameType.REGULAR_NSM, harness.getFileReference());
-    }
-
-    @Override
-    protected IIndexTestWorkerFactory getWorkerFactory() {
-        return workerFactory;
-    }
-
-    @Override
-    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
-        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
-
-        // Insert only workload.
-        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
-
-        // Inserts mixed with point searches and scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
-                TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
-
-        // Inserts, updates, deletes, and upserts.        
-        TestOperation[] insertDeleteUpdateUpsertOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE, TestOperation.UPSERT };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateUpsertOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateUpsertOps.length)));
-
-        // All operations mixed.
-        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE, TestOperation.UPSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN,
-                TestOperation.DISKORDER_SCAN };
-        workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
-
-        return workloadConfs;
-    }
-
-    @Override
-    protected String getIndexTypeName() {
-        return "BTree";
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
deleted file mode 100644
index be8dc5d..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.multithread;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public class BTreeTestWorker extends AbstractIndexTestWorker {
-
-    private final BTree btree;
-    private final int numKeyFields;
-    private final ArrayTupleBuilder deleteTb;
-    private final ArrayTupleReference deleteTuple = new ArrayTupleReference();
-
-    public BTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index, int numBatches) {
-        super(dataGen, opSelector, index, numBatches);
-        btree = (BTree) index;
-        numKeyFields = btree.getComparatorFactories().length;
-        deleteTb = new ArrayTupleBuilder(numKeyFields);
-    }
-
-    @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
-        BTree.BTreeAccessor accessor = (BTree.BTreeAccessor) indexAccessor;
-        ITreeIndexCursor searchCursor = accessor.createSearchCursor();
-        ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
-        MultiComparator cmp = accessor.getOpContext().cmp;
-        RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
-
-        switch (op) {
-            case INSERT:
-                try {
-                    accessor.insert(tuple);
-                } catch (BTreeDuplicateKeyException e) {
-                    // Ignore duplicate keys, since we get random tuples.
-                }
-                break;
-
-            case DELETE:
-                // Create a tuple reference with only key fields.
-                deleteTb.reset();
-                for (int i = 0; i < numKeyFields; i++) {
-                    deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-                }
-                deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
-                try {
-                    accessor.delete(deleteTuple);
-                } catch (BTreeNonExistentKeyException e) {
-                    // Ignore non-existant keys, since we get random tuples.
-                }
-                break;
-
-            case UPDATE:
-                try {
-                    accessor.update(tuple);
-                } catch (BTreeNonExistentKeyException e) {
-                    // Ignore non-existant keys, since we get random tuples.
-                } catch (BTreeNotUpdateableException e) {
-                    // Ignore not updateable exception due to numKeys == numFields.
-                }
-                break;
-
-            case UPSERT:
-                accessor.upsert(tuple);
-                // Upsert should not throw. If it does, there's 
-                // a bigger problem and the test should fail.
-                break;
-
-            case POINT_SEARCH:
-                searchCursor.reset();
-                rangePred.setLowKey(tuple, true);
-                rangePred.setHighKey(tuple, true);
-                accessor.search(searchCursor, rangePred);
-                consumeCursorTuples(searchCursor);
-                break;
-
-            case SCAN:
-                searchCursor.reset();
-                rangePred.setLowKey(null, true);
-                rangePred.setHighKey(null, true);
-                accessor.search(searchCursor, rangePred);
-                consumeCursorTuples(searchCursor);
-                break;
-
-            case DISKORDER_SCAN:
-                diskOrderScanCursor.reset();
-                accessor.diskOrderScan(diskOrderScanCursor);
-                consumeCursorTuples(diskOrderScanCursor);
-                break;
-
-            default:
-                throw new HyracksDataException("Op " + op.toString() + " not supported.");
-        }
-    }
-
-    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-            }
-        } finally {
-            cursor.close();
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java
deleted file mode 100644
index bfde531..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.multithread;
-
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class BTreeTestWorkerFactory implements IIndexTestWorkerFactory {
-    @Override
-    public AbstractIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) {
-        return new BTreeTestWorker(dataGen, opSelector, index, numBatches);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
deleted file mode 100644
index ef9a456..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.util;
-
-import java.util.logging.Logger;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public abstract class AbstractBTreeTest {
-    protected final Logger LOGGER = Logger.getLogger(BTreeTestHarness.class.getName());
-
-    protected final BTreeTestHarness harness;
-
-    public AbstractBTreeTest() {
-        harness = new BTreeTestHarness();
-    }
-
-    public AbstractBTreeTest(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
-        harness = new BTreeTestHarness(pageSize, numPages, maxOpenFiles, hyracksFrameSize);
-    }
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
deleted file mode 100644
index 1d63b85..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.util;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-@SuppressWarnings("rawtypes")
-public class BTreeTestContext extends OrderedIndexTestContext {
-
-    public BTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
-        super(fieldSerdes, treeIndex);
-    }
-
-    @Override
-    public int getKeyFieldCount() {
-        BTree btree = (BTree) index;
-        return btree.getComparatorFactories().length;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        BTree btree = (BTree) index;
-        return btree.getComparatorFactories();
-    }
-
-    public static BTreeTestContext create(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            FileReference file, ISerializerDeserializer[] fieldSerdes, int numKeyFields, BTreeLeafFrameType leafType)
-            throws Exception {
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
-        BTree btree = BTreeUtils.createBTree(bufferCache, fileMapProvider, typeTraits, cmpFactories, leafType, file);
-        BTreeTestContext testCtx = new BTreeTestContext(fieldSerdes, btree);
-        return testCtx;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java
deleted file mode 100644
index e357bf5..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.util;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class BTreeTestHarness {
-    public static final BTreeLeafFrameType[] LEAF_FRAMES_TO_TEST = new BTreeLeafFrameType[] {
-            BTreeLeafFrameType.REGULAR_NSM, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM };
-
-    private static final long RANDOM_SEED = 50;
-
-    protected final int pageSize;
-    protected final int numPages;
-    protected final int maxOpenFiles;
-    protected final int hyracksFrameSize;
-
-    protected IHyracksTaskContext ctx;
-    protected IBufferCache bufferCache;
-    protected IFileMapProvider fileMapProvider;
-    protected FileReference file;
-
-    protected final Random rnd = new Random();
-    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final String sep = System.getProperty("file.separator");
-    protected String fileName;
-
-    public BTreeTestHarness() {
-        this.pageSize = AccessMethodTestsConfig.BTREE_PAGE_SIZE;
-        this.numPages = AccessMethodTestsConfig.BTREE_NUM_PAGES;
-        this.maxOpenFiles = AccessMethodTestsConfig.BTREE_MAX_OPEN_FILES;
-        this.hyracksFrameSize = AccessMethodTestsConfig.BTREE_HYRACKS_FRAME_SIZE;
-    }
-
-    public BTreeTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
-        this.pageSize = pageSize;
-        this.numPages = numPages;
-        this.maxOpenFiles = maxOpenFiles;
-        this.hyracksFrameSize = hyracksFrameSize;
-    }
-
-    public void setUp() throws HyracksDataException {
-        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-        ctx = TestUtils.create(getHyracksFrameSize());
-        TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
-        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        file = new FileReference(new File(fileName));
-        rnd.setSeed(RANDOM_SEED);
-    }
-
-    public void tearDown() throws HyracksDataException {
-        bufferCache.close();
-        file.delete();
-    }
-
-    public IHyracksTaskContext getHyracksTaskContext() {
-        return ctx;
-    }
-
-    public IBufferCache getBufferCache() {
-        return bufferCache;
-    }
-
-    public IFileMapProvider getFileMapProvider() {
-        return fileMapProvider;
-    }
-
-    public FileReference getFileReference() {
-        return file;
-    }
-
-    public String getFileName() {
-        return fileName;
-    }
-
-    public Random getRandom() {
-        return rnd;
-    }
-
-    public int getPageSize() {
-        return pageSize;
-    }
-
-    public int getNumPages() {
-        return numPages;
-    }
-
-    public int getHyracksFrameSize() {
-        return hyracksFrameSize;
-    }
-
-    public int getMaxOpenFiles() {
-        return maxOpenFiles;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml b/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
deleted file mode 100644
index 32a1fe8..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-lsm-btree-test</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-tests</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-lsm-btree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
deleted file mode 100644
index 4bd1910..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexBulkLoadTest;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class LSMBTreeBulkLoadTest extends OrderedIndexBulkLoadTest {
-
-    public LSMBTreeBulkLoadTest() {
-        super(LSMBTreeTestHarness.LEAF_FRAMES_TO_TEST, 1);
-    }
-
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
deleted file mode 100644
index 069faad..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexDeleteTest;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class LSMBTreeDeleteTest extends OrderedIndexDeleteTest {
-
-    public LSMBTreeDeleteTest() {
-        super(LSMBTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
deleted file mode 100644
index 539ed3e..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexExamplesTest;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeUtils;
-
-public class LSMBTreeExamplesTest extends OrderedIndexExamplesTest {
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    @Override
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException {
-        return LSMBTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), typeTraits, cmpFactories, bloomFilterKeyFields,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
deleted file mode 100644
index f17e3c8..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexInsertTest;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class LSMBTreeInsertTest extends OrderedIndexInsertTest {
-
-    public LSMBTreeInsertTest() {
-        super(LSMBTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeLifecycleTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeLifecycleTest.java
deleted file mode 100644
index 24d1f10..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeLifecycleTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexLifecycleTest;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.impls.LSMBTree;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-
-public class LSMBTreeLifecycleTest extends AbstractIndexLifecycleTest {
-
-    @SuppressWarnings("rawtypes")
-    private final ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-    private final TreeIndexTestUtils titu = new OrderedIndexTestUtils();
-
-    @SuppressWarnings("rawtypes")
-    private IIndexTestContext<? extends CheckTuple> testCtx;
-
-    @Override
-    protected boolean persistentStateExists() throws Exception {
-        // make sure all of the directories exist
-        for (IODeviceHandle handle : harness.getIOManager().getIODevices()) {
-            if (!new FileReference(handle, harness.getFileReference().getFile().getPath()).getFile().exists()) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    @Override
-    protected boolean isEmptyIndex() throws Exception {
-        return ((LSMBTree) index).isEmptyIndex();
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        testCtx = LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, fieldSerdes.length, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-        index = testCtx.getIndex();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        index.deactivate();
-        index.destroy();
-        harness.tearDown();
-    }
-
-    @Override
-    protected void performInsertions() throws Exception {
-        titu.insertIntTuples(testCtx, 10, harness.getRandom());
-    }
-
-    @Override
-    protected void checkInsertions() throws Exception {
-        titu.checkScan(testCtx);
-    }
-
-    @Override
-    protected void clearCheckableInsertions() throws Exception {
-        testCtx.getCheckTuples().clear();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
deleted file mode 100644
index da36c79..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class LSMBTreeMergeTest extends LSMBTreeMergeTestDriver {
-
-    public LSMBTreeMergeTest() {
-        super(LSMBTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTestDriver.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTestDriver.java
deleted file mode 100644
index 8b02a8e..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTestDriver.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestDriver;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-
-@SuppressWarnings("rawtypes")
-public abstract class LSMBTreeMergeTestDriver extends OrderedIndexTestDriver {
-
-    private final OrderedIndexTestUtils orderedIndexTestUtils;
-
-    public LSMBTreeMergeTestDriver(BTreeLeafFrameType[] leafFrameTypesToTest) {
-        super(leafFrameTypesToTest);
-        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
-            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
-            throws Exception {
-        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        // Start off with one tree bulk loaded.
-        // We assume all fieldSerdes are of the same type. Check the first one
-        // to determine which field types to generate.
-        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-            orderedIndexTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
-        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
-            orderedIndexTestUtils.bulkLoadStringTuples(ctx, numTuplesToInsert, getRandom());
-        }
-
-        int maxTreesToMerge = AccessMethodTestsConfig.LSM_BTREE_MAX_TREES_TO_MERGE;
-        for (int i = 0; i < maxTreesToMerge; i++) {
-            for (int j = 0; j < i; j++) {
-                if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-                    orderedIndexTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
-                } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
-                    orderedIndexTestUtils.bulkLoadStringTuples(ctx, numTuplesToInsert, getRandom());
-                }
-            }
-
-            ILSMIndexAccessor accessor = (ILSMIndexAccessor) ctx.getIndexAccessor();
-            accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-
-            orderedIndexTestUtils.checkPointSearches(ctx);
-            orderedIndexTestUtils.checkScan(ctx);
-            orderedIndexTestUtils.checkDiskOrderScan(ctx);
-            orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
-            if (prefixLowKey != null && prefixHighKey != null) {
-                orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
-            }
-        }
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "LSM Merge";
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeModificationOperationCallbackTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeModificationOperationCallbackTest.java
deleted file mode 100644
index 648e70f..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeModificationOperationCallbackTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.AbstractModificationOperationCallbackTest;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeUtils;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BlockingIOOperationCallbackWrapper;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpOperationTrackerFactory;
-
-public class LSMBTreeModificationOperationCallbackTest extends AbstractModificationOperationCallbackTest {
-    private static final int NUM_TUPLES = 11;
-
-    private final LSMBTreeTestHarness harness;
-    private final BlockingIOOperationCallbackWrapper ioOpCallback;
-
-    public LSMBTreeModificationOperationCallbackTest() {
-        super();
-        this.ioOpCallback = new BlockingIOOperationCallbackWrapper(NoOpIOOperationCallback.INSTANCE);
-        harness = new LSMBTreeTestHarness();
-    }
-
-    @Override
-    protected void createIndexInstance() throws Exception {
-        index = LSMBTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), SerdeUtils.serdesToTypeTraits(keySerdes),
-                SerdeUtils.serdesToComparatorFactories(keySerdes, keySerdes.length), bloomFilterKeyFields,
-                harness.getMergePolicy(), NoOpOperationTrackerFactory.INSTANCE, harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        super.setup();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        harness.tearDown();
-    }
-
-    @Test
-    public void modificationCallbackTest() throws Exception {
-        ILSMIndexAccessor accessor = (ILSMIndexAccessor) index.createAccessor(cb, NoOpOperationCallback.INSTANCE);
-
-        for (int j = 0; j < 2; j++) {
-            isFoundNull = true;
-            for (int i = 0; i < NUM_TUPLES; i++) {
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                accessor.insert(tuple);
-            }
-
-            if (j == 1) {
-                accessor.scheduleFlush(ioOpCallback);
-                ioOpCallback.waitForIO();
-                isFoundNull = true;
-            } else {
-                isFoundNull = false;
-            }
-
-            for (int i = 0; i < NUM_TUPLES; i++) {
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                accessor.upsert(tuple);
-            }
-
-            if (j == 1) {
-                accessor.scheduleFlush(ioOpCallback);
-                ioOpCallback.waitForIO();
-                isFoundNull = true;
-            } else {
-                isFoundNull = false;
-            }
-
-            for (int i = 0; i < NUM_TUPLES; i++) {
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                accessor.delete(tuple);
-            }
-
-            accessor.scheduleFlush(ioOpCallback);
-            ioOpCallback.waitForIO();
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
deleted file mode 100644
index 3a99c16..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexBulkLoadTest;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class LSMBTreeMultiBulkLoadTest extends OrderedIndexBulkLoadTest {
-    public LSMBTreeMultiBulkLoadTest() {
-        // Using 5 bulk load rounds.
-        super(LSMBTreeTestHarness.LEAF_FRAMES_TO_TEST, AccessMethodTestsConfig.LSM_BTREE_BULKLOAD_ROUNDS);
-    }
-
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeSearchOperationCallbackTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeSearchOperationCallbackTest.java
deleted file mode 100644
index 389c87f..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeSearchOperationCallbackTest.java
+++ /dev/null
@@ -1,263 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import java.util.HashSet;
-import java.util.concurrent.Callable;
-import java.util.concurrent.Future;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.AbstractSearchOperationCallbackTest;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeUtils;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpOperationTrackerFactory;
-
-public class LSMBTreeSearchOperationCallbackTest extends AbstractSearchOperationCallbackTest {
-    private final LSMBTreeTestHarness harness;
-    private final HashSet<Integer> deleteSet;
-
-    public LSMBTreeSearchOperationCallbackTest() {
-        harness = new LSMBTreeTestHarness();
-        deleteSet = new HashSet<Integer>();
-    }
-
-    @Override
-    protected void createIndexInstance() throws Exception {
-        index = LSMBTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), SerdeUtils.serdesToTypeTraits(keySerdes),
-                SerdeUtils.serdesToComparatorFactories(keySerdes, keySerdes.length), bloomFilterKeyFields,
-                harness.getMergePolicy(), NoOpOperationTrackerFactory.INSTANCE, harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        super.setup();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        harness.tearDown();
-    }
-
-    @Test
-    public void searchCallbackTest() throws Exception {
-        Future<Boolean> insertFuture = executor.submit(new InsertionTask());
-        Future<Boolean> searchFuture = executor.submit(new SearchTask());
-        Assert.assertTrue(searchFuture.get());
-        Assert.assertTrue(insertFuture.get());
-    }
-
-    private class SearchTask implements Callable<Boolean> {
-        private final ISearchOperationCallback cb;
-        private final IIndexAccessor accessor;
-        private final IIndexCursor cursor;
-        private final RangePredicate predicate;
-        private final ArrayTupleBuilder builder;
-        private final ArrayTupleReference tuple;
-        private final ArrayTupleBuilder expectedTupleToBeLockedBuilder;
-        private final ArrayTupleReference expectedTupleToBeLocked;
-        private final ArrayTupleBuilder expectedTupleToBeCanceledBuilder;
-        private final ArrayTupleReference expectedTupleToBeCanceled;
-
-        private boolean blockOnHigh;
-        private int expectedAfterBlock;
-        private int expectedTupleToBeLockedValue;
-
-        public SearchTask() {
-            this.cb = new SynchronizingSearchOperationCallback();
-            this.accessor = index.createAccessor(NoOpOperationCallback.INSTANCE, cb);
-            this.cursor = accessor.createSearchCursor();
-            this.predicate = new RangePredicate();
-            this.builder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
-            this.tuple = new ArrayTupleReference();
-            this.expectedTupleToBeLockedBuilder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
-            this.expectedTupleToBeLocked = new ArrayTupleReference();
-            this.expectedTupleToBeCanceledBuilder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
-            this.expectedTupleToBeCanceled = new ArrayTupleReference();
-
-            this.blockOnHigh = false;
-            this.expectedAfterBlock = -1;
-            this.expectedTupleToBeLockedValue = -1;
-        }
-
-        @Override
-        public Boolean call() throws Exception {
-            lock.lock();
-            try {
-                if (!insertTaskStarted) {
-                    condition.await();
-                }
-
-                // begin a search on [50, +inf), blocking on 75
-                TupleUtils.createIntegerTuple(builder, tuple, 50);
-                predicate.setLowKey(tuple, true);
-                predicate.setHighKey(null, true);
-                accessor.search(cursor, predicate);
-                expectedTupleToBeLockedValue = 50;
-                TupleUtils.createIntegerTuple(builder, expectedTupleToBeLocked, expectedTupleToBeLockedValue);
-                consumeIntTupleRange(50, 75, true, 76);
-
-                // consume tuples [77, 150], blocking on 151
-                consumeIntTupleRange(77, 150, true, 150);
-
-                // consume tuples [152, 300]
-                consumeIntTupleRange(152, 300, false, -1);
-
-                cursor.close();
-            } finally {
-                lock.unlock();
-            }
-
-            return true;
-        }
-
-        private void consumeIntTupleRange(int begin, int end, boolean blockOnHigh, int expectedAfterBlock)
-                throws Exception {
-            if (end < begin) {
-                throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
-            }
-
-            for (int i = begin; i <= end; i++) {
-                if (blockOnHigh == true && i == end) {
-                    this.blockOnHigh = true;
-                    this.expectedAfterBlock = expectedAfterBlock;
-                }
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                if (!cursor.hasNext()) {
-                    Assert.fail("Failed to consume entire tuple range since cursor is exhausted.");
-                }
-                cursor.next();
-                Assert.assertEquals(0, cmp.compare(tuple, cursor.getTuple()));
-            }
-        }
-
-        private class SynchronizingSearchOperationCallback implements ISearchOperationCallback {
-
-            @Override
-            public boolean proceed(ITupleReference tuple) {
-                Assert.assertEquals(0, cmp.compare(SearchTask.this.expectedTupleToBeLocked, tuple));
-                return false;
-            }
-
-            @Override
-            public void reconcile(ITupleReference tuple) throws HyracksDataException {
-                Assert.assertEquals(0, cmp.compare(SearchTask.this.expectedTupleToBeLocked, tuple));
-                if (blockOnHigh) {
-                    TupleUtils.createIntegerTuple(builder, SearchTask.this.tuple, expectedAfterBlock);
-                    condition.signal();
-                    condition.awaitUninterruptibly();
-                    blockOnHigh = false;
-                }
-                expectedTupleToBeLockedValue++;
-                TupleUtils.createIntegerTuple(expectedTupleToBeLockedBuilder, expectedTupleToBeLocked,
-                        expectedTupleToBeLockedValue);
-
-            }
-
-            @Override
-            public void cancel(ITupleReference tuple) throws HyracksDataException {
-                boolean found = false;
-                for (int i : deleteSet) {
-                    TupleUtils.createIntegerTuple(expectedTupleToBeCanceledBuilder, expectedTupleToBeCanceled, i);
-                    if (cmp.compare(SearchTask.this.expectedTupleToBeCanceled, tuple) == 0) {
-                        found = true;
-                        break;
-                    }
-                }
-                Assert.assertTrue(found);
-            }
-
-        }
-    }
-
-    private class InsertionTask implements Callable<Boolean> {
-        private final IIndexAccessor accessor;
-        private final ArrayTupleBuilder builder;
-        private final ArrayTupleReference tuple;
-
-        public InsertionTask() {
-            this.accessor = index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            this.builder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
-            this.tuple = new ArrayTupleReference();
-        }
-
-        @Override
-        public Boolean call() throws Exception {
-            lock.lock();
-            try {
-                insertTaskStarted = true;
-
-                // bulkload [101, 150] & [151, 200] as two separate disk components 
-                // insert [50, 100] & [301, 350] to the in-memory component
-                // delete tuple 151
-                bulkloadIntTupleRange(101, 150);
-                bulkloadIntTupleRange(151, 200);
-                insertIntTupleRange(50, 100);
-                insertIntTupleRange(301, 350);
-                int tupleTobeDeletedValue = 151;
-                deleteSet.add(tupleTobeDeletedValue);
-                TupleUtils.createIntegerTuple(builder, tuple, tupleTobeDeletedValue);
-                accessor.delete(tuple);
-                condition.signal();
-                condition.await();
-
-                // delete tuple 75
-                tupleTobeDeletedValue = 75;
-                deleteSet.add(tupleTobeDeletedValue);
-                TupleUtils.createIntegerTuple(builder, tuple, tupleTobeDeletedValue);
-                accessor.delete(tuple);
-                condition.signal();
-                condition.await();
-
-                // insert tuples [201, 300] and delete tuple 151
-                insertIntTupleRange(201, 300);
-                condition.signal();
-            } finally {
-                lock.unlock();
-            }
-
-            return true;
-        }
-
-        private void insertIntTupleRange(int begin, int end) throws Exception {
-            if (end < begin) {
-                throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
-            }
-
-            for (int i = begin; i <= end; i++) {
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                accessor.insert(tuple);
-            }
-        }
-
-        private void bulkloadIntTupleRange(int begin, int end) throws Exception {
-            if (end < begin) {
-                throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
-            }
-
-            IIndexBulkLoader bulkloader = index.createBulkLoader(1.0f, false, end - begin);
-            for (int i = begin; i <= end; i++) {
-                TupleUtils.createIntegerTuple(builder, tuple, i);
-                bulkloader.add(tuple);
-            }
-            bulkloader.end();
-        }
-
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
deleted file mode 100644
index ca89512..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexUpdateTest;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class LSMBTreeUpdateTest extends OrderedIndexUpdateTest {
-
-    public LSMBTreeUpdateTest() {
-        super(LSMBTreeTestHarness.LEAF_FRAMES_TO_TEST);
-    }
-
-    private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
-            BTreeLeafFrameType leafType) throws Exception {
-        return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
deleted file mode 100644
index c494448..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.multithread;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexMultiThreadTest;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.ProbabilityHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeUtils;
-
-public class LSMBTreeMultiThreadTest extends OrderedIndexMultiThreadTest {
-
-    private LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
-    private LSMBTreeTestWorkerFactory workerFactory = new LSMBTreeTestWorkerFactory();
-
-    @Override
-    protected void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @Override
-    protected void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected ITreeIndex createIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException {
-        return LSMBTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), typeTraits, cmpFactories, bloomFilterKeyFields,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected IIndexTestWorkerFactory getWorkerFactory() {
-        return workerFactory;
-    }
-
-    @Override
-    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
-        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
-
-        // Insert only workload.
-        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
-
-        // Insert and merge workload.
-        TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertMergeOps.length)));
-
-        // Inserts mixed with point searches and scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
-                TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
-
-        // Inserts, updates, and deletes.
-        TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateOps.length)));
-
-        // Inserts, updates, deletes and merges.
-        TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateMergeOps.length)));
-
-        // All operations except merge.
-        TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
-                .getUniformProbDist(allNoMergeOps.length)));
-
-        // All operations.
-        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
-
-        return workloadConfs;
-    }
-
-    @Override
-    protected String getIndexTypeName() {
-        return "LSMBTree";
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
deleted file mode 100644
index c008f90..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.multithread;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.impls.LSMBTree;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.impls.LSMBTree.LSMBTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-
-public class LSMBTreeTestWorker extends AbstractIndexTestWorker {
-    private final LSMBTree lsmBTree;
-    private final int numKeyFields;
-    private final ArrayTupleBuilder deleteTb;
-    private final ArrayTupleReference deleteTuple = new ArrayTupleReference();
-
-    public LSMBTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index, int numBatches) {
-        super(dataGen, opSelector, index, numBatches);
-        lsmBTree = (LSMBTree) index;
-        numKeyFields = lsmBTree.getComparatorFactories().length;
-        deleteTb = new ArrayTupleBuilder(numKeyFields);
-    }
-
-    @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
-        LSMBTreeAccessor accessor = (LSMBTreeAccessor) indexAccessor;
-        IIndexCursor searchCursor = accessor.createSearchCursor();
-        MultiComparator cmp = accessor.getMultiComparator();
-        RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
-
-        switch (op) {
-            case INSERT:
-                try {
-                    accessor.insert(tuple);
-                } catch (BTreeDuplicateKeyException e) {
-                    // Ignore duplicate keys, since we get random tuples.
-                }
-                break;
-
-            case DELETE:
-                // Create a tuple reference with only key fields.
-                deleteTb.reset();
-                for (int i = 0; i < numKeyFields; i++) {
-                    deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-                }
-                deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
-                try {
-                    accessor.delete(deleteTuple);
-                } catch (BTreeNonExistentKeyException e) {
-                    // Ignore non-existant keys, since we get random tuples.
-                }
-                break;
-
-            case UPDATE:
-                try {
-                    accessor.update(tuple);
-                } catch (BTreeNonExistentKeyException e) {
-                    // Ignore non-existant keys, since we get random tuples.
-                } catch (BTreeNotUpdateableException e) {
-                    // Ignore not updateable exception due to numKeys == numFields.
-                }
-                break;
-
-            case POINT_SEARCH:
-                searchCursor.reset();
-                rangePred.setLowKey(tuple, true);
-                rangePred.setHighKey(tuple, true);
-                accessor.search(searchCursor, rangePred);
-                consumeCursorTuples(searchCursor);
-                break;
-
-            case SCAN:
-                searchCursor.reset();
-                rangePred.setLowKey(null, true);
-                rangePred.setHighKey(null, true);
-                accessor.search(searchCursor, rangePred);
-                consumeCursorTuples(searchCursor);
-                break;
-
-            case MERGE:
-                accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-                break;
-
-            default:
-                throw new HyracksDataException("Op " + op.toString() + " not supported.");
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorkerFactory.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorkerFactory.java
deleted file mode 100644
index 03463e6..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorkerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.multithread;
-
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class LSMBTreeTestWorkerFactory implements IIndexTestWorkerFactory {
-    @Override
-    public AbstractIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) {
-        return new LSMBTreeTestWorker(dataGen, opSelector, index, numBatches);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
deleted file mode 100644
index 69e2b58..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
-
-public class BTreeBulkLoadRunner extends BTreeRunner {
-
-    protected final float fillFactor;
-
-    public BTreeBulkLoadRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories, float fillFactor) throws HyracksDataException, BTreeException {
-        super(numBatches, pageSize, numPages, typeTraits, cmpFactories);
-        this.fillFactor = fillFactor;
-    }
-
-    @Override
-    public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
-        btree.create();
-        long start = System.currentTimeMillis();
-        IIndexBulkLoader bulkLoader = btree.createBulkLoader(1.0f, false, 0L);
-        for (int i = 0; i < numBatches; i++) {
-            TupleBatch batch = dataGen.tupleBatchQueue.take();
-            for (int j = 0; j < batch.size(); j++) {
-                bulkLoader.add(batch.get(j));
-            }
-        }
-        bulkLoader.end();
-        long end = System.currentTimeMillis();
-        long time = end - start;
-        return time;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
deleted file mode 100644
index 7e0514b..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import java.util.Enumeration;
-import java.util.logging.Level;
-import java.util.logging.LogManager;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class BTreePageSizePerf {
-    public static void main(String[] args) throws Exception {
-        // Disable logging so we can better see the output times.
-        Enumeration<String> loggers = LogManager.getLogManager().getLoggerNames();
-        while(loggers.hasMoreElements()) {
-            String loggerName = loggers.nextElement();
-            Logger logger = LogManager.getLogManager().getLogger(loggerName);
-            logger.setLevel(Level.OFF);
-        }
-        
-        int numTuples = 1000000;
-        int batchSize = 10000;
-        int numBatches = numTuples / batchSize;
-        
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, 30);
-        
-        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
-        
-        runExperiment(numBatches, batchSize, 1024, 100000, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 2048, 100000, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 4096, 25000, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 8192, 12500, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 16384, 6250, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 32768, 3125, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 65536, 1564, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 131072, 782, fieldSerdes, cmpFactories, typeTraits);
-        runExperiment(numBatches, batchSize, 262144, 391, fieldSerdes, cmpFactories, typeTraits);
-    }
-    
-    private static void runExperiment(int numBatches, int batchSize, int pageSize, int numPages, ISerializerDeserializer[] fieldSerdes, IBinaryComparatorFactory[] cmpFactories, ITypeTraits[] typeTraits) throws Exception {
-        System.out.println("PAGE SIZE: " + pageSize);
-        System.out.println("NUM PAGES: " + numPages);
-        System.out.println("MEMORY: " + (pageSize * numPages));
-        int repeats = 5;
-        long[] times = new long[repeats];
-        //BTreeRunner runner = new BTreeRunner(numTuples, pageSize, numPages, typeTraits, cmp);
-        InMemoryBTreeRunner runner = new InMemoryBTreeRunner(numBatches, pageSize, numPages, typeTraits, cmpFactories);
-        runner.init();
-        int numThreads = 1;
-        for (int i = 0; i < repeats; i++) {
-            DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, 30, 50, 10, false);
-            dataGen.start();            
-            times[i] = runner.runExperiment(dataGen, numThreads);
-            System.out.println("TIME " + i + ": " + times[i] + "ms");
-        }
-        runner.deinit();
-        long avgTime = 0;
-        for (int i = 0; i < repeats; i++) {
-            avgTime += times[i];
-        }
-        avgTime /= repeats;
-        System.out.println("AVG TIME: " + avgTime + "ms");
-        System.out.println("-------------------------------");
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
deleted file mode 100644
index 8658919..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class BTreeRunner extends InMemoryBTreeRunner {
-    protected static final int MAX_OPEN_FILES = 10;
-    protected static final int HYRACKS_FRAME_SIZE = 128;
-
-    public BTreeRunner(int numTuples, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories) throws HyracksDataException, BTreeException {
-        super(numTuples, pageSize, numPages, typeTraits, cmpFactories);
-    }
-
-    @Override
-    protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
-            throws HyracksDataException, BTreeException {
-        IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-        TestStorageManagerComponentHolder.init(pageSize, numPages, MAX_OPEN_FILES);
-        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        btree = BTreeUtils
-                .createBTree(bufferCache, fmp, typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM, file);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/ConcurrentSkipListRunner.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/ConcurrentSkipListRunner.java
deleted file mode 100644
index 8f44966..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/ConcurrentSkipListRunner.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import java.nio.ByteBuffer;
-import java.util.Comparator;
-import java.util.concurrent.ConcurrentSkipListSet;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-
-public class ConcurrentSkipListRunner implements IExperimentRunner {
-    public class TupleComparator implements Comparator<ITupleReference> {
-        private final MultiComparator cmp;
-
-        public TupleComparator(MultiComparator cmp) {
-            this.cmp = cmp;
-        }
-
-        @Override
-        public int compare(ITupleReference o1, ITupleReference o2) {
-            return cmp.compare(o1, o2);
-        }
-    }
-    
-    private final TupleComparator tupleCmp;
-    private final int numBatches;
-    private final int batchSize;
-    private final int tupleSize;
-    private final ITypeTraits[] typeTraits;
-    
-    public ConcurrentSkipListRunner(int numBatches, int batchSize, int tupleSize, ITypeTraits[] typeTraits, MultiComparator cmp) {
-        this.numBatches = numBatches;
-        this.tupleSize = tupleSize;
-        this.batchSize = batchSize;
-        this.typeTraits = typeTraits;
-        tupleCmp = new TupleComparator(cmp);
-    }
-    
-    @Override
-    public long runExperiment(DataGenThread dataGen, int numThreads) throws InterruptedException {
-        ConcurrentSkipListSet<ITupleReference> skipList = new ConcurrentSkipListSet<ITupleReference>(tupleCmp);
-        SkipListThread[] threads = new SkipListThread[numThreads];
-        int threadNumBatches = numBatches / numThreads;
-        for (int i = 0; i < numThreads; i++) {
-            threads[i] = new SkipListThread(dataGen, skipList, threadNumBatches, batchSize);            
-        }
-        // Wait until the tupleBatchQueue is completely full.
-        while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
-            Thread.sleep(10);
-        }
-        
-        long start = System.currentTimeMillis();
-        for (int i = 0; i < numThreads; i++) {
-            threads[i].start();
-        }
-        for (int i = 0; i < numThreads; i++) {
-            threads[i].join();
-        }
-        long end = System.currentTimeMillis();
-        long time = end - start;
-        return time;
-    }
-
-    @Override
-    public void init() throws Exception {
-    }
-
-    @Override
-    public void deinit() throws Exception {
-    }
-    
-    public void reset() throws Exception {
-    }
-    
-    public class SkipListThread extends Thread {
-    	private final DataGenThread dataGen;
-    	private final ConcurrentSkipListSet<ITupleReference> skipList;
-    	private final int numBatches;
-        public final TypeAwareTupleWriterFactory tupleWriterFactory;
-        public final TypeAwareTupleWriter tupleWriter;
-        public final TypeAwareTupleReference[] tuples;        
-        public final ByteBuffer tupleBuf; 
-
-        public SkipListThread(DataGenThread dataGen, ConcurrentSkipListSet<ITupleReference> skipList, int numBatches, int batchSize) {
-            this.dataGen = dataGen;
-            this.numBatches = numBatches;
-            this.skipList = skipList;
-            tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-            tupleWriter = (TypeAwareTupleWriter) tupleWriterFactory.createTupleWriter();
-            int numTuples = numBatches * batchSize;
-            tuples = new TypeAwareTupleReference[numTuples];
-            tupleBuf = ByteBuffer.allocate(numTuples * tupleSize);
-            for (int i = 0; i < numTuples; i++) {
-                tuples[i] = (TypeAwareTupleReference) tupleWriter.createTupleReference();
-            }
-        }
-    	
-        @Override
-        public void run() {
-            int tupleIndex = 0;
-            try {                
-                for (int i = 0; i < numBatches; i++) {
-                    TupleBatch batch = dataGen.tupleBatchQueue.take();
-                    for (int j = 0; j < batch.size(); j++) {
-                        // Copy the tuple to the buffer and set the pre-created tuple ref.                        
-                        tupleWriter.writeTuple(batch.get(j), tupleBuf.array(), tupleIndex * tupleSize);
-                        tuples[tupleIndex].resetByTupleOffset(tupleBuf, tupleIndex * tupleSize);
-                        skipList.add(tuples[tupleIndex]);
-                        tupleIndex++;
-                    }
-                }
-            } catch (Exception e) {
-                System.out.println(tupleIndex);
-                e.printStackTrace();
-            }
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/IExperimentRunner.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/IExperimentRunner.java
deleted file mode 100644
index 0ea3a71..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/IExperimentRunner.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public interface IExperimentRunner {
-    public static int DEFAULT_MAX_OUTSTANDING = 100000;
-    
-    public void init() throws Exception;
-    
-    public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception;
-    
-    public void reset() throws Exception;
-    
-    public void deinit() throws Exception;
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
deleted file mode 100644
index 1b453b7..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-
-public class InMemoryBTreeRunner extends Thread implements IExperimentRunner {
-    protected IBufferCache bufferCache;
-    protected FileReference file;
-
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final static String sep = System.getProperty("file.separator");
-    protected String fileName;
-
-    protected final int numBatches;
-    protected BTree btree;
-
-    public InMemoryBTreeRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories) throws HyracksDataException, BTreeException {
-        this.numBatches = numBatches;
-        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-        file = new FileReference(new File(fileName));
-        init(pageSize, numPages, typeTraits, cmpFactories);
-    }
-
-    protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
-            throws HyracksDataException, BTreeException {
-        ICacheMemoryAllocator allocator = new HeapBufferAllocator();
-        bufferCache = new InMemoryBufferCache(allocator, pageSize, numPages, new TransientFileMapManager());
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-        IFreePageManager freePageManager = new InMemoryFreePageManager(bufferCache.getNumPages(), metaFrameFactory);
-        btree = new BTree(bufferCache, new TransientFileMapManager(), freePageManager, interiorFrameFactory,
-                leafFrameFactory, cmpFactories, typeTraits.length, file);
-    }
-
-    @Override
-    public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
-        BTreeThread[] threads = new BTreeThread[numThreads];
-        int threadNumBatches = numBatches / numThreads;
-        for (int i = 0; i < numThreads; i++) {
-            threads[i] = new BTreeThread(dataGen, btree, threadNumBatches);
-        }
-        // Wait until the tupleBatchQueue is completely full.
-        while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
-            Thread.sleep(10);
-        }
-
-        long start = System.currentTimeMillis();
-        for (int i = 0; i < numThreads; i++) {
-            threads[i].start();
-        }
-        for (int i = 0; i < numThreads; i++) {
-            threads[i].join();
-        }
-        long end = System.currentTimeMillis();
-        long time = end - start;
-        return time;
-    }
-
-    @Override
-    public void init() throws Exception {
-    }
-
-    @Override
-    public void deinit() throws Exception {
-        bufferCache.close();
-    }
-
-    @Override
-    public void reset() throws Exception {
-        btree.create();
-    }
-
-    public class BTreeThread extends Thread {
-        private final DataGenThread dataGen;
-        private final int numBatches;
-        private final ITreeIndexAccessor indexAccessor;
-
-        public BTreeThread(DataGenThread dataGen, BTree btree, int numBatches) {
-            this.dataGen = dataGen;
-            this.numBatches = numBatches;
-            indexAccessor = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        }
-
-        @Override
-        public void run() {
-            try {
-                for (int i = 0; i < numBatches; i++) {
-                    TupleBatch batch = dataGen.tupleBatchQueue.take();
-                    for (int j = 0; j < batch.size(); j++) {
-                        try {
-                            indexAccessor.insert(batch.get(j));
-                        } catch (TreeIndexException e) {
-                        }
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/InMemorySortRunner.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/InMemorySortRunner.java
deleted file mode 100644
index 53fbd88..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/InMemorySortRunner.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.concurrent.ConcurrentSkipListSet;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-
-public class InMemorySortRunner implements IExperimentRunner {
-    public class TupleComparator implements Comparator<ITupleReference> {
-        private final MultiComparator cmp;
-
-        public TupleComparator(MultiComparator cmp) {
-            this.cmp = cmp;
-        }
-
-        @Override
-        public int compare(ITupleReference o1, ITupleReference o2) {
-            return cmp.compare(o1, o2);
-        }
-    }
-    
-    private final TupleComparator tupleCmp;
-    private final int numBatches;
-    private final int batchSize;
-    private final int tupleSize;
-    private final ITypeTraits[] typeTraits;
-    
-    private final TypeAwareTupleWriterFactory tupleWriterFactory;
-    private final TypeAwareTupleWriter tupleWriter;
-    private final ArrayList<TypeAwareTupleReference> tuples;        
-    private final ByteBuffer tupleBuf; 
-    
-    public InMemorySortRunner(int numBatches, int batchSize, int tupleSize, ITypeTraits[] typeTraits, MultiComparator cmp) {
-        this.numBatches = numBatches;
-        this.tupleSize = tupleSize;
-        this.batchSize = batchSize;
-        this.typeTraits = typeTraits;
-        tupleCmp = new TupleComparator(cmp);
-        tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        tupleWriter = (TypeAwareTupleWriter) tupleWriterFactory.createTupleWriter();
-        int numTuples = numBatches * batchSize;
-        tuples = new ArrayList<TypeAwareTupleReference>();
-        tupleBuf = ByteBuffer.allocate(numTuples * tupleSize);
-        for (int i = 0; i < numTuples; i++) {
-            tuples.add((TypeAwareTupleReference) tupleWriter.createTupleReference());
-        }
-    }
-    
-    @Override
-    public long runExperiment(DataGenThread dataGen, int numThreads) throws InterruptedException {
-        // Wait until the tupleBatchQueue is completely full.
-        while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
-            Thread.sleep(10);
-        }
-        
-        long start = System.currentTimeMillis();
-        int tupleIndex = 0;
-        for (int i = 0; i < numBatches; i++) {
-            TupleBatch batch = dataGen.tupleBatchQueue.take();
-            for (int j = 0; j < batch.size(); j++) {
-                // Copy the tuple to the buffer and set the pre-created tuple ref.                        
-                tupleWriter.writeTuple(batch.get(j), tupleBuf.array(), tupleIndex * tupleSize);
-                tuples.get(tupleIndex).resetByTupleOffset(tupleBuf, tupleIndex * tupleSize);
-                tupleIndex++;
-            }
-        }
-        // Perform the sort.        
-        Collections.sort(tuples, tupleCmp);
-        long end = System.currentTimeMillis();
-        long time = end - start;
-        return time;
-    }
-
-    @Override
-    public void init() throws Exception {
-    }
-
-    @Override
-    public void deinit() throws Exception {
-    }
-    
-    public void reset() throws Exception {
-    }
-    
-    public class SkipListThread extends Thread {
-    	private final DataGenThread dataGen;
-    	private final ConcurrentSkipListSet<ITupleReference> skipList;
-    	private final int numBatches;
-        public final TypeAwareTupleWriterFactory tupleWriterFactory;
-        public final TypeAwareTupleWriter tupleWriter;
-        public final TypeAwareTupleReference[] tuples;        
-        public final ByteBuffer tupleBuf; 
-
-        public SkipListThread(DataGenThread dataGen, ConcurrentSkipListSet<ITupleReference> skipList, int numBatches, int batchSize) {
-            this.dataGen = dataGen;
-            this.numBatches = numBatches;
-            this.skipList = skipList;
-            tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-            tupleWriter = (TypeAwareTupleWriter) tupleWriterFactory.createTupleWriter();
-            int numTuples = numBatches * batchSize;
-            tuples = new TypeAwareTupleReference[numTuples];
-            tupleBuf = ByteBuffer.allocate(numTuples * tupleSize);
-            for (int i = 0; i < numTuples; i++) {
-                tuples[i] = (TypeAwareTupleReference) tupleWriter.createTupleReference();
-            }
-        }
-    	
-        @Override
-        public void run() {
-            int tupleIndex = 0;
-            try {                
-                for (int i = 0; i < numBatches; i++) {
-                    TupleBatch batch = dataGen.tupleBatchQueue.take();
-                    for (int j = 0; j < batch.size(); j++) {
-                        // Copy the tuple to the buffer and set the pre-created tuple ref.                        
-                        tupleWriter.writeTuple(batch.get(j), tupleBuf.array(), tupleIndex * tupleSize);
-                        tuples[tupleIndex].resetByTupleOffset(tupleBuf, tupleIndex * tupleSize);
-                        skipList.add(tuples[tupleIndex]);
-                        tupleIndex++;
-                    }
-                }
-            } catch (Exception e) {
-                System.out.println(tupleIndex);
-                e.printStackTrace();
-            }
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
deleted file mode 100644
index 5d2185a..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.impls.LSMBTree;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.util.LSMBTreeUtils;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class LSMTreeRunner implements IExperimentRunner {
-
-    private static final int MAX_OPEN_FILES = 10000;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-
-    protected IHyracksTaskContext ctx;
-    protected IOManager ioManager;
-    protected IBufferCache bufferCache;
-    protected int lsmtreeFileId;
-
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String sep = System.getProperty("file.separator");
-    protected final static String classDir = "/lsmtree/";
-    protected String onDiskDir;
-    protected FileReference file;
-
-    protected final int numBatches;
-    protected final LSMBTree lsmtree;
-    protected final ILSMIOOperationScheduler ioScheduler;
-    protected IBufferCache memBufferCache;
-    private final int onDiskPageSize;
-    private final int onDiskNumPages;
-
-    public LSMTreeRunner(int numBatches, int inMemPageSize, int inMemNumPages, int onDiskPageSize, int onDiskNumPages,
-            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields)
-            throws BTreeException, HyracksException {
-        this.numBatches = numBatches;
-
-        this.onDiskPageSize = onDiskPageSize;
-        this.onDiskNumPages = onDiskNumPages;
-
-        onDiskDir = classDir + sep + simpleDateFormat.format(new Date()) + sep;
-        file = new FileReference(new File(onDiskDir));
-        ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
-        TestStorageManagerComponentHolder.init(this.onDiskPageSize, this.onDiskNumPages, MAX_OPEN_FILES);
-        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        ioManager = TestStorageManagerComponentHolder.getIOManager();
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-
-        IInMemoryBufferCache memBufferCache = new InMemoryBufferCache(new HeapBufferAllocator(), inMemPageSize,
-                inMemNumPages, new TransientFileMapManager());
-        IInMemoryFreePageManager memFreePageManager = new InMemoryFreePageManager(inMemNumPages,
-                new LIFOMetaDataFrameFactory());
-        this.ioScheduler = SynchronousScheduler.INSTANCE;
-        lsmtree = LSMBTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file, bufferCache, fmp,
-                typeTraits, cmpFactories, bloomFilterKeyFields, NoMergePolicy.INSTANCE,
-                ThreadCountingOperationTrackerFactory.INSTANCE, ioScheduler, NoOpIOOperationCallback.INSTANCE);
-    }
-
-    @Override
-    public void init() throws Exception {
-    }
-
-    @Override
-    public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
-        LSMTreeThread[] threads = new LSMTreeThread[numThreads];
-        int threadNumBatches = numBatches / numThreads;
-        for (int i = 0; i < numThreads; i++) {
-            threads[i] = new LSMTreeThread(dataGen, lsmtree, threadNumBatches);
-        }
-        // Wait until the tupleBatchQueue is completely full.
-        while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
-            Thread.sleep(10);
-        }
-
-        long start = System.currentTimeMillis();
-        for (int i = 0; i < numThreads; i++) {
-            threads[i].start();
-        }
-        for (int i = 0; i < numThreads; i++) {
-            threads[i].join();
-        }
-        long end = System.currentTimeMillis();
-        long time = end - start;
-        return time;
-    }
-
-    @Override
-    public void reset() throws Exception {
-        lsmtree.create();
-    }
-
-    @Override
-    public void deinit() throws Exception {
-        bufferCache.closeFile(lsmtreeFileId);
-        bufferCache.close();
-        memBufferCache.closeFile(lsmtreeFileId);
-        memBufferCache.close();
-    }
-
-    public class LSMTreeThread extends Thread {
-        private final DataGenThread dataGen;
-        private final int numBatches;
-        private final IIndexAccessor lsmTreeAccessor;
-
-        public LSMTreeThread(DataGenThread dataGen, LSMBTree lsmTree, int numBatches) {
-            this.dataGen = dataGen;
-            this.numBatches = numBatches;
-            lsmTreeAccessor = lsmTree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        }
-
-        @Override
-        public void run() {
-            try {
-                for (int i = 0; i < numBatches; i++) {
-                    TupleBatch batch = dataGen.tupleBatchQueue.take();
-                    for (int j = 0; j < batch.size(); j++) {
-                        try {
-                            lsmTreeAccessor.insert(batch.get(j));
-                        } catch (TreeIndexException e) {
-                        }
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
deleted file mode 100644
index c842191..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.perf;
-
-import java.util.Enumeration;
-import java.util.logging.Level;
-import java.util.logging.LogManager;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class PerfExperiment {
-    public static void main(String[] args) throws Exception {
-        // Disable logging so we can better see the output times.
-        Enumeration<String> loggers = LogManager.getLogManager().getLoggerNames();
-        while(loggers.hasMoreElements()) {
-            String loggerName = loggers.nextElement();
-            Logger logger = LogManager.getLogManager().getLogger(loggerName);
-            logger.setLevel(Level.OFF);
-        }
-        
-        int numTuples = 100000; // 100K
-        //int numTuples = 1000000; // 1M
-        //int numTuples = 2000000; // 2M
-        //int numTuples = 3000000; // 3M
-        //int numTuples = 10000000; // 10M
-        //int numTuples = 20000000; // 20M
-        //int numTuples = 30000000; // 30M
-        //int numTuples = 40000000; // 40M
-        //int numTuples = 60000000; // 60M
-        //int numTuples = 100000000; // 100M
-        //int numTuples = 200000000; // 200M
-        int batchSize = 10000;
-        int numBatches = numTuples / batchSize;
-        
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, 30);
-        
-        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
-        
-        //int repeats = 1000;
-        int repeats = 1;
-        long[] times = new long[repeats];
-
-        int numThreads = 2;
-        for (int i = 0; i < repeats; i++) {
-            //ConcurrentSkipListRunner runner = new ConcurrentSkipListRunner(numBatches, batchSize, tupleSize, typeTraits, cmp);
-            InMemoryBTreeRunner runner = new InMemoryBTreeRunner(numBatches, 8192, 100000, typeTraits, cmpFactories);
-            //BTreeBulkLoadRunner runner = new BTreeBulkLoadRunner(numBatches, 8192, 100000, typeTraits, cmp, 1.0f);
-        	//BTreeRunner runner = new BTreeRunner(numBatches, 8192, 100000, typeTraits, cmp);
-        	//String btreeName = "071211";
-        	//BTreeSearchRunner runner = new BTreeSearchRunner(btreeName, 10, numBatches, 8192, 25000, typeTraits, cmp);
-        	//LSMTreeRunner runner = new LSMTreeRunner(numBatches, 8192, 100, 8192, 250, typeTraits, cmp);
-        	//LSMTreeSearchRunner runner = new LSMTreeSearchRunner(100000, numBatches, 8192, 24750, 8192, 250, typeTraits, cmp); 
-            DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, 30, 50, 10, false);
-            dataGen.start();
-            runner.reset();
-            times[i] = runner.runExperiment(dataGen, numThreads);
-            System.out.println("TIME " + i + ": " + times[i] + "ms");
-            runner.deinit();
-        }
-        long avgTime = 0;
-        for (int i = 0; i < repeats; i++) {
-            avgTime += times[i];
-        }
-        avgTime /= repeats;
-        System.out.println("AVG TIME: " + avgTime + "ms");
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
deleted file mode 100644
index ce6c27c..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.tuples;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Random;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenUtils;
-import edu.uci.ics.hyracks.storage.am.common.datagen.IFieldValueGenerator;
-
-@SuppressWarnings("rawtypes")
-public class LSMBTreeTuplesTest {
-
-    private final Random rnd = new Random(50);
-    
-    private ByteBuffer writeTuple(ITupleReference tuple, LSMBTreeTupleWriter tupleWriter) {
-        // Write tuple into a buffer, then later try to read it.
-        int bytesRequired = tupleWriter.bytesRequired(tuple);
-        byte[] bytes = new byte[bytesRequired];
-        ByteBuffer targetBuf = ByteBuffer.wrap(bytes);
-        tupleWriter.writeTuple(tuple, bytes, 0);
-        return targetBuf;
-    }
-    
-    private void testLSMBTreeTuple(ISerializerDeserializer[] maxFieldSerdes) throws HyracksDataException {        
-        // Create a tuple with the max-1 fields for checking setFieldCount() of tuple references later.
-        ITypeTraits[] maxTypeTraits = SerdeUtils.serdesToTypeTraits(maxFieldSerdes); 
-        IFieldValueGenerator[] maxFieldGens = DataGenUtils.getFieldGensFromSerdes(maxFieldSerdes, rnd, false);
-        // Generate a tuple with random field values.
-        Object[] maxFields = new Object[maxFieldSerdes.length];
-        for (int j = 0; j < maxFieldSerdes.length; j++) {
-            maxFields[j] = maxFieldGens[j].next();
-        }            
-        
-        // Run test for varying number of fields and keys.
-        for (int numKeyFields = 1; numKeyFields < maxFieldSerdes.length; numKeyFields++) {
-            // Create tuples with varying number of fields, and try to interpret their bytes with the lsmBTreeTuple.
-            for (int numFields = numKeyFields; numFields <= maxFieldSerdes.length; numFields++) {                
-                // Create and write tuple to bytes using an LSMBTreeTupleWriter.
-                LSMBTreeTupleWriter maxMatterTupleWriter = new LSMBTreeTupleWriter(maxTypeTraits, numKeyFields, false);
-                ITupleReference maxTuple = TupleUtils.createTuple(maxFieldSerdes, (Object[])maxFields);
-                ByteBuffer maxMatterBuf = writeTuple(maxTuple, maxMatterTupleWriter);
-                // Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
-                LSMBTreeTupleReference maxLsmBTreeTuple = (LSMBTreeTupleReference) maxMatterTupleWriter.createTupleReference();
-                
-                ISerializerDeserializer[] fieldSerdes = Arrays.copyOfRange(maxFieldSerdes, 0, numFields);
-                ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);                
-                IFieldValueGenerator[] fieldGens = DataGenUtils.getFieldGensFromSerdes(fieldSerdes, rnd, false);
-                // Generate a tuple with random field values.
-                Object[] fields = new Object[numFields];
-                for (int j = 0; j < numFields; j++) {
-                    fields[j] = fieldGens[j].next();
-                }            
-                // Create and write tuple to bytes using an LSMBTreeTupleWriter.
-                ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, (Object[])fields);
-                LSMBTreeTupleWriter matterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, false);
-                LSMBTreeTupleWriter antimatterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, true);
-                LSMBTreeCopyTupleWriter copyTupleWriter = new LSMBTreeCopyTupleWriter(typeTraits, numKeyFields);
-                ByteBuffer matterBuf = writeTuple(tuple, matterTupleWriter);
-                ByteBuffer antimatterBuf = writeTuple(tuple, antimatterTupleWriter);
-
-                // The antimatter buf should only contain keys, sanity check the size.
-                if (numFields != numKeyFields) {
-                    assertTrue(antimatterBuf.array().length < matterBuf.array().length);
-                }
-
-                // Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
-                LSMBTreeTupleReference lsmBTreeTuple = (LSMBTreeTupleReference) matterTupleWriter.createTupleReference();                
-                
-                // Use LSMBTree tuple reference to interpret the written tuples.
-                // Repeat the block inside to test that repeated resetting to matter/antimatter tuples works.
-                for (int r = 0; r < 4; r++) {
-                    
-                    // Check matter tuple with lsmBTreeTuple.
-                    lsmBTreeTuple.resetByTupleOffset(matterBuf, 0);
-                    checkTuple(lsmBTreeTuple, numFields, false, fieldSerdes, fields);
-                    
-                    // Create a copy using copyTupleWriter, and verify again.
-                    ByteBuffer copyMatterBuf = writeTuple(lsmBTreeTuple, copyTupleWriter);
-                    lsmBTreeTuple.resetByTupleOffset(copyMatterBuf, 0);
-                    checkTuple(lsmBTreeTuple, numFields, false, fieldSerdes, fields);
-                    
-                    // Check antimatter tuple with lsmBTreeTuple.
-                    lsmBTreeTuple.resetByTupleOffset(antimatterBuf, 0);                                        
-                    // Should only contain keys.
-                    checkTuple(lsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
-                    
-                    // Create a copy using copyTupleWriter, and verify again.
-                    ByteBuffer copyAntimatterBuf = writeTuple(lsmBTreeTuple, copyTupleWriter);
-                    lsmBTreeTuple.resetByTupleOffset(copyAntimatterBuf, 0);
-                    // Should only contain keys.
-                    checkTuple(lsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
-                    
-                    // Check matter tuple with maxLsmBTreeTuple.
-                    // We should be able to manually set a prefix of the fields 
-                    // (the passed type traits in the tuple factory's constructor).
-                    maxLsmBTreeTuple.setFieldCount(numFields);
-                    maxLsmBTreeTuple.resetByTupleOffset(matterBuf, 0);
-                    checkTuple(maxLsmBTreeTuple, numFields, false, fieldSerdes, fields);
-                    
-                    // Check antimatter tuple with maxLsmBTreeTuple.
-                    maxLsmBTreeTuple.resetByTupleOffset(antimatterBuf, 0);
-                    // Should only contain keys.
-                    checkTuple(maxLsmBTreeTuple, numKeyFields, true, fieldSerdes, fields);
-                    
-                    // Resetting maxLsmBTreeTuple should set its field count to
-                    // maxFieldSerdes.length, based on the its type traits.
-                    maxLsmBTreeTuple.resetByTupleOffset(maxMatterBuf, 0);
-                    checkTuple(maxLsmBTreeTuple, maxFieldSerdes.length, false, maxFieldSerdes, maxFields);
-                }
-            }
-        }
-    }
-    
-    private void checkTuple(LSMBTreeTupleReference tuple, int expectedFieldCount, boolean expectedAntimatter, ISerializerDeserializer[] fieldSerdes, Object[] expectedFields) throws HyracksDataException {
-        assertEquals(expectedFieldCount, tuple.getFieldCount());
-        assertEquals(expectedAntimatter, tuple.isAntimatter());
-        Object[] deserMatterTuple = TupleUtils.deserializeTuple(tuple, fieldSerdes);
-        for (int j = 0; j < expectedFieldCount; j++) {
-            assertEquals(expectedFields[j], deserMatterTuple[j]);
-        }
-    }
-    
-    @Test
-    public void testLSMBTreeTuple() throws HyracksDataException {        
-        ISerializerDeserializer[] intFields = new IntegerSerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-        testLSMBTreeTuple(intFields);
-        
-        ISerializerDeserializer[] stringFields = new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        testLSMBTreeTuple(stringFields);
-        
-        ISerializerDeserializer[] mixedFields = new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-        testLSMBTreeTuple(mixedFields);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestContext.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestContext.java
deleted file mode 100644
index f790fde..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestContext.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.util;
-
-import java.util.Collection;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.impls.LSMBTree;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-@SuppressWarnings("rawtypes")
-public final class LSMBTreeTestContext extends OrderedIndexTestContext {
-
-    public LSMBTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
-        super(fieldSerdes, treeIndex);
-    }
-
-    @Override
-    public int getKeyFieldCount() {
-        LSMBTree lsmTree = (LSMBTree) index;
-        return lsmTree.getComparatorFactories().length;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        LSMBTree lsmTree = (LSMBTree) index;
-        return lsmTree.getComparatorFactories();
-    }
-
-    /**
-     * Override to provide upsert semantics for the check tuples.
-     */
-    @Override
-    public void insertCheckTuple(CheckTuple checkTuple, Collection<CheckTuple> checkTuples) {
-        upsertCheckTuple(checkTuple, checkTuples);
-    }
-
-    public static LSMBTreeTestContext create(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ISerializerDeserializer[] fieldSerdes,
-            int numKeyFields, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
-            throws Exception {
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
-        int[] bloomFilterKeyFields = new int[numKeyFields];
-        for (int i = 0; i < numKeyFields; ++i) {
-            bloomFilterKeyFields[i] = i;
-        }
-        LSMBTree lsmTree = LSMBTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file,
-                diskBufferCache, diskFileMapProvider, typeTraits, cmpFactories, bloomFilterKeyFields, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider);
-        LSMBTreeTestContext testCtx = new LSMBTreeTestContext(fieldSerdes, lsmTree);
-        return testCtx;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
deleted file mode 100644
index 9128607..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.btree.util;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class LSMBTreeTestHarness {
-    protected static final Logger LOGGER = Logger.getLogger(LSMBTreeTestHarness.class.getName());
-
-    public static final BTreeLeafFrameType[] LEAF_FRAMES_TO_TEST = new BTreeLeafFrameType[] { BTreeLeafFrameType.REGULAR_NSM };
-
-    private static final long RANDOM_SEED = 50;
-
-    protected final int diskPageSize;
-    protected final int diskNumPages;
-    protected final int diskMaxOpenFiles;
-    protected final int memPageSize;
-    protected final int memNumPages;
-    protected final int hyracksFrameSize;
-
-    protected IOManager ioManager;
-    protected IBufferCache diskBufferCache;
-    protected IFileMapProvider diskFileMapProvider;
-    protected IInMemoryBufferCache memBufferCache;
-    protected IInMemoryFreePageManager memFreePageManager;
-    protected IHyracksTaskContext ctx;
-    protected ILSMIOOperationScheduler ioScheduler;
-    protected ILSMMergePolicy mergePolicy;
-    protected ILSMOperationTrackerFactory opTrackerFactory;
-    protected ILSMIOOperationCallbackProvider ioOpCallbackProvider;
-
-    protected final Random rnd = new Random();
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String sep = System.getProperty("file.separator");
-    protected String onDiskDir;
-    protected FileReference file;
-
-    public LSMBTreeTestHarness() {
-        this.diskPageSize = AccessMethodTestsConfig.LSM_BTREE_DISK_PAGE_SIZE;
-        this.diskNumPages = AccessMethodTestsConfig.LSM_BTREE_DISK_NUM_PAGES;
-        this.diskMaxOpenFiles = AccessMethodTestsConfig.LSM_BTREE_DISK_MAX_OPEN_FILES;
-        this.memPageSize = AccessMethodTestsConfig.LSM_BTREE_MEM_PAGE_SIZE;
-        this.memNumPages = AccessMethodTestsConfig.LSM_BTREE_MEM_NUM_PAGES;
-        this.hyracksFrameSize = AccessMethodTestsConfig.LSM_BTREE_HYRACKS_FRAME_SIZE;
-        this.ioScheduler = SynchronousScheduler.INSTANCE;
-        this.mergePolicy = NoMergePolicy.INSTANCE;
-        this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
-        this.ioOpCallbackProvider = NoOpIOOperationCallback.INSTANCE;
-    }
-
-    public LSMBTreeTestHarness(int diskPageSize, int diskNumPages, int diskMaxOpenFiles, int memPageSize,
-            int memNumPages, int hyracksFrameSize) {
-        this.diskPageSize = diskPageSize;
-        this.diskNumPages = diskNumPages;
-        this.diskMaxOpenFiles = diskMaxOpenFiles;
-        this.memPageSize = memPageSize;
-        this.memNumPages = memNumPages;
-        this.hyracksFrameSize = hyracksFrameSize;
-        this.ioScheduler = SynchronousScheduler.INSTANCE;
-        this.mergePolicy = NoMergePolicy.INSTANCE;
-        this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
-    }
-
-    public void setUp() throws HyracksException {
-        onDiskDir = "lsm_btree_" + simpleDateFormat.format(new Date()) + sep;
-        file = new FileReference(new File(onDiskDir));
-        ctx = TestUtils.create(getHyracksFrameSize());
-        TestStorageManagerComponentHolder.init(diskPageSize, diskNumPages, diskMaxOpenFiles);
-        diskBufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        diskFileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        memBufferCache = new InMemoryBufferCache(new HeapBufferAllocator(), memPageSize, memNumPages,
-                new TransientFileMapManager());
-        memFreePageManager = new InMemoryFreePageManager(memNumPages, new LIFOMetaDataFrameFactory());
-        ioManager = TestStorageManagerComponentHolder.getIOManager();
-        rnd.setSeed(RANDOM_SEED);
-    }
-
-    public void tearDown() throws HyracksDataException {
-        diskBufferCache.close();
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File dir = new File(dev.getPath(), onDiskDir);
-            FilenameFilter filter = new FilenameFilter() {
-                public boolean accept(File dir, String name) {
-                    return !name.startsWith(".");
-                }
-            };
-            String[] files = dir.list(filter);
-            if (files != null) {
-                for (String fileName : files) {
-                    File file = new File(dir.getPath() + File.separator + fileName);
-                    file.delete();
-                }
-            }
-            dir.delete();
-        }
-    }
-
-    public int getDiskPageSize() {
-        return diskPageSize;
-    }
-
-    public int getDiskNumPages() {
-        return diskNumPages;
-    }
-
-    public int getDiskMaxOpenFiles() {
-        return diskMaxOpenFiles;
-    }
-
-    public int getMemPageSize() {
-        return memPageSize;
-    }
-
-    public int getMemNumPages() {
-        return memNumPages;
-    }
-
-    public int getHyracksFrameSize() {
-        return hyracksFrameSize;
-    }
-
-    public IOManager getIOManager() {
-        return ioManager;
-    }
-
-    public IBufferCache getDiskBufferCache() {
-        return diskBufferCache;
-    }
-
-    public IFileMapProvider getDiskFileMapProvider() {
-        return diskFileMapProvider;
-    }
-
-    public IInMemoryBufferCache getMemBufferCache() {
-        return memBufferCache;
-    }
-
-    public IInMemoryFreePageManager getMemFreePageManager() {
-        return memFreePageManager;
-    }
-
-    public IHyracksTaskContext getHyracksTastContext() {
-        return ctx;
-    }
-
-    public FileReference getFileReference() {
-        return file;
-    }
-
-    public Random getRandom() {
-        return rnd;
-    }
-
-    public ILSMIOOperationScheduler getIOScheduler() {
-        return ioScheduler;
-    }
-
-    public ILSMOperationTrackerFactory getOperationTrackerFactory() {
-        return opTrackerFactory;
-    }
-
-    public ILSMMergePolicy getMergePolicy() {
-        return mergePolicy;
-    }
-
-    public ILSMIOOperationCallbackProvider getIOOperationCallbackProvider() {
-        return ioOpCallbackProvider;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml b/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
deleted file mode 100644
index c6430c4..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-lsm-common-test</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-tests</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-lsm-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
deleted file mode 100644
index 69e23bc..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class DummyLSMIndexFileManager extends AbstractLSMIndexFileManager {
-
-    public DummyLSMIndexFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
-            TreeIndexFactory<? extends ITreeIndex> treeFactory) {
-        super(ioManager, fileMapProvider, file, treeFactory, 0);
-    }
-
-    protected void cleanupAndGetValidFilesInternal(IODeviceHandle dev, FilenameFilter filter,
-            TreeIndexFactory<? extends ITreeIndex> treeFactory, ArrayList<ComparableFileName> allFiles)
-            throws HyracksDataException, IndexException {
-        File dir = new File(dev.getPath(), baseDir);
-        String[] files = dir.list(filter);
-        for (String fileName : files) {
-            File file = new File(dir.getPath() + File.separator + fileName);
-            FileReference fileRef = new FileReference(file);
-            allFiles.add(new ComparableFileName(fileRef));
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyTreeFactory.java b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyTreeFactory.java
deleted file mode 100644
index 8b22771..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyTreeFactory.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common;
-
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
-
-public class DummyTreeFactory extends TreeIndexFactory<ITreeIndex> {
-
-    public DummyTreeFactory() {
-        super(null, null, null, null, null, null, 0);
-    }
-
-    @Override
-    public ITreeIndex createIndexInstance(FileReference file) throws IndexException {
-        return null;
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/InMemoryBufferCacheTest.java b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/InMemoryBufferCacheTest.java
deleted file mode 100644
index adba93d..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/InMemoryBufferCacheTest.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import java.util.HashSet;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
-
-public class InMemoryBufferCacheTest {
-    private static final int PAGE_SIZE = 256;
-    private static final int NUM_PAGES = 100;
-    private HashSet<ICachedPage> pinnedPages = new HashSet<ICachedPage>();
-
-    @Test
-    public void test01() throws Exception {
-        InMemoryBufferCache memBufferCache = new InMemoryBufferCache(new HeapBufferAllocator(), PAGE_SIZE, NUM_PAGES,
-                new TransientFileMapManager());
-        memBufferCache.open();
-        int dummyFileId = 0;
-        // Pin all pages, and make sure they return unique ICachedPages.
-        // We expect no overflow pages.
-        for (int i = 0; i < NUM_PAGES; i++) {
-            ICachedPage page = memBufferCache.pin(BufferedFileHandle.getDiskPageId(dummyFileId, i), false);
-            if (pinnedPages.contains(page)) {
-                fail("Id collision for ICachedPage, caused by id: " + i);
-            }
-            pinnedPages.add(page);
-            assertEquals(0, memBufferCache.getNumOverflowPages());
-        }
-        // Pin pages above capacity. We expect to be given new overflow pages.
-        // Going above capacity should be very rare, but nevertheless succeed.
-        for (int i = 0; i < 100; i++) {
-            ICachedPage page = memBufferCache.pin(BufferedFileHandle.getDiskPageId(dummyFileId, i + NUM_PAGES), false);
-            if (pinnedPages.contains(page)) {
-                fail("Id collision for ICachedPage, caused by overflow id: " + i);
-            }
-            pinnedPages.add(page);
-            assertEquals(i + 1, memBufferCache.getNumOverflowPages());
-        }
-        memBufferCache.close();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/InMemoryFreePageManagerTest.java b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/InMemoryFreePageManagerTest.java
deleted file mode 100644
index bd09a3f..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/InMemoryFreePageManagerTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
-
-public class InMemoryFreePageManagerTest {
-
-    private final int NUM_PAGES = 100;
-    
-    private void testInMemoryFreePageManager(InMemoryFreePageManager memFreePageManager) throws HyracksDataException {
-        // The first two pages are reserved for the BTree's metadata page and
-        // root page.
-        // The "actual" capacity is therefore numPages - 2.
-        int capacity = memFreePageManager.getCapacity();
-        assertEquals(capacity, NUM_PAGES - 2);
-        for (int i = 0; i < capacity; i++) {
-            int pageId = memFreePageManager.getFreePage(null);
-            // The free pages start from page 2;
-            assertEquals(i + 2, pageId);
-            assertFalse(memFreePageManager.isFull());
-        }
-        // Start asking for 100 pages above the capacity.
-        // Asking for pages above the capacity should be very rare, but
-        // nevertheless succeed.
-        // We expect isFull() to return true.
-        for (int i = 0; i < 100; i++) {
-            int pageId = memFreePageManager.getFreePage(null);
-            assertEquals(capacity + i + 2, pageId);
-            assertTrue(memFreePageManager.isFull());
-        }
-    }
-    
-    @Test
-    public void test01() throws HyracksDataException {
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-        InMemoryFreePageManager memFreePageManager = new InMemoryFreePageManager(NUM_PAGES, metaFrameFactory);
-        testInMemoryFreePageManager(memFreePageManager);
-        // We expect exactly the same behavior after a reset().
-        memFreePageManager.reset();
-        testInMemoryFreePageManager(memFreePageManager);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
deleted file mode 100644
index 161f4ce..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.common;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.Executors;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-
-public class LSMIndexFileManagerTest {
-    private static final int DEFAULT_PAGE_SIZE = 256;
-    private static final int DEFAULT_NUM_PAGES = 100;
-    private static final int DEFAULT_MAX_OPEN_FILES = 10;
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String sep = System.getProperty("file.separator");
-    protected IOManager ioManager;
-    protected IFileMapProvider fileMapProvider;
-    protected String baseDir;
-    protected FileReference file;
-
-    @Before
-    public void setUp() throws HyracksException {
-        TestStorageManagerComponentHolder.init(DEFAULT_PAGE_SIZE, DEFAULT_NUM_PAGES, DEFAULT_MAX_OPEN_FILES);
-        ioManager = TestStorageManagerComponentHolder.getIOManager();
-        fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(null);
-        baseDir = "lsm_tree" + simpleDateFormat.format(new Date()) + sep;
-        File f = new File(baseDir);
-        f.mkdirs();
-        file = new FileReference(f);
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        File f = new File(baseDir);
-        f.deleteOnExit();
-    }
-
-    public void sortOrderTest(boolean testFlushFileName) throws InterruptedException, HyracksDataException {
-        ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
-                new DummyTreeFactory());
-        LinkedList<String> fileNames = new LinkedList<String>();
-
-        int numFileNames = 100;
-        long sleepTime = 5;
-        for (int i = 0; i < numFileNames; i++) {
-            String flushFileName = (String) fileManager.getRelFlushFileReference().getInsertIndexFileReference()
-                    .getFile().getName();
-            if (testFlushFileName) {
-                fileNames.addFirst(flushFileName);
-            }
-            Thread.sleep(sleepTime);
-            if (!testFlushFileName) {
-                String secondFlushFileName = (String) fileManager.getRelFlushFileReference()
-                        .getInsertIndexFileReference().getFile().getName();
-                String mergeFileName = getMergeFileName(fileManager, flushFileName, secondFlushFileName);
-                fileNames.addFirst(mergeFileName);
-                Thread.sleep(sleepTime);
-            }
-        }
-
-        List<String> sortedFileNames = new ArrayList<String>();
-        sortedFileNames.addAll(fileNames);
-
-        // Make sure the comparator sorts in the correct order (i.e., the
-        // reverse insertion order in this case).
-        Comparator<String> cmp = fileManager.getFileNameComparator();
-        Collections.sort(sortedFileNames, cmp);
-        for (int i = 0; i < numFileNames; i++) {
-            assertEquals(fileNames.get(i), sortedFileNames.get(i));
-        }
-    }
-
-    @Test
-    public void flushAndMergeFilesSortOrderTest() throws InterruptedException, HyracksDataException {
-        sortOrderTest(true);
-        sortOrderTest(false);
-    }
-
-    public void cleanInvalidFilesTest(IOManager ioManager) throws InterruptedException, IOException, IndexException {
-        ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
-                new DummyTreeFactory());
-        fileManager.createDirs();
-
-        List<FileReference> flushFiles = new ArrayList<FileReference>();
-        List<FileReference> allFiles = new ArrayList<FileReference>();
-
-        int numFileNames = 100;
-        long sleepTime = 5;
-        // Generate a bunch of flush files.
-        for (int i = 0; i < numFileNames; i++) {
-            LSMComponentFileReferences relFlushFileRefs = fileManager.getRelFlushFileReference();
-            flushFiles.add(relFlushFileRefs.getInsertIndexFileReference());
-            Thread.sleep(sleepTime);
-        }
-        allFiles.addAll(flushFiles);
-
-        // Simulate merging some of the flush files.
-        // Merge range 0 to 4.
-        FileReference mergeFile1 = simulateMerge(fileManager, flushFiles.get(0), flushFiles.get(4));
-        allFiles.add(mergeFile1);
-        // Merge range 5 to 9.
-        FileReference mergeFile2 = simulateMerge(fileManager, flushFiles.get(5), flushFiles.get(9));
-        allFiles.add(mergeFile2);
-        // Merge range 10 to 19.
-        FileReference mergeFile3 = simulateMerge(fileManager, flushFiles.get(10), flushFiles.get(19));
-        allFiles.add(mergeFile3);
-        // Merge range 20 to 29.
-        FileReference mergeFile4 = simulateMerge(fileManager, flushFiles.get(20), flushFiles.get(29));
-        allFiles.add(mergeFile4);
-        // Merge range 50 to 79.
-        FileReference mergeFile5 = simulateMerge(fileManager, flushFiles.get(50), flushFiles.get(79));
-        allFiles.add(mergeFile5);
-
-        // Simulate merging of merge files.
-        FileReference mergeFile6 = simulateMerge(fileManager, mergeFile1, mergeFile2);
-        allFiles.add(mergeFile6);
-        FileReference mergeFile7 = simulateMerge(fileManager, mergeFile3, mergeFile4);
-        allFiles.add(mergeFile7);
-
-        // Create all files and set delete on exit for all files.
-        for (FileReference fileRef : allFiles) {
-            fileRef.getFile().createNewFile();
-            fileRef.getFile().deleteOnExit();
-        }
-
-        // Populate expected valid flush files.
-        List<String> expectedValidFiles = new ArrayList<String>();
-        for (int i = 30; i < 50; i++) {
-            expectedValidFiles.add(flushFiles.get(i).getFile().getName());
-        }
-        for (int i = 80; i < 100; i++) {
-            expectedValidFiles.add(flushFiles.get(i).getFile().getName());
-        }
-
-        // Populate expected valid merge files.
-        expectedValidFiles.add(mergeFile5.getFile().getName());
-        expectedValidFiles.add(mergeFile6.getFile().getName());
-        expectedValidFiles.add(mergeFile7.getFile().getName());
-
-        // Sort expected files.
-        Collections.sort(expectedValidFiles, fileManager.getFileNameComparator());
-
-        // Pass null and a dummy component finalizer. We don't test for physical consistency in this test.
-        List<LSMComponentFileReferences> lsmComonentFileReference = fileManager.cleanupAndGetValidFiles();
-
-        // Check actual files against expected files.
-        assertEquals(expectedValidFiles.size(), lsmComonentFileReference.size());
-        for (int i = 0; i < expectedValidFiles.size(); i++) {
-            assertEquals(expectedValidFiles.get(i), lsmComonentFileReference.get(i).getInsertIndexFileReference()
-                    .getFile().getName());
-        }
-
-        // Make sure invalid files were removed from all IODevices.
-        ArrayList<String> remainingFiles = new ArrayList<String>();
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File dir = new File(dev.getPath(), baseDir);
-            FilenameFilter filter = new FilenameFilter() {
-                public boolean accept(File dir, String name) {
-                    return !name.startsWith(".");
-                }
-            };
-            String[] files = dir.list(filter);
-            for (String file : files) {
-                File f = new File(file);
-                remainingFiles.add(f.getName());
-            }
-        }
-
-        Collections.sort(remainingFiles, fileManager.getFileNameComparator());
-        // Check actual files in directory against expected files.
-        assertEquals(expectedValidFiles.size(), remainingFiles.size());
-        for (int i = 0; i < expectedValidFiles.size(); i++) {
-            assertEquals(expectedValidFiles.get(i), remainingFiles.get(i));
-        }
-    }
-
-    @Test
-    public void singleIODeviceTest() throws InterruptedException, IOException, IndexException {
-        IOManager singleDeviceIOManager = createIOManager(1);
-        cleanInvalidFilesTest(singleDeviceIOManager);
-        cleanDirs(singleDeviceIOManager);
-    }
-
-    @Test
-    public void twoIODevicesTest() throws InterruptedException, IOException, IndexException {
-        IOManager twoDevicesIOManager = createIOManager(2);
-        cleanInvalidFilesTest(twoDevicesIOManager);
-        cleanDirs(twoDevicesIOManager);
-    }
-
-    @Test
-    public void fourIODevicesTest() throws InterruptedException, IOException, IndexException {
-        IOManager fourDevicesIOManager = createIOManager(4);
-        cleanInvalidFilesTest(fourDevicesIOManager);
-        cleanDirs(fourDevicesIOManager);
-    }
-
-    private void cleanDirs(IOManager ioManager) {
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File dir = new File(dev.getPath(), baseDir);
-            FilenameFilter filter = new FilenameFilter() {
-                public boolean accept(File dir, String name) {
-                    return !name.startsWith(".");
-                }
-            };
-            String[] files = dir.list(filter);
-            for (String file : files) {
-                File f = new File(file);
-                f.delete();
-            }
-        }
-    }
-
-    private IOManager createIOManager(int numDevices) throws HyracksException {
-        List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
-        for (int i = 0; i < numDevices; i++) {
-            String iodevPath = System.getProperty("java.io.tmpdir") + sep + "test_iodev" + i;
-            devices.add(new IODeviceHandle(new File(iodevPath), "wa"));
-        }
-        return new IOManager(devices, Executors.newCachedThreadPool());
-    }
-
-    private FileReference simulateMerge(ILSMIndexFileManager fileManager, FileReference a, FileReference b)
-            throws HyracksDataException {
-        LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(a.getFile().getName(), b
-                .getFile().getName());
-        return relMergeFileRefs.getInsertIndexFileReference();
-    }
-
-    private String getMergeFileName(ILSMIndexFileManager fileNameManager, String firstFile, String lastFile)
-            throws HyracksDataException {
-        File f1 = new File(firstFile);
-        File f2 = new File(lastFile);
-        return (String) fileNameManager.getRelMergeFileReference(f1.getName(), f2.getName())
-                .getInsertIndexFileReference().getFile().getName();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
deleted file mode 100644
index 1edd32e..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>hyracks-storage-am-lsm-invertedindex-test</artifactId>
-
-	<parent>
-		<artifactId>hyracks-tests</artifactId>
-		<groupId>edu.uci.ics.hyracks</groupId>
-		<version>0.2.2-SNAPSHOT</version>
-		<relativePath>..</relativePath>
-	</parent>
-
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<version>2.0.2</version>
-				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-	<dependencies>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-test-support</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-data-std</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>test</scope>
-		</dependency>
-	</dependencies>
-
-</project>
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexBulkLoadTest.java
deleted file mode 100644
index fcb78ad..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexBulkLoadTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class LSMInvertedIndexBulkLoadTest extends AbstractInvertedIndexLoadTest {
-
-    public LSMInvertedIndexBulkLoadTest() {
-        super(InvertedIndexType.LSM, true, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexDeleteTest.java
deleted file mode 100644
index 4e2fe37..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexDeleteTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexDeleteTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class LSMInvertedIndexDeleteTest extends AbstractInvertedIndexDeleteTest {
-
-    public LSMInvertedIndexDeleteTest() {
-        super(InvertedIndexType.LSM, false);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexInsertTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexInsertTest.java
deleted file mode 100644
index e9a1c75..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexInsertTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class LSMInvertedIndexInsertTest extends AbstractInvertedIndexLoadTest {
-
-    public LSMInvertedIndexInsertTest() {
-        super(InvertedIndexType.LSM, false, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
deleted file mode 100644
index 811919b..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
-
-public class LSMInvertedIndexMergeTest extends AbstractInvertedIndexLoadTest {
-
-    private final int maxTreesToMerge = AccessMethodTestsConfig.LSM_INVINDEX_MAX_TREES_TO_MERGE;
-
-    public LSMInvertedIndexMergeTest() {
-        super(InvertedIndexType.LSM, true, 1);
-    }
-
-    @Override
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
-        IIndex invIndex = testCtx.getIndex();
-        invIndex.create();
-        invIndex.activate();
-        ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-
-        for (int i = 0; i < maxTreesToMerge; i++) {
-            for (int j = 0; j < i; j++) {
-                if (bulkLoad) {
-                    LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-                } else {
-                    LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-                }
-            }
-            // Perform merge.
-            invIndexAccessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-            validateAndCheckIndex(testCtx);
-            runTinySearchWorkload(testCtx, tupleGen);
-        }
-
-        invIndex.deactivate();
-        invIndex.destroy();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMultiBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMultiBulkLoadTest.java
deleted file mode 100644
index adfb689..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMultiBulkLoadTest.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class LSMInvertedIndexMultiBulkLoadTest extends AbstractInvertedIndexLoadTest {
-
-    public LSMInvertedIndexMultiBulkLoadTest() {
-        super(InvertedIndexType.LSM, true, AccessMethodTestsConfig.LSM_INVINDEX_NUM_BULKLOAD_ROUNDS);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexSearchTest.java
deleted file mode 100644
index 1528e20..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexSearchTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexSearchTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class LSMInvertedIndexSearchTest extends AbstractInvertedIndexSearchTest {
-
-    public LSMInvertedIndexSearchTest() {
-        super(InvertedIndexType.LSM, false);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexBulkLoadTest.java
deleted file mode 100644
index f7a36f0..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexBulkLoadTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedLSMInvertedIndexBulkLoadTest extends AbstractInvertedIndexLoadTest {
-
-    public PartitionedLSMInvertedIndexBulkLoadTest() {
-        super(InvertedIndexType.PARTITIONED_LSM, true, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexDeleteTest.java
deleted file mode 100644
index 4fd529b..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexDeleteTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexDeleteTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedLSMInvertedIndexDeleteTest extends AbstractInvertedIndexDeleteTest {
-
-    public PartitionedLSMInvertedIndexDeleteTest() {
-        super(InvertedIndexType.PARTITIONED_LSM, false);
-    }
-}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexInsertTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexInsertTest.java
deleted file mode 100644
index 4608f81..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexInsertTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedLSMInvertedIndexInsertTest extends AbstractInvertedIndexLoadTest {
-
-    public PartitionedLSMInvertedIndexInsertTest() {
-        super(InvertedIndexType.PARTITIONED_LSM, false, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
deleted file mode 100644
index 786afe1..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
-
-public class PartitionedLSMInvertedIndexMergeTest extends AbstractInvertedIndexLoadTest {
-
-    private final int maxTreesToMerge = AccessMethodTestsConfig.LSM_INVINDEX_MAX_TREES_TO_MERGE;
-
-    public PartitionedLSMInvertedIndexMergeTest() {
-        super(InvertedIndexType.PARTITIONED_LSM, true, 1);
-    }
-
-    @Override
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
-        IIndex invIndex = testCtx.getIndex();
-        invIndex.create();
-        invIndex.activate();
-        ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-
-        for (int i = 0; i < maxTreesToMerge; i++) {
-            for (int j = 0; j < i; j++) {
-                if (bulkLoad) {
-                    LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-                } else {
-                    LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-                }
-            }
-            // Perform merge.
-            invIndexAccessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-            validateAndCheckIndex(testCtx);
-            runTinySearchWorkload(testCtx, tupleGen);
-        }
-
-        invIndex.deactivate();
-        invIndex.destroy();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMultiBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMultiBulkLoadTest.java
deleted file mode 100644
index 80a3c0b..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMultiBulkLoadTest.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedLSMInvertedIndexMultiBulkLoadTest extends AbstractInvertedIndexLoadTest {
-
-    public PartitionedLSMInvertedIndexMultiBulkLoadTest() {
-        super(InvertedIndexType.PARTITIONED_LSM, true, AccessMethodTestsConfig.LSM_INVINDEX_NUM_BULKLOAD_ROUNDS);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexSearchTest.java
deleted file mode 100644
index c8a7667..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexSearchTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexSearchTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedLSMInvertedIndexSearchTest extends AbstractInvertedIndexSearchTest {
-
-    public PartitionedLSMInvertedIndexSearchTest() {
-        super(InvertedIndexType.PARTITIONED_LSM, false);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
deleted file mode 100644
index f7783fb..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common;
-
-import java.io.IOException;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
-
-public abstract class AbstractInvertedIndexDeleteTest extends AbstractInvertedIndexTest {
-
-    protected final int numInsertRounds = AccessMethodTestsConfig.LSM_INVINDEX_NUM_INSERT_ROUNDS;
-    protected final int numDeleteRounds = AccessMethodTestsConfig.LSM_INVINDEX_NUM_DELETE_ROUNDS;
-    protected final boolean bulkLoad;
-
-    public AbstractInvertedIndexDeleteTest(InvertedIndexType invIndexType, boolean bulkLoad) {
-        super(invIndexType);
-        this.bulkLoad = bulkLoad;
-    }
-
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
-        IIndex invIndex = testCtx.getIndex();
-        invIndex.create();
-        invIndex.activate();
-
-        for (int i = 0; i < numInsertRounds; i++) {
-            // Start generating documents ids from 0 again.
-            tupleGen.reset();
-
-            if (bulkLoad) {
-                LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-            } else {
-                LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-            }
-
-            // Delete all documents in a couple of rounds.
-            int numTuplesPerDeleteRound = (int) Math.ceil((float) testCtx.getDocumentCorpus().size()
-                    / (float) numDeleteRounds);
-            for (int j = 0; j < numDeleteRounds; j++) {
-                LSMInvertedIndexTestUtils.deleteFromInvIndex(testCtx, harness.getRandom(), numTuplesPerDeleteRound);
-                validateAndCheckIndex(testCtx);
-                runTinySearchWorkload(testCtx, tupleGen);
-            }
-        }
-
-        invIndex.deactivate();
-        invIndex.destroy();
-    }
-
-    @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-
-    @Test
-    public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
-                invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-
-    @Test
-    public void ngramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-
-    @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
-                invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
deleted file mode 100644
index c855cc4..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common;
-
-import java.io.IOException;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
-
-public abstract class AbstractInvertedIndexLoadTest extends AbstractInvertedIndexTest {
-
-    protected final boolean bulkLoad;
-    protected final int numRounds;
-
-    public AbstractInvertedIndexLoadTest(InvertedIndexType invIndexType, boolean bulkLoad, int numRounds) {
-        super(invIndexType);
-        this.bulkLoad = bulkLoad;
-        this.numRounds = numRounds;
-    }
-
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
-        IIndex invIndex = testCtx.getIndex();
-        invIndex.create();
-        invIndex.activate();
-
-        for (int i = 0; i < numRounds; i++) {
-            if (bulkLoad) {
-                LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-            } else {
-                LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-            }
-            validateAndCheckIndex(testCtx);
-            runTinySearchWorkload(testCtx, tupleGen);
-        }
-
-        invIndex.deactivate();
-        invIndex.destroy();
-    }
-
-    @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-
-    @Test
-    public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
-                invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-
-    @Test
-    public void ngramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-
-    @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
-                invIndexType);
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
-        runTest(testCtx, tupleGen);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
deleted file mode 100644
index 991ff59..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.EditDistanceSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.JaccardSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
-
-public abstract class AbstractInvertedIndexSearchTest extends AbstractInvertedIndexTest {
-
-    protected final Logger LOGGER = Logger.getLogger(AbstractInvertedIndexSearchTest.class.getName());
-
-    protected int NUM_DOC_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_DOC_QUERIES;
-    protected int NUM_RANDOM_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_RANDOM_QUERIES;
-    protected final boolean bulkLoad;
-
-    public AbstractInvertedIndexSearchTest(InvertedIndexType invIndexType, boolean bulkLoad) {
-        super(invIndexType);
-        this.bulkLoad = bulkLoad;
-    }
-
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen,
-            List<IInvertedIndexSearchModifier> searchModifiers) throws IOException, IndexException {
-        IIndex invIndex = testCtx.getIndex();
-        invIndex.create();
-        invIndex.activate();
-
-        if (bulkLoad) {
-            LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-        } else {
-            LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
-        }
-        invIndex.validate();
-
-        for (IInvertedIndexSearchModifier searchModifier : searchModifiers) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Running searches with: " + searchModifier.toString());
-            }
-            LSMInvertedIndexTestUtils.testIndexSearch(testCtx, tupleGen, harness.getRandom(), NUM_DOC_QUERIES,
-                    NUM_RANDOM_QUERIES, searchModifier, SCAN_COUNT_ARRAY);
-        }
-
-        invIndex.deactivate();
-        invIndex.destroy();
-    }
-
-    private void testWordInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException, IndexException {
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
-        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<IInvertedIndexSearchModifier>();
-        searchModifiers.add(new ConjunctiveSearchModifier());
-        searchModifiers.add(new JaccardSearchModifier(1.0f));
-        searchModifiers.add(new JaccardSearchModifier(0.9f));
-        searchModifiers.add(new JaccardSearchModifier(0.7f));
-        searchModifiers.add(new JaccardSearchModifier(0.5f));
-        runTest(testCtx, tupleGen, searchModifiers);
-    }
-
-    private void testNGramInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException, IndexException {
-        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
-        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<IInvertedIndexSearchModifier>();
-        searchModifiers.add(new ConjunctiveSearchModifier());
-        searchModifiers.add(new JaccardSearchModifier(1.0f));
-        searchModifiers.add(new JaccardSearchModifier(0.9f));
-        searchModifiers.add(new JaccardSearchModifier(0.7f));
-        searchModifiers.add(new JaccardSearchModifier(0.5f));
-        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 0));
-        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 1));
-        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 2));
-        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 3));
-        runTest(testCtx, tupleGen, searchModifiers);
-    }
-
-    @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
-        testWordInvIndexIndex(testCtx);
-    }
-
-    @Test
-    public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
-                invIndexType);
-        testWordInvIndexIndex(testCtx);
-    }
-
-    @Test
-    public void ngramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
-        testNGramInvIndexIndex(testCtx);
-    }
-
-    @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
-                invIndexType);
-        testNGramInvIndexIndex(testCtx);
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
deleted file mode 100644
index 90a6d54..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common;
-
-import java.io.IOException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.JaccardSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
-
-public abstract class AbstractInvertedIndexTest {
-    protected final Logger LOGGER = Logger.getLogger(AbstractInvertedIndexTest.class.getName());
-
-    protected final LSMInvertedIndexTestHarness harness = new LSMInvertedIndexTestHarness();
-
-    protected final int NUM_DOCS_TO_INSERT = AccessMethodTestsConfig.LSM_INVINDEX_NUM_DOCS_TO_INSERT;
-    protected final int[] SCAN_COUNT_ARRAY = new int[AccessMethodTestsConfig.LSM_INVINDEX_SCAN_COUNT_ARRAY_SIZE];
-
-    protected final int TINY_WORKLOAD_NUM_DOC_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_TINY_NUM_DOC_QUERIES;
-    protected final int TINY_WORKLOAD_NUM_RANDOM_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_TINY_NUM_RANDOM_QUERIES;
-
-    // Note: The edit-distance search modifier is tested separately.
-    protected final IInvertedIndexSearchModifier[] TEST_SEARCH_MODIFIERS = new IInvertedIndexSearchModifier[] {
-            new ConjunctiveSearchModifier(), new JaccardSearchModifier(0.8f), new JaccardSearchModifier(0.5f) };
-
-    protected final InvertedIndexType invIndexType;
-
-    public AbstractInvertedIndexTest(InvertedIndexType invIndexType) {
-        this.invIndexType = invIndexType;
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    /**
-     * Validates the index, and compares it against the expected index.
-     * This test is only for verifying the integrity and correctness of the index,
-     * it does not ensure the correctness of index searches.
-     */
-    protected void validateAndCheckIndex(LSMInvertedIndexTestContext testCtx) throws HyracksDataException, IndexException {
-        IIndex invIndex = testCtx.getIndex();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Validating index: " + invIndex);
-        }
-        // Validate index and compare against expected index.
-        invIndex.validate();
-        if (invIndexType == InvertedIndexType.INMEMORY || invIndexType == InvertedIndexType.ONDISK) {
-            // This comparison method exercises different features of these types of inverted indexes.
-            LSMInvertedIndexTestUtils.compareActualAndExpectedIndexes(testCtx);
-        }
-        LSMInvertedIndexTestUtils.compareActualAndExpectedIndexesRangeSearch(testCtx);
-    }
-
-    /**
-     * Runs a workload of queries using different search modifiers, and verifies the correctness of the results.
-     */
-    protected void runTinySearchWorkload(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
-        for (IInvertedIndexSearchModifier searchModifier : TEST_SEARCH_MODIFIERS) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Running test workload with: " + searchModifier.toString());
-            }
-            LSMInvertedIndexTestUtils.testIndexSearch(testCtx, tupleGen, harness.getRandom(),
-                    TINY_WORKLOAD_NUM_DOC_QUERIES, TINY_WORKLOAD_NUM_RANDOM_QUERIES, searchModifier, SCAN_COUNT_ARRAY);
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
deleted file mode 100644
index 5be1d6a..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class LSMInvertedIndexTestHarness {
-
-    private static final long RANDOM_SEED = 50;
-
-    protected final int diskPageSize;
-    protected final int diskNumPages;
-    protected final int diskMaxOpenFiles;
-    protected final int memPageSize;
-    protected final int memNumPages;
-    protected final int hyracksFrameSize;
-
-    protected IOManager ioManager;
-    protected IBufferCache diskBufferCache;
-    protected IFileMapProvider diskFileMapProvider;
-    protected IInMemoryBufferCache memBufferCache;
-    protected IInMemoryFreePageManager memFreePageManager;
-    protected IHyracksTaskContext ctx;
-    protected ILSMIOOperationScheduler ioScheduler;
-    protected ILSMMergePolicy mergePolicy;
-    protected ILSMOperationTrackerFactory opTrackerFactory;
-    protected ILSMIOOperationCallbackProvider ioOpCallbackProvider;
-
-    protected final Random rnd = new Random();
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String sep = System.getProperty("file.separator");
-    protected String onDiskDir;
-    protected String btreeFileName = "btree_vocab";
-    protected String invIndexFileName = "inv_index";
-    protected FileReference invIndexFileRef;
-
-    public LSMInvertedIndexTestHarness() {
-        this.diskPageSize = AccessMethodTestsConfig.LSM_INVINDEX_DISK_PAGE_SIZE;
-        this.diskNumPages = AccessMethodTestsConfig.LSM_INVINDEX_DISK_NUM_PAGES;
-        this.diskMaxOpenFiles = AccessMethodTestsConfig.LSM_INVINDEX_DISK_MAX_OPEN_FILES;
-        this.memPageSize = AccessMethodTestsConfig.LSM_INVINDEX_MEM_PAGE_SIZE;
-        this.memNumPages = AccessMethodTestsConfig.LSM_INVINDEX_MEM_NUM_PAGES;
-        this.hyracksFrameSize = AccessMethodTestsConfig.LSM_INVINDEX_HYRACKS_FRAME_SIZE;
-        this.ioScheduler = SynchronousScheduler.INSTANCE;
-        this.mergePolicy = NoMergePolicy.INSTANCE;
-        this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
-        this.ioOpCallbackProvider = NoOpIOOperationCallback.INSTANCE;
-    }
-
-    public LSMInvertedIndexTestHarness(int diskPageSize, int diskNumPages, int diskMaxOpenFiles, int memPageSize,
-            int memNumPages, int hyracksFrameSize) {
-        this.diskPageSize = diskPageSize;
-        this.diskNumPages = diskNumPages;
-        this.diskMaxOpenFiles = diskMaxOpenFiles;
-        this.memPageSize = memPageSize;
-        this.memNumPages = memNumPages;
-        this.hyracksFrameSize = hyracksFrameSize;
-        this.ioScheduler = SynchronousScheduler.INSTANCE;
-        this.mergePolicy = NoMergePolicy.INSTANCE;
-        this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
-    }
-
-    public void setUp() throws HyracksException {
-        onDiskDir = "lsm_invertedindex_" + simpleDateFormat.format(new Date()) + sep;
-        ctx = TestUtils.create(getHyracksFrameSize());
-        TestStorageManagerComponentHolder.init(diskPageSize, diskNumPages, diskMaxOpenFiles);
-        diskBufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        diskFileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        memBufferCache = new DualIndexInMemoryBufferCache(new HeapBufferAllocator(), memPageSize, memNumPages);
-        memBufferCache.open();
-        memFreePageManager = new DualIndexInMemoryFreePageManager(memNumPages, new LIFOMetaDataFrameFactory());
-        ioManager = TestStorageManagerComponentHolder.getIOManager();
-        rnd.setSeed(RANDOM_SEED);
-        invIndexFileRef = ioManager.getIODevices().get(0).createFileReference(onDiskDir + invIndexFileName);
-    }
-
-    public void tearDown() throws HyracksDataException {
-        diskBufferCache.close();
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File dir = new File(dev.getPath(), onDiskDir);
-            FilenameFilter filter = new FilenameFilter() {
-                public boolean accept(File dir, String name) {
-                    return !name.startsWith(".");
-                }
-            };
-            String[] files = dir.list(filter);
-            if (files != null) {
-                for (String fileName : files) {
-                    File file = new File(dir.getPath() + File.separator + fileName);
-                    file.delete();
-                }
-            }
-            dir.delete();
-        }
-        memBufferCache.close();
-    }
-
-    public FileReference getInvListsFileRef() {
-        return invIndexFileRef;
-    }
-
-    public int getDiskPageSize() {
-        return diskPageSize;
-    }
-
-    public int getDiskNumPages() {
-        return diskNumPages;
-    }
-
-    public int getDiskMaxOpenFiles() {
-        return diskMaxOpenFiles;
-    }
-
-    public int getMemPageSize() {
-        return memPageSize;
-    }
-
-    public int getMemNumPages() {
-        return memNumPages;
-    }
-
-    public int getHyracksFrameSize() {
-        return hyracksFrameSize;
-    }
-
-    public IOManager getIOManager() {
-        return ioManager;
-    }
-
-    public IBufferCache getDiskBufferCache() {
-        return diskBufferCache;
-    }
-
-    public IFileMapProvider getDiskFileMapProvider() {
-        return diskFileMapProvider;
-    }
-
-    public IInMemoryBufferCache getMemBufferCache() {
-        return memBufferCache;
-    }
-
-    public IInMemoryFreePageManager getMemFreePageManager() {
-        return memFreePageManager;
-    }
-
-    public IHyracksTaskContext getHyracksTastContext() {
-        return ctx;
-    }
-
-    public String getOnDiskDir() {
-        return onDiskDir;
-    }
-
-    public Random getRandom() {
-        return rnd;
-    }
-
-    public ILSMIOOperationScheduler getIOScheduler() {
-        return ioScheduler;
-    }
-
-    public ILSMOperationTrackerFactory getOperationTrackerFactory() {
-        return opTrackerFactory;
-    }
-
-    public ILSMMergePolicy getMergePolicy() {
-        return mergePolicy;
-    }
-
-    public ILSMIOOperationCallbackProvider getIOOperationCallbackProvider() {
-        return ioOpCallbackProvider;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexDeleteTest.java
deleted file mode 100644
index c71d996..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexDeleteTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexDeleteTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class InMemoryInvertedIndexDeleteTest extends AbstractInvertedIndexDeleteTest {
-    
-    public InMemoryInvertedIndexDeleteTest() {
-        super(InvertedIndexType.INMEMORY, false);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexInsertTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexInsertTest.java
deleted file mode 100644
index d2b883d..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexInsertTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class InMemoryInvertedIndexInsertTest extends AbstractInvertedIndexLoadTest {
-    
-    public InMemoryInvertedIndexInsertTest() {
-        super(InvertedIndexType.INMEMORY, false, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexSearchTest.java
deleted file mode 100644
index f3b3026..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexSearchTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexSearchTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class InMemoryInvertedIndexSearchTest extends AbstractInvertedIndexSearchTest {
-
-    public InMemoryInvertedIndexSearchTest() {
-        super(InvertedIndexType.INMEMORY, false);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexDeleteTest.java
deleted file mode 100644
index eac7765..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexDeleteTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexDeleteTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedInMemoryInvertedIndexDeleteTest extends AbstractInvertedIndexDeleteTest {
-    
-    public PartitionedInMemoryInvertedIndexDeleteTest() {
-        super(InvertedIndexType.PARTITIONED_INMEMORY, false);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexInsertTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexInsertTest.java
deleted file mode 100644
index 8342efd..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexInsertTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedInMemoryInvertedIndexInsertTest extends AbstractInvertedIndexLoadTest {
-
-    public PartitionedInMemoryInvertedIndexInsertTest() {
-        super(InvertedIndexType.PARTITIONED_INMEMORY, false, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexSearchTest.java
deleted file mode 100644
index 385d65d..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexSearchTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexSearchTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedInMemoryInvertedIndexSearchTest extends AbstractInvertedIndexSearchTest {
-
-    public PartitionedInMemoryInvertedIndexSearchTest() {
-        super(InvertedIndexType.PARTITIONED_INMEMORY, false);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
deleted file mode 100644
index bd48068..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.multithread;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.ProbabilityHelper;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
-
-public class LSMInvertedIndexMultiThreadTest {
-
-    protected final Logger LOGGER = Logger.getLogger(LSMInvertedIndexMultiThreadTest.class.getName());
-
-    // Machine-specific number of threads to use for testing.
-    protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
-    // Excessive number of threads for testing.
-    protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
-    protected final int NUM_OPERATIONS = AccessMethodTestsConfig.LSM_INVINDEX_MULTITHREAD_NUM_OPERATIONS;
-
-    protected final LSMInvertedIndexTestHarness harness = new LSMInvertedIndexTestHarness();
-    protected final LSMInvertedIndexWorkerFactory workerFactory = new LSMInvertedIndexWorkerFactory();
-    protected final ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
-
-    protected void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    protected void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numThreads,
-            TestWorkloadConf conf, String dataMsg) throws InterruptedException, TreeIndexException, HyracksException {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("LSMInvertedIndex MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
-                    + "; Workload: " + conf.toString() + ".");
-        }
-
-        // 4 batches per thread.
-        int batchSize = (NUM_OPERATIONS / numThreads) / 4;
-
-        LSMInvertedIndexMultiThreadTestDriver driver = new LSMInvertedIndexMultiThreadTestDriver(testCtx.getIndex(),
-                workerFactory, tupleGen.getFieldSerdes(), tupleGen.getFieldGens(), conf.ops, conf.opProbs);
-        driver.init();
-        long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
-        testCtx.getIndex().validate();
-        driver.deinit();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("LSMInvertedIndex MultiThread Test Time: " + times[0] + "ms");
-        }
-    }
-
-    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
-        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
-
-        // Insert only workload.
-        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
-
-        // Insert and merge workload.
-        TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertMergeOps.length)));
-
-        // Inserts mixed with point searches and scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
-                TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
-
-        // Inserts, and deletes.
-        TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateOps.length)));
-
-        // Inserts, deletes and merges.
-        TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateMergeOps.length)));
-
-        // All operations except merge.
-        TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.POINT_SEARCH, TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
-                .getUniformProbDist(allNoMergeOps.length)));
-
-        // All operations.
-        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
-
-        return workloadConfs;
-    }
-
-    @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException, InterruptedException {
-        String dataMsg = "Documents";
-        int[] numThreads = new int[] { REGULAR_NUM_THREADS, EXCESSIVE_NUM_THREADS };
-        for (int i = 0; i < numThreads.length; i++) {
-            for (TestWorkloadConf conf : workloadConfs) {
-                setUp();
-                LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
-                        getIndexType());
-                TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
-                runTest(testCtx, tupleGen, numThreads[i], conf, dataMsg);
-                tearDown();
-            }
-        }
-    }
-
-    @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException, InterruptedException {
-        String dataMsg = "Person Names";
-        int[] numThreads = new int[] { REGULAR_NUM_THREADS, EXCESSIVE_NUM_THREADS };
-        for (int i = 0; i < numThreads.length; i++) {
-            for (TestWorkloadConf conf : workloadConfs) {
-                setUp();
-                LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(
-                        harness, getIndexType());
-                TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
-                runTest(testCtx, tupleGen, numThreads[i], conf, dataMsg);
-                tearDown();
-            }
-        }
-    }
-
-    protected InvertedIndexType getIndexType() {
-        return InvertedIndexType.LSM;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
deleted file mode 100644
index 6159969..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.multithread;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.IndexMultiThreadTestDriver;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.datagen.IFieldValueGenerator;
-
-@SuppressWarnings("rawtypes")
-public class LSMInvertedIndexMultiThreadTestDriver extends IndexMultiThreadTestDriver {
-
-    protected final IFieldValueGenerator[] fieldGens;
-
-    public LSMInvertedIndexMultiThreadTestDriver(IIndex index, IIndexTestWorkerFactory workerFactory,
-            ISerializerDeserializer[] fieldSerdes, IFieldValueGenerator[] fieldGens, TestOperation[] ops,
-            double[] opProbs) {
-        super(index, workerFactory, fieldSerdes, ops, opProbs);
-        this.fieldGens = fieldGens;
-    }
-
-    public DataGenThread createDatagenThread(int numThreads, int numBatches, int batchSize) {
-        return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, fieldGens, RANDOM_SEED, 2 * numThreads);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
deleted file mode 100644
index d6bb3d3..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.multithread;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.JaccardSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-
-public class LSMInvertedIndexTestWorker extends AbstractIndexTestWorker {
-
-    protected final LSMInvertedIndex invIndex;
-    protected final List<ITupleReference> documentCorpus = new ArrayList<ITupleReference>();
-    protected final Random rnd = new Random(50);
-
-    protected final IInvertedIndexSearchModifier[] TEST_SEARCH_MODIFIERS = new IInvertedIndexSearchModifier[] {
-            new ConjunctiveSearchModifier(), new JaccardSearchModifier(0.8f), new JaccardSearchModifier(0.5f) };
-
-    public LSMInvertedIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index,
-            int numBatches) {
-        super(dataGen, opSelector, index, numBatches);
-        invIndex = (LSMInvertedIndex) index;
-    }
-
-    @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
-        LSMInvertedIndexAccessor accessor = (LSMInvertedIndexAccessor) indexAccessor;
-        IIndexCursor searchCursor = accessor.createSearchCursor();
-        IIndexCursor rangeSearchCursor = accessor.createRangeSearchCursor();
-        RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
-        IBinaryTokenizerFactory tokenizerFactory = invIndex.getTokenizerFactory();
-        int searchModifierIndex = Math.abs(rnd.nextInt()) % TEST_SEARCH_MODIFIERS.length;
-        InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizerFactory.createTokenizer(),
-                TEST_SEARCH_MODIFIERS[searchModifierIndex]);
-
-        switch (op) {
-            case INSERT: {
-                insert(accessor, tuple);
-                break;
-            }
-
-            case DELETE: {
-                // Randomly pick a document from the corpus to delete.
-                if (!documentCorpus.isEmpty()) {
-                    int docIndex = Math.abs(rnd.nextInt()) % documentCorpus.size();
-                    ITupleReference deleteTuple = documentCorpus.get(docIndex);
-                    accessor.delete(deleteTuple);
-                    // Swap tupleIndex with last element.
-                    documentCorpus.set(docIndex, documentCorpus.get(documentCorpus.size() - 1));
-                    documentCorpus.remove(documentCorpus.size() - 1);
-                } else {
-                    // No existing documents to delete, treat this case as an insert.
-                    insert(accessor, tuple);
-                }
-                break;
-            }
-
-            case POINT_SEARCH: {
-                searchCursor.reset();
-                searchPred.setQueryTuple(tuple);
-                searchPred.setQueryFieldIndex(0);
-                try {
-                    accessor.search(searchCursor, searchPred);
-                    consumeCursorTuples(searchCursor);
-                } catch (OccurrenceThresholdPanicException e) {
-                    // Ignore.
-                }
-                break;
-            }
-
-            case SCAN: {
-                rangeSearchCursor.reset();
-                accessor.rangeSearch(rangeSearchCursor, rangePred);
-                consumeCursorTuples(rangeSearchCursor);
-                break;
-            }
-
-            case MERGE: {
-                accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-                break;
-            }
-
-            default:
-                throw new HyracksDataException("Op " + op.toString() + " not supported.");
-        }
-    }
-
-    private void insert(LSMInvertedIndexAccessor accessor, ITupleReference tuple) throws HyracksDataException,
-            IndexException {
-        // Ignore ongoing merges. Do an insert instead.
-        accessor.insert(tuple);
-        // Add tuple to document corpus so we can delete it.
-        ITupleReference copyTuple = TupleUtils.copyTuple(tuple);
-        documentCorpus.add(copyTuple);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexWorkerFactory.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexWorkerFactory.java
deleted file mode 100644
index cce843b..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexWorkerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.multithread;
-
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class LSMInvertedIndexWorkerFactory implements IIndexTestWorkerFactory {
-    @Override
-    public AbstractIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) {
-        return new LSMInvertedIndexTestWorker(dataGen, opSelector, index, numBatches);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/PartitionedLSMInvertedIndexMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/PartitionedLSMInvertedIndexMultiThreadTest.java
deleted file mode 100644
index 1adaf61..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/multithread/PartitionedLSMInvertedIndexMultiThreadTest.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.multithread;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedLSMInvertedIndexMultiThreadTest extends LSMInvertedIndexMultiThreadTest {
-
-    protected InvertedIndexType getIndexType() {
-        return InvertedIndexType.PARTITIONED_LSM;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleTest.java
deleted file mode 100644
index 3059062..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeFrameTupleTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Random;
-
-import junit.framework.Assert;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAppender;
-
-public class FixedSizeFrameTupleTest {
-
-    private static int FRAME_SIZE = 4096;
-
-    private Random rnd = new Random(50);
-
-    /**
-     * This test verifies the correct behavior of the FixedSizeFrameTuple class.
-     * Frames containing FixedSizeFrameTuple's require neither tuple slots nor
-     * field slots. The tests inserts generated data into a frame until the
-     * frame is full, and then verifies the frame's contents.
-     * 
-     */
-    @Test
-    public void singleFieldTest() throws Exception {
-        ByteBuffer buffer = ByteBuffer.allocate(FRAME_SIZE);
-
-        ITypeTraits[] fields = new ITypeTraits[1];
-        fields[0] = IntegerPointable.TYPE_TRAITS;
-
-        FixedSizeFrameTupleAppender ftapp = new FixedSizeFrameTupleAppender(FRAME_SIZE, fields);
-        FixedSizeFrameTupleAccessor ftacc = new FixedSizeFrameTupleAccessor(FRAME_SIZE, fields);
-
-        boolean frameHasSpace = true;
-
-        ArrayList<Integer> check = new ArrayList<Integer>();
-
-        ftapp.reset(buffer, true);
-        while (frameHasSpace) {
-            int val = rnd.nextInt();
-            frameHasSpace = ftapp.append(val);
-            if (frameHasSpace) {
-                check.add(val);
-                ftapp.incrementTupleCount(1);
-            }
-        }
-
-        ftacc.reset(buffer);
-        for (int i = 0; i < ftacc.getTupleCount(); i++) {
-            int val = IntegerSerializerDeserializer.getInt(ftacc.getBuffer().array(), ftacc.getTupleStartOffset(i));
-            Assert.assertEquals(check.get(i).intValue(), val);
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexBulkLoadTest.java
deleted file mode 100644
index 4813615..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexBulkLoadTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class OnDiskInvertedIndexBulkLoadTest extends AbstractInvertedIndexLoadTest {
-
-    public OnDiskInvertedIndexBulkLoadTest() {
-        super(InvertedIndexType.ONDISK, true, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexLifecycleTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexLifecycleTest.java
deleted file mode 100644
index 09432de..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexLifecycleTest.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import java.io.File;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexLifecycleTest;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-
-public class OnDiskInvertedIndexLifecycleTest extends AbstractIndexLifecycleTest {
-
-    private final LSMInvertedIndexTestHarness harness = new LSMInvertedIndexTestHarness();
-    private ITreeIndexFrame frame = null;
-
-    @Override
-    protected boolean persistentStateExists() throws Exception {
-        return harness.getInvListsFileRef().getFile().exists()
-                && ((OnDiskInvertedIndex) index).getBTree().getFileReference().getFile().exists();
-    }
-
-    @Override
-    protected boolean isEmptyIndex() throws Exception {
-        if (frame == null) {
-            frame = ((OnDiskInvertedIndex) index).getBTree().getLeafFrameFactory().createFrame();
-        }
-        return ((OnDiskInvertedIndex) index).getBTree().isEmptyTree(frame);
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        ITypeTraits[] tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
-        IBinaryComparatorFactory[] tokenCmpFactories = new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                .of(UTF8StringPointable.FACTORY) };
-        ITypeTraits[] invListTypeTraits = new ITypeTraits[] { IntegerPointable.TYPE_TRAITS };
-        IBinaryComparatorFactory[] invListCmpFactories = new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                .of(IntegerPointable.FACTORY) };
-        IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
-        FileReference btreeFile = new FileReference(new File(harness.getInvListsFileRef().getFile().getPath() + "_btree"));
-        index = new OnDiskInvertedIndex(harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), invListBuilder,
-                invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, harness.getInvListsFileRef(),
-                btreeFile);
-
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        index.deactivate();
-        index.destroy();
-        harness.tearDown();
-    }
-
-    @Override
-    protected void performInsertions() throws Exception {
-        // Do nothing.
-    }
-
-    @Override
-    protected void checkInsertions() throws Exception {
-        // Do nothing.
-    }
-
-    @Override
-    protected void clearCheckableInsertions() throws Exception {
-        // Do nothing.
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexSearchTest.java
deleted file mode 100644
index 76d9200..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexSearchTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexSearchTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class OnDiskInvertedIndexSearchTest extends AbstractInvertedIndexSearchTest {
-
-    public OnDiskInvertedIndexSearchTest() {
-        super(InvertedIndexType.ONDISK, true);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexBulkLoadTest.java
deleted file mode 100644
index f641630..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexBulkLoadTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedOnDiskInvertedIndexBulkLoadTest extends AbstractInvertedIndexLoadTest {
-
-    public PartitionedOnDiskInvertedIndexBulkLoadTest() {
-        super(InvertedIndexType.PARTITIONED_ONDISK, true, 1);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexSearchTest.java
deleted file mode 100644
index 4fa25ed..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexSearchTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk;
-
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexSearchTest;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-public class PartitionedOnDiskInvertedIndexSearchTest extends AbstractInvertedIndexSearchTest {
-
-    public PartitionedOnDiskInvertedIndexSearchTest() {
-        super(InvertedIndexType.PARTITIONED_ONDISK, true);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
deleted file mode 100644
index 33ea4f5..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-
-public class NGramTokenizerTest {
-
-    private char PRECHAR = '#';
-    private char POSTCHAR = '$';
-
-    private String str = "Jürgen S. Generic's Car";
-    private byte[] inputBuffer;
-
-    private int gramLength = 3;
-
-    private void getExpectedGrams(String s, int gramLength, ArrayList<String> grams, boolean prePost) {
-
-        String tmp = s.toLowerCase();
-        if (prePost) {
-            StringBuilder preBuilder = new StringBuilder();
-            for (int i = 0; i < gramLength - 1; i++) {
-                preBuilder.append(PRECHAR);
-            }
-            String pre = preBuilder.toString();
-
-            StringBuilder postBuilder = new StringBuilder();
-            for (int i = 0; i < gramLength - 1; i++) {
-                postBuilder.append(POSTCHAR);
-            }
-            String post = postBuilder.toString();
-
-            tmp = pre + s.toLowerCase() + post;
-        }
-
-        for (int i = 0; i < tmp.length() - gramLength + 1; i++) {
-            String gram = tmp.substring(i, i + gramLength);
-            grams.add(gram);
-        }
-    }
-
-    @Before
-    public void init() throws Exception {
-        // serialize string into bytes
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        DataOutput dos = new DataOutputStream(baos);
-        dos.writeUTF(str);
-        inputBuffer = baos.toByteArray();
-    }
-
-    void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost) throws IOException {
-        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false,
-                false, tokenFactory);
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
-
-        ArrayList<String> expectedGrams = new ArrayList<String>();
-        getExpectedGrams(str, gramLength, expectedGrams, prePost);
-        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
-        HashMap<String, Integer> gramCounts = new HashMap<String, Integer>();
-        for (String s : expectedGrams) {
-            Integer count = gramCounts.get(s);
-            if (count == null) {
-                count = 1;
-                gramCounts.put(s, count);
-            } else {
-                count++;
-            }
-
-            int hash = tokenHash(s, count);
-            expectedHashedGrams.add(hash);
-        }
-
-        int tokenCount = 0;
-
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-
-            // serialize hashed token
-            GrowableArray tokenData = new GrowableArray();
-
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenData);
-
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenData.getByteArray());
-            DataInput in = new DataInputStream(bais);
-
-            Integer hashedGram = in.readInt();
-
-            // System.out.println(hashedGram);
-
-            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
-
-            tokenCount++;
-        }
-        // System.out.println("---------");
-    }
-
-    void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost) throws IOException {
-        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
-                tokenFactory);
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
-
-        ArrayList<String> expectedGrams = new ArrayList<String>();
-        getExpectedGrams(str, gramLength, expectedGrams, prePost);
-        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
-        for (String s : expectedGrams) {
-            int hash = tokenHash(s, 1);
-            expectedHashedGrams.add(hash);
-        }
-
-        int tokenCount = 0;
-
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-
-            // serialize hashed token
-            GrowableArray tokenData = new GrowableArray();
-
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenData);
-
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenData.getByteArray());
-            DataInput in = new DataInputStream(bais);
-
-            Integer hashedGram = in.readInt();
-
-            // System.out.println(hashedGram);
-
-            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
-
-            tokenCount++;
-        }
-        // System.out.println("---------");
-    }
-
-    void runTestNGramTokenizerWithUTF8Tokens(boolean prePost) throws IOException {
-        UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
-        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
-                tokenFactory);
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
-
-        ArrayList<String> expectedGrams = new ArrayList<String>();
-        getExpectedGrams(str, gramLength, expectedGrams, prePost);
-
-        int tokenCount = 0;
-
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-
-            // serialize hashed token
-            GrowableArray tokenData = new GrowableArray();
-
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenData);
-
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenData.getByteArray());
-            DataInput in = new DataInputStream(bais);
-
-            String strGram = in.readUTF();
-
-            // System.out.println("\"" + strGram + "\"");
-
-            Assert.assertEquals(expectedGrams.get(tokenCount), strGram);
-
-            tokenCount++;
-        }
-        // System.out.println("---------");
-    }
-
-    @Test
-    public void testNGramTokenizerWithCountedHashedUTF8Tokens() throws Exception {
-        runTestNGramTokenizerWithCountedHashedUTF8Tokens(false);
-        runTestNGramTokenizerWithCountedHashedUTF8Tokens(true);
-    }
-
-    @Test
-    public void testNGramTokenizerWithHashedUTF8Tokens() throws Exception {
-        runTestNGramTokenizerWithHashedUTF8Tokens(false);
-        runTestNGramTokenizerWithHashedUTF8Tokens(true);
-    }
-
-    @Test
-    public void testNGramTokenizerWithUTF8Tokens() throws IOException {
-        runTestNGramTokenizerWithUTF8Tokens(false);
-        runTestNGramTokenizerWithUTF8Tokens(true);
-    }
-
-    public int tokenHash(String token, int tokenCount) {
-        int h = AbstractUTF8Token.GOLDEN_RATIO_32;
-        for (int i = 0; i < token.length(); i++) {
-            h ^= token.charAt(i);
-            h *= AbstractUTF8Token.GOLDEN_RATIO_32;
-        }
-        return h + tokenCount;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
deleted file mode 100644
index 3ff9304..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-
-import junit.framework.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-
-public class WordTokenizerTest {
-
-    private String text = "Hello World, I would like to inform you of the importance of Foo Bar. Yes, Foo Bar. Jürgen.";
-    private byte[] inputBuffer;
-
-    private ArrayList<String> expectedUTF8Tokens = new ArrayList<String>();
-    private ArrayList<Integer> expectedHashedUTF8Tokens = new ArrayList<Integer>();
-    private ArrayList<Integer> expectedCountedHashedUTF8Tokens = new ArrayList<Integer>();
-
-    private boolean isSeparator(char c) {
-        return !(Character.isLetterOrDigit(c) || Character.getType(c) == Character.OTHER_LETTER || Character.getType(c) == Character.OTHER_NUMBER);
-    }
-
-    private void tokenize(String text, ArrayList<String> tokens) {
-        String lowerCaseText = text.toLowerCase();
-        int startIx = 0;
-
-        // Skip separators at beginning of string.
-        while (isSeparator(lowerCaseText.charAt(startIx))) {
-            startIx++;
-        }
-        while (startIx < lowerCaseText.length()) {
-            while (startIx < lowerCaseText.length() && isSeparator(lowerCaseText.charAt(startIx))) {
-                startIx++;
-            }
-            int tokenStart = startIx;
-
-            while (startIx < lowerCaseText.length() && !isSeparator(lowerCaseText.charAt(startIx))) {
-                startIx++;
-            }
-            int tokenEnd = startIx;
-
-            // Emit token.
-            String token = lowerCaseText.substring(tokenStart, tokenEnd);
-
-            tokens.add(token);
-        }
-    }
-
-    @Before
-    public void init() throws IOException {
-        // serialize text into bytes
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        DataOutput dos = new DataOutputStream(baos);
-        dos.writeUTF(text);
-        inputBuffer = baos.toByteArray();
-
-        // init expected string tokens
-        tokenize(text, expectedUTF8Tokens);
-
-        // hashed tokens ignoring token count
-        for (int i = 0; i < expectedUTF8Tokens.size(); i++) {
-            int hash = tokenHash(expectedUTF8Tokens.get(i), 1);
-            expectedHashedUTF8Tokens.add(hash);
-        }
-
-        // hashed tokens using token count
-        HashMap<String, Integer> tokenCounts = new HashMap<String, Integer>();
-        for (int i = 0; i < expectedUTF8Tokens.size(); i++) {
-            Integer count = tokenCounts.get(expectedUTF8Tokens.get(i));
-            if (count == null) {
-                count = 1;
-                tokenCounts.put(expectedUTF8Tokens.get(i), count);
-            } else {
-                count++;
-            }
-
-            int hash = tokenHash(expectedUTF8Tokens.get(i), count);
-            expectedCountedHashedUTF8Tokens.add(hash);
-        }
-    }
-
-    @Test
-    public void testWordTokenizerWithCountedHashedUTF8Tokens() throws IOException {
-
-        HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
-        DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(false, false,
-                tokenFactory);
-
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
-
-        int tokenCount = 0;
-
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-
-            // serialize hashed token
-            GrowableArray tokenData = new GrowableArray();
-
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenData);
-
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenData.getByteArray());
-            DataInput in = new DataInputStream(bais);
-
-            Integer hashedToken = in.readInt();
-
-            Assert.assertEquals(hashedToken, expectedCountedHashedUTF8Tokens.get(tokenCount));
-
-            tokenCount++;
-        }
-    }
-
-    @Test
-    public void testWordTokenizerWithHashedUTF8Tokens() throws IOException {
-
-        HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
-        DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
-
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
-
-        int tokenCount = 0;
-
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-
-            // serialize hashed token
-            GrowableArray tokenData = new GrowableArray();
-
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenData);
-
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenData.getByteArray());
-            DataInput in = new DataInputStream(bais);
-
-            Integer hashedToken = in.readInt();
-
-            Assert.assertEquals(expectedHashedUTF8Tokens.get(tokenCount), hashedToken);
-
-            tokenCount++;
-        }
-    }
-
-    @Test
-    public void testWordTokenizerWithUTF8Tokens() throws IOException {
-
-        UTF8WordTokenFactory tokenFactory = new UTF8WordTokenFactory();
-        DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
-
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
-
-        int tokenCount = 0;
-
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-
-            // serialize hashed token
-            GrowableArray tokenData = new GrowableArray();
-
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenData);
-
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenData.getByteArray());
-            DataInput in = new DataInputStream(bais);
-
-            String strToken = in.readUTF();
-
-            Assert.assertEquals(expectedUTF8Tokens.get(tokenCount), strToken);
-
-            tokenCount++;
-        }
-    }
-
-    // JAQL Hash
-    public int tokenHash(String token, int tokenCount) {
-        int h = AbstractUTF8Token.GOLDEN_RATIO_32;
-        for (int i = 0; i < token.length(); i++) {
-            h ^= token.charAt(i);
-            h *= AbstractUTF8Token.GOLDEN_RATIO_32;
-        }
-        return h + tokenCount;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
deleted file mode 100644
index 870e6d9..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.InvertedIndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-
-@SuppressWarnings("rawtypes")
-public class LSMInvertedIndexTestContext extends OrderedIndexTestContext {
-
-    public static enum InvertedIndexType {
-        INMEMORY,
-        ONDISK,
-        LSM,
-        PARTITIONED_INMEMORY,
-        PARTITIONED_ONDISK,
-        PARTITIONED_LSM
-    };
-
-    protected IInvertedIndex invIndex;
-    protected IBinaryComparatorFactory[] allCmpFactories;
-    protected IBinaryTokenizerFactory tokenizerFactory;
-    protected InvertedIndexType invIndexType;
-    protected InvertedIndexTokenizingTupleIterator indexTupleIter;
-    protected HashSet<Comparable> allTokens = new HashSet<Comparable>();
-    protected List<ITupleReference> documentCorpus = new ArrayList<ITupleReference>();
-
-    public LSMInvertedIndexTestContext(ISerializerDeserializer[] fieldSerdes, IIndex index,
-            IBinaryTokenizerFactory tokenizerFactory, InvertedIndexType invIndexType,
-            InvertedIndexTokenizingTupleIterator indexTupleIter) {
-        super(fieldSerdes, index);
-        invIndex = (IInvertedIndex) index;
-        this.tokenizerFactory = tokenizerFactory;
-        this.invIndexType = invIndexType;
-        this.indexTupleIter = indexTupleIter;
-    }
-
-    @Override
-    public int getKeyFieldCount() {
-        return fieldSerdes.length;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        if (allCmpFactories == null) {
-            // Concatenate token and inv-list comparators.
-            IInvertedIndex invIndex = (IInvertedIndex) index;
-            IBinaryComparatorFactory[] tokenCmpFactories = invIndex.getTokenCmpFactories();
-            IBinaryComparatorFactory[] invListCmpFactories = invIndex.getInvListCmpFactories();
-            int totalCmpCount = tokenCmpFactories.length + invListCmpFactories.length;
-            allCmpFactories = new IBinaryComparatorFactory[totalCmpCount];
-            for (int i = 0; i < tokenCmpFactories.length; i++) {
-                allCmpFactories[i] = tokenCmpFactories[i];
-            }
-            for (int i = 0; i < invListCmpFactories.length; i++) {
-                allCmpFactories[i + tokenCmpFactories.length] = invListCmpFactories[i];
-            }
-        }
-        return allCmpFactories;
-    }
-
-    public static LSMInvertedIndexTestContext create(LSMInvertedIndexTestHarness harness,
-            ISerializerDeserializer[] fieldSerdes, int tokenFieldCount, IBinaryTokenizerFactory tokenizerFactory,
-            InvertedIndexType invIndexType) throws IndexException {
-        ITypeTraits[] allTypeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] allCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
-                fieldSerdes.length);
-        // Set token type traits and comparators.
-        ITypeTraits[] tokenTypeTraits = new ITypeTraits[tokenFieldCount];
-        IBinaryComparatorFactory[] tokenCmpFactories = new IBinaryComparatorFactory[tokenFieldCount];
-        for (int i = 0; i < tokenTypeTraits.length; i++) {
-            tokenTypeTraits[i] = allTypeTraits[i];
-            tokenCmpFactories[i] = allCmpFactories[i];
-        }
-        // Set inverted-list element type traits and comparators.
-        int invListFieldCount = fieldSerdes.length - tokenFieldCount;
-        ITypeTraits[] invListTypeTraits = new ITypeTraits[invListFieldCount];
-        IBinaryComparatorFactory[] invListCmpFactories = new IBinaryComparatorFactory[invListFieldCount];
-        for (int i = 0; i < invListTypeTraits.length; i++) {
-            invListTypeTraits[i] = allTypeTraits[i + tokenFieldCount];
-            invListCmpFactories[i] = allCmpFactories[i + tokenFieldCount];
-        }
-        // Create index and test context.        
-        IInvertedIndex invIndex;
-        switch (invIndexType) {
-            case INMEMORY: {
-                invIndex = InvertedIndexUtils.createInMemoryBTreeInvertedindex(harness.getMemBufferCache(),
-                        harness.getMemFreePageManager(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, tokenizerFactory);
-                break;
-            }
-            case PARTITIONED_INMEMORY: {
-                invIndex = InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(harness.getMemBufferCache(),
-                        harness.getMemFreePageManager(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, tokenizerFactory);
-                break;
-            }
-            case ONDISK: {
-                invIndex = InvertedIndexUtils.createOnDiskInvertedIndex(harness.getDiskBufferCache(),
-                        harness.getDiskFileMapProvider(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, harness.getInvListsFileRef());
-                break;
-            }
-            case PARTITIONED_ONDISK: {
-                invIndex = InvertedIndexUtils.createPartitionedOnDiskInvertedIndex(harness.getDiskBufferCache(),
-                        harness.getDiskFileMapProvider(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, harness.getInvListsFileRef());
-                break;
-            }
-            case LSM: {
-                invIndex = InvertedIndexUtils.createLSMInvertedIndex(harness.getMemBufferCache(),
-                        harness.getMemFreePageManager(), harness.getDiskFileMapProvider(), invListTypeTraits,
-                        invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
-                        harness.getDiskBufferCache(), harness.getIOManager(), harness.getOnDiskDir(),
-                        harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                        harness.getIOOperationCallbackProvider());
-                break;
-            }
-            case PARTITIONED_LSM: {
-                invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(harness.getMemBufferCache(),
-                        harness.getMemFreePageManager(), harness.getDiskFileMapProvider(), invListTypeTraits,
-                        invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
-                        harness.getDiskBufferCache(), harness.getIOManager(), harness.getOnDiskDir(),
-                        harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                        harness.getIOOperationCallbackProvider());
-                break;
-            }
-            default: {
-                throw new InvertedIndexException("Unknow inverted-index type '" + invIndexType + "'.");
-            }
-        }
-        InvertedIndexTokenizingTupleIterator indexTupleIter = null;
-        switch (invIndexType) {
-            case INMEMORY:
-            case ONDISK:
-            case LSM: {
-                indexTupleIter = new InvertedIndexTokenizingTupleIterator(invIndex.getTokenTypeTraits().length,
-                        invIndex.getInvListTypeTraits().length, tokenizerFactory.createTokenizer());
-                break;
-            }
-            case PARTITIONED_INMEMORY:
-            case PARTITIONED_ONDISK:
-            case PARTITIONED_LSM: {
-                indexTupleIter = new PartitionedInvertedIndexTokenizingTupleIterator(
-                        invIndex.getTokenTypeTraits().length, invIndex.getInvListTypeTraits().length,
-                        tokenizerFactory.createTokenizer());
-                break;
-            }
-            default: {
-                throw new InvertedIndexException("Unknow inverted-index type '" + invIndexType + "'.");
-            }
-        }
-        LSMInvertedIndexTestContext testCtx = new LSMInvertedIndexTestContext(fieldSerdes, invIndex, tokenizerFactory,
-                invIndexType, indexTupleIter);
-        return testCtx;
-    }
-
-    public void insertCheckTuples(ITupleReference tuple, Collection<CheckTuple> checkTuples)
-            throws HyracksDataException {
-        documentCorpus.add(TupleUtils.copyTuple(tuple));
-        indexTupleIter.reset(tuple);
-        while (indexTupleIter.hasNext()) {
-            indexTupleIter.next();
-            ITupleReference insertTuple = indexTupleIter.getTuple();
-            CheckTuple checkTuple = createCheckTuple(insertTuple);
-            insertCheckTuple(checkTuple, checkTuples);
-            allTokens.add(checkTuple.getField(0));
-        }
-    }
-
-    public void deleteCheckTuples(ITupleReference tuple, Collection<CheckTuple> checkTuples)
-            throws HyracksDataException {
-        indexTupleIter.reset(tuple);
-        while (indexTupleIter.hasNext()) {
-            indexTupleIter.next();
-            ITupleReference insertTuple = indexTupleIter.getTuple();
-            CheckTuple checkTuple = createCheckTuple(insertTuple);
-            deleteCheckTuple(checkTuple, checkTuples);
-        }
-    }
-
-    public HashSet<Comparable> getAllTokens() {
-        return allTokens;
-    }
-
-    @SuppressWarnings("unchecked")
-    public CheckTuple createCheckTuple(ITupleReference tuple) throws HyracksDataException {
-        CheckTuple checkTuple = new CheckTuple(fieldSerdes.length, fieldSerdes.length);
-        for (int i = 0; i < fieldSerdes.length; i++) {
-            ByteArrayInputStream bains = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
-            DataInput in = new DataInputStream(bains);
-            Comparable field = (Comparable) fieldSerdes[i].deserialize(in);
-            checkTuple.appendField(field);
-        }
-        return checkTuple;
-    }
-
-    @Override
-    public void upsertCheckTuple(CheckTuple checkTuple, Collection<CheckTuple> checkTuples) {
-        throw new UnsupportedOperationException("Upsert not supported by inverted index.");
-    }
-
-    public IBinaryTokenizerFactory getTokenizerFactory() {
-        return tokenizerFactory;
-    }
-
-    public List<ITupleReference> getDocumentCorpus() {
-        return documentCorpus;
-    }
-
-    public InvertedIndexType getInvertedIndexType() {
-        return invIndexType;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java b/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
deleted file mode 100644
index 97f78f3..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
+++ /dev/null
@@ -1,568 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util;
-
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Random;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.GrowableArray;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DocumentStringFieldValueGenerator;
-import edu.uci.ics.hyracks.storage.am.common.datagen.IFieldValueGenerator;
-import edu.uci.ics.hyracks.storage.am.common.datagen.PersonNameFieldValueGenerator;
-import edu.uci.ics.hyracks.storage.am.common.datagen.SortedIntegerFieldValueGenerator;
-import edu.uci.ics.hyracks.storage.am.common.datagen.TupleGenerator;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.HashedUTF8NGramTokenFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.HashedUTF8WordTokenFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IToken;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.ITokenFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.NGramUTF8StringBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8NGramTokenFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8WordTokenFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
-
-@SuppressWarnings("rawtypes")
-public class LSMInvertedIndexTestUtils {
-
-    public static final int TEST_GRAM_LENGTH = 3;
-
-    public static TupleGenerator createStringDocumentTupleGen(Random rnd) throws IOException {
-        IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[2];
-        fieldGens[0] = new DocumentStringFieldValueGenerator(2, 10, 10000, rnd);
-        fieldGens[1] = new SortedIntegerFieldValueGenerator(0);
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-        TupleGenerator tupleGen = new TupleGenerator(fieldGens, fieldSerdes, 0);
-        return tupleGen;
-    }
-
-    public static TupleGenerator createPersonNamesTupleGen(Random rnd) throws IOException {
-        IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[2];
-        fieldGens[0] = new PersonNameFieldValueGenerator(rnd, 0.5f);
-        fieldGens[1] = new SortedIntegerFieldValueGenerator(0);
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-        TupleGenerator tupleGen = new TupleGenerator(fieldGens, fieldSerdes, 0);
-        return tupleGen;
-    }
-
-    private static ISerializerDeserializer[] getNonHashedIndexFieldSerdes(InvertedIndexType invIndexType)
-            throws IndexException {
-        ISerializerDeserializer[] fieldSerdes = null;
-        switch (invIndexType) {
-            case INMEMORY:
-            case ONDISK:
-            case LSM: {
-                fieldSerdes = new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE };
-                break;
-            }
-            case PARTITIONED_INMEMORY:
-            case PARTITIONED_ONDISK:
-            case PARTITIONED_LSM: {
-                // Such indexes also include the set-size for partitioning.
-                fieldSerdes = new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE,
-                        ShortSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-                break;
-            }
-            default: {
-                throw new IndexException("Unhandled inverted index type '" + invIndexType + "'.");
-            }
-        }
-        return fieldSerdes;
-    }
-
-    private static ISerializerDeserializer[] getHashedIndexFieldSerdes(InvertedIndexType invIndexType)
-            throws IndexException {
-        ISerializerDeserializer[] fieldSerdes = null;
-        switch (invIndexType) {
-            case INMEMORY:
-            case ONDISK:
-            case LSM: {
-                fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE };
-                break;
-            }
-            case PARTITIONED_INMEMORY:
-            case PARTITIONED_ONDISK:
-            case PARTITIONED_LSM: {
-                // Such indexes also include the set-size for partitioning.
-                fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
-                        ShortSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-                break;
-            }
-            default: {
-                throw new IndexException("Unhandled inverted index type '" + invIndexType + "'.");
-            }
-        }
-        return fieldSerdes;
-    }
-
-    public static LSMInvertedIndexTestContext createWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
-        ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
-        ITokenFactory tokenFactory = new UTF8WordTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
-                tokenFactory);
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
-                fieldSerdes.length - 1, tokenizerFactory, invIndexType);
-        return testCtx;
-    }
-
-    public static LSMInvertedIndexTestContext createHashedWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
-        ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
-        ITokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
-                tokenFactory);
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
-                fieldSerdes.length - 1, tokenizerFactory, invIndexType);
-        return testCtx;
-    }
-
-    public static LSMInvertedIndexTestContext createNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
-        ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
-        ITokenFactory tokenFactory = new UTF8NGramTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true,
-                true, false, tokenFactory);
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
-                fieldSerdes.length - 1, tokenizerFactory, invIndexType);
-        return testCtx;
-    }
-
-    public static LSMInvertedIndexTestContext createHashedNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
-        ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
-        ITokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true,
-                true, false, tokenFactory);
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
-                fieldSerdes.length - 1, tokenizerFactory, invIndexType);
-        return testCtx;
-    }
-
-    public static void bulkLoadInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs)
-            throws IndexException, IOException {
-        SortedSet<CheckTuple> tmpMemIndex = new TreeSet<CheckTuple>();
-        // First generate the expected index by inserting the documents one-by-one.
-        for (int i = 0; i < numDocs; i++) {
-            ITupleReference tuple = tupleGen.next();
-            testCtx.insertCheckTuples(tuple, tmpMemIndex);
-        }
-        ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
-
-        // Use the expected index to bulk-load the actual index.
-        IIndexBulkLoader bulkLoader = testCtx.getIndex().createBulkLoader(1.0f, false, numDocs);
-        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(testCtx.getFieldSerdes().length);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        Iterator<CheckTuple> checkTupleIter = tmpMemIndex.iterator();
-        while (checkTupleIter.hasNext()) {
-            CheckTuple checkTuple = checkTupleIter.next();
-            OrderedIndexTestUtils.createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, fieldSerdes);
-            bulkLoader.add(tuple);
-        }
-        bulkLoader.end();
-
-        // Add all check tuples from the temp index to the text context.
-        testCtx.getCheckTuples().addAll(tmpMemIndex);
-    }
-
-    public static void insertIntoInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs)
-            throws IOException, IndexException {
-        // InMemoryInvertedIndex only supports insert.
-        for (int i = 0; i < numDocs; i++) {
-            ITupleReference tuple = tupleGen.next();
-            testCtx.getIndexAccessor().insert(tuple);
-            testCtx.insertCheckTuples(tuple, testCtx.getCheckTuples());
-        }
-    }
-
-    public static void deleteFromInvIndex(LSMInvertedIndexTestContext testCtx, Random rnd, int numDocsToDelete)
-            throws HyracksDataException, IndexException {
-        List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
-        for (int i = 0; i < numDocsToDelete && !documentCorpus.isEmpty(); i++) {
-            int size = documentCorpus.size();
-            int tupleIndex = Math.abs(rnd.nextInt()) % size;
-            ITupleReference deleteTuple = documentCorpus.get(tupleIndex);
-            testCtx.getIndexAccessor().delete(deleteTuple);
-            testCtx.deleteCheckTuples(deleteTuple, testCtx.getCheckTuples());
-            // Swap tupleIndex with last element.
-            documentCorpus.set(tupleIndex, documentCorpus.get(size - 1));
-            documentCorpus.remove(size - 1);
-        }
-    }
-
-    /**
-     * Compares actual and expected indexes using the rangeSearch() method of the inverted-index accessor.
-     */
-    public static void compareActualAndExpectedIndexesRangeSearch(LSMInvertedIndexTestContext testCtx)
-            throws HyracksDataException, IndexException {
-        IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
-        int tokenFieldCount = invIndex.getTokenTypeTraits().length;
-        int invListFieldCount = invIndex.getInvListTypeTraits().length;
-        IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        IIndexCursor invIndexCursor = invIndexAccessor.createRangeSearchCursor();
-        MultiComparator tokenCmp = MultiComparator.create(invIndex.getTokenCmpFactories());
-        IBinaryComparatorFactory[] tupleCmpFactories = new IBinaryComparatorFactory[tokenFieldCount + invListFieldCount];
-        for (int i = 0; i < tokenFieldCount; i++) {
-            tupleCmpFactories[i] = invIndex.getTokenCmpFactories()[i];
-        }
-        for (int i = 0; i < invListFieldCount; i++) {
-            tupleCmpFactories[tokenFieldCount + i] = invIndex.getInvListCmpFactories()[i];
-        }
-        MultiComparator tupleCmp = MultiComparator.create(tupleCmpFactories);
-        RangePredicate nullPred = new RangePredicate(null, null, true, true, tokenCmp, tokenCmp);
-        invIndexAccessor.rangeSearch(invIndexCursor, nullPred);
-
-        // Helpers for generating a serialized inverted-list element from a CheckTuple from the expected index.
-        ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
-        ArrayTupleBuilder expectedBuilder = new ArrayTupleBuilder(fieldSerdes.length);
-        ArrayTupleReference expectedTuple = new ArrayTupleReference();
-
-        Iterator<CheckTuple> expectedIter = testCtx.getCheckTuples().iterator();
-
-        // Compare index elements.
-        try {
-            while (invIndexCursor.hasNext() && expectedIter.hasNext()) {
-                invIndexCursor.next();
-                ITupleReference actualTuple = invIndexCursor.getTuple();
-                CheckTuple expected = expectedIter.next();
-                OrderedIndexTestUtils.createTupleFromCheckTuple(expected, expectedBuilder, expectedTuple, fieldSerdes);
-                if (tupleCmp.compare(actualTuple, expectedTuple) != 0) {
-                    fail("Index entries differ for token '" + expected.getField(0) + "'.");
-                }
-            }
-            if (expectedIter.hasNext()) {
-                fail("Indexes do not match. Actual index is missing entries.");
-            }
-            if (invIndexCursor.hasNext()) {
-                fail("Indexes do not match. Actual index contains too many entries.");
-            }
-        } finally {
-            invIndexCursor.close();
-        }
-    }
-
-    /**
-     * Compares actual and expected indexes by comparing their inverted-lists one by one. Exercises the openInvertedListCursor() method of the inverted-index accessor.
-     */
-    @SuppressWarnings("unchecked")
-    public static void compareActualAndExpectedIndexes(LSMInvertedIndexTestContext testCtx)
-            throws HyracksDataException, IndexException {
-        IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
-        ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
-        MultiComparator invListCmp = MultiComparator.create(invIndex.getInvListCmpFactories());
-        IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) testCtx.getIndexAccessor();
-        int tokenFieldCount = invIndex.getTokenTypeTraits().length;
-        int invListFieldCount = invIndex.getInvListTypeTraits().length;
-        // All tokens that were inserted into the indexes.
-        Iterator<Comparable> tokensIter = testCtx.getAllTokens().iterator();
-
-        // Search key for finding an inverted-list in the actual index.
-        ArrayTupleBuilder searchKeyBuilder = new ArrayTupleBuilder(tokenFieldCount);
-        ArrayTupleReference searchKey = new ArrayTupleReference();
-        // Cursor over inverted list from actual index.
-        IInvertedListCursor actualInvListCursor = invIndexAccessor.createInvertedListCursor();
-
-        // Helpers for generating a serialized inverted-list element from a CheckTuple from the expected index.
-        ArrayTupleBuilder expectedBuilder = new ArrayTupleBuilder(fieldSerdes.length);
-        // Includes the token fields.
-        ArrayTupleReference completeExpectedTuple = new ArrayTupleReference();
-        // Field permutation and permuting tuple reference to strip away token fields from completeExpectedTuple.
-        int[] fieldPermutation = new int[invListFieldCount];
-        for (int i = 0; i < fieldPermutation.length; i++) {
-            fieldPermutation[i] = tokenFieldCount + i;
-        }
-        PermutingTupleReference expectedTuple = new PermutingTupleReference(fieldPermutation);
-
-        // Iterate over all tokens. Find the inverted-lists in actual and expected indexes. Compare the inverted lists,
-        while (tokensIter.hasNext()) {
-            Comparable token = tokensIter.next();
-
-            // Position inverted-list iterator on expected index.
-            CheckTuple checkLowKey = new CheckTuple(tokenFieldCount, tokenFieldCount);
-            checkLowKey.appendField(token);
-            CheckTuple checkHighKey = new CheckTuple(tokenFieldCount, tokenFieldCount);
-            checkHighKey.appendField(token);
-
-            SortedSet<CheckTuple> expectedInvList = OrderedIndexTestUtils.getPrefixExpectedSubset(
-                    testCtx.getCheckTuples(), checkLowKey, checkHighKey);
-            Iterator<CheckTuple> expectedInvListIter = expectedInvList.iterator();
-
-            // Position inverted-list cursor in actual index.
-            OrderedIndexTestUtils.createTupleFromCheckTuple(checkLowKey, searchKeyBuilder, searchKey, fieldSerdes);
-            invIndexAccessor.openInvertedListCursor(actualInvListCursor, searchKey);
-
-            if (actualInvListCursor.size() != expectedInvList.size()) {
-                fail("Actual and expected inverted lists for token '" + token.toString()
-                        + "' have different sizes. Actual size: " + actualInvListCursor.size() + ". Expected size: "
-                        + expectedInvList.size() + ".");
-            }
-            // Compare inverted-list elements.
-            int count = 0;
-            actualInvListCursor.pinPages();
-            try {
-                while (actualInvListCursor.hasNext() && expectedInvListIter.hasNext()) {
-                    actualInvListCursor.next();
-                    ITupleReference actual = actualInvListCursor.getTuple();
-                    CheckTuple expected = expectedInvListIter.next();
-                    OrderedIndexTestUtils.createTupleFromCheckTuple(expected, expectedBuilder, completeExpectedTuple,
-                            fieldSerdes);
-                    expectedTuple.reset(completeExpectedTuple);
-                    if (invListCmp.compare(actual, expectedTuple) != 0) {
-                        fail("Inverted lists of token '" + token + "' differ at position " + count + ".");
-                    }
-                    count++;
-                }
-            } finally {
-                actualInvListCursor.unpinPages();
-            }
-        }
-    }
-
-    /**
-     * Determine the expected results with the simple ScanCount algorithm.
-     */
-    public static void getExpectedResults(int[] scanCountArray, TreeSet<CheckTuple> checkTuples,
-            ITupleReference searchDocument, IBinaryTokenizer tokenizer, ISerializerDeserializer tokenSerde,
-            IInvertedIndexSearchModifier searchModifier, List<Integer> expectedResults, InvertedIndexType invIndexType)
-            throws IOException {
-        boolean isPartitioned = false;
-        switch (invIndexType) {
-            case INMEMORY:
-            case ONDISK:
-            case LSM: {
-                isPartitioned = false;
-                break;
-            }
-            case PARTITIONED_INMEMORY:
-            case PARTITIONED_ONDISK:
-            case PARTITIONED_LSM: {
-                isPartitioned = true;
-                break;
-            }
-        }
-        getExpectedResults(scanCountArray, checkTuples, searchDocument, tokenizer, tokenSerde, searchModifier,
-                expectedResults, isPartitioned);
-    }
-
-    @SuppressWarnings("unchecked")
-    public static void getExpectedResults(int[] scanCountArray, TreeSet<CheckTuple> checkTuples,
-            ITupleReference searchDocument, IBinaryTokenizer tokenizer, ISerializerDeserializer tokenSerde,
-            IInvertedIndexSearchModifier searchModifier, List<Integer> expectedResults, boolean isPartitioned)
-            throws IOException {
-        // Reset scan count array.
-        Arrays.fill(scanCountArray, 0);
-        expectedResults.clear();
-
-        GrowableArray tokenData = new GrowableArray();
-        tokenizer.reset(searchDocument.getFieldData(0), searchDocument.getFieldStart(0),
-                searchDocument.getFieldLength(0));
-        // Run though tokenizer to get number of tokens.
-        int numQueryTokens = 0;
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-            numQueryTokens++;
-        }
-        short numTokensLowerBound = -1;
-        short numTokensUpperBound = -1;
-        int invListElementField = 1;
-        if (isPartitioned) {
-            numTokensLowerBound = searchModifier.getNumTokensLowerBound((short) numQueryTokens);
-            numTokensUpperBound = searchModifier.getNumTokensUpperBound((short) numQueryTokens);
-            invListElementField = 2;
-        }
-        int occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
-        tokenizer.reset(searchDocument.getFieldData(0), searchDocument.getFieldStart(0),
-                searchDocument.getFieldLength(0));
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
-            IToken token = tokenizer.getToken();
-            tokenData.reset();
-            token.serializeToken(tokenData);
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tokenData.getByteArray(), 0, tokenData.getLength());
-            DataInput dataIn = new DataInputStream(inStream);
-            Comparable tokenObj = (Comparable) tokenSerde.deserialize(dataIn);
-            CheckTuple lowKey;
-            if (numTokensLowerBound < 0) {
-                // Index is not partitioned, or no length filtering is possible for this search modifier.
-                lowKey = new CheckTuple(1, 1);
-                lowKey.appendField(tokenObj);
-            } else {
-                // Index is length partitioned, and search modifier supports length filtering.
-                lowKey = new CheckTuple(2, 2);
-                lowKey.appendField(tokenObj);
-                lowKey.appendField(Short.valueOf(numTokensLowerBound));
-            }
-            CheckTuple highKey;
-            if (numTokensUpperBound < 0) {
-                // Index is not partitioned, or no length filtering is possible for this search modifier.
-                highKey = new CheckTuple(1, 1);
-                highKey.appendField(tokenObj);
-            } else {
-                // Index is length partitioned, and search modifier supports length filtering.
-                highKey = new CheckTuple(2, 2);
-                highKey.appendField(tokenObj);
-                highKey.appendField(Short.valueOf(numTokensUpperBound));
-            }
-
-            // Get view over check tuples containing inverted-list corresponding to token. 
-            SortedSet<CheckTuple> invList = OrderedIndexTestUtils.getPrefixExpectedSubset(checkTuples, lowKey, highKey);
-            Iterator<CheckTuple> invListIter = invList.iterator();
-            // Iterate over inverted list and update scan count array.
-            while (invListIter.hasNext()) {
-                CheckTuple checkTuple = invListIter.next();
-                Integer element = (Integer) checkTuple.getField(invListElementField);
-                scanCountArray[element]++;
-            }
-        }
-
-        // Run through scan count array, and see whether elements satisfy the given occurrence threshold.
-        expectedResults.clear();
-        for (int i = 0; i < scanCountArray.length; i++) {
-            if (scanCountArray[i] >= occurrenceThreshold) {
-                expectedResults.add(i);
-            }
-        }
-    }
-
-    public static void testIndexSearch(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, Random rnd,
-            int numDocQueries, int numRandomQueries, IInvertedIndexSearchModifier searchModifier, int[] scanCountArray)
-            throws IOException, IndexException {
-        IInvertedIndex invIndex = testCtx.invIndex;
-        IInvertedIndexAccessor accessor = (IInvertedIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-        IBinaryTokenizer tokenizer = testCtx.getTokenizerFactory().createTokenizer();
-        InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);
-        List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
-        // Project away the primary-key field.
-        int[] fieldPermutation = new int[] { 0 };
-        PermutingTupleReference searchDocument = new PermutingTupleReference(fieldPermutation);
-
-        IIndexCursor resultCursor = accessor.createSearchCursor();
-        int numQueries = numDocQueries + numRandomQueries;
-        for (int i = 0; i < numQueries; i++) {
-            // If number of documents in the corpus is less than numDocQueries, then replace the remaining ones with random queries.
-            if (i >= numDocQueries || i >= documentCorpus.size()) {
-                // Generate a random query.
-                ITupleReference randomQuery = tupleGen.next();
-                searchDocument.reset(randomQuery);
-            } else {
-                // Pick a random document from the corpus to use as the search query.
-                int queryIndex = Math.abs(rnd.nextInt() % documentCorpus.size());
-                searchDocument.reset(documentCorpus.get(queryIndex));
-            }
-
-            // Set query tuple in search predicate.
-            searchPred.setQueryTuple(searchDocument);
-            searchPred.setQueryFieldIndex(0);
-
-            resultCursor.reset();
-            boolean panic = false;
-            try {
-                accessor.search(resultCursor, searchPred);
-            } catch (OccurrenceThresholdPanicException e) {
-                // ignore panic queries.
-                panic = true;
-            }
-
-            try {
-                if (!panic) {
-                    // Consume cursor and deserialize results so we can sort them. Some search cursors may not deliver the result sorted (e.g., LSM search cursor).
-                    ArrayList<Integer> actualResults = new ArrayList<Integer>();
-                    try {
-                        while (resultCursor.hasNext()) {
-                            resultCursor.next();
-                            ITupleReference resultTuple = resultCursor.getTuple();
-                            int actual = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                                    resultTuple.getFieldStart(0));
-                            actualResults.add(Integer.valueOf(actual));
-                        }
-                    } catch (OccurrenceThresholdPanicException e) {
-                        // Ignore panic queries.
-                        continue;
-                    }
-                    Collections.sort(actualResults);
-
-                    // Get expected results.
-                    List<Integer> expectedResults = new ArrayList<Integer>();
-                    LSMInvertedIndexTestUtils.getExpectedResults(scanCountArray, testCtx.getCheckTuples(),
-                            searchDocument, tokenizer, testCtx.getFieldSerdes()[0], searchModifier, expectedResults,
-                            testCtx.getInvertedIndexType());
-
-                    Iterator<Integer> expectedIter = expectedResults.iterator();
-                    Iterator<Integer> actualIter = actualResults.iterator();
-                    while (expectedIter.hasNext() && actualIter.hasNext()) {
-                        int expected = expectedIter.next();
-                        int actual = actualIter.next();
-                        if (actual != expected) {
-                            fail("Query results do not match. Encountered: " + actual + ". Expected: " + expected + "");
-                        }
-                    }
-                    if (expectedIter.hasNext()) {
-                        fail("Query results do not match. Actual results missing.");
-                    }
-                    if (actualIter.hasNext()) {
-                        fail("Query results do not match. Actual contains too many results.");
-                    }
-                }
-            } finally {
-                resultCursor.close();
-            }
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
deleted file mode 100644
index c53c7d0..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-lsm-rtree-test</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-tests</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>  	
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
deleted file mode 100644
index 995f18c..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeBulkLoadTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeBulkLoadTest extends AbstractRTreeBulkLoadTest {
-
-    public LSMRTreeBulkLoadTest() {
-        super(1, AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
deleted file mode 100644
index d72b668..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeDeleteTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeDeleteTest extends AbstractRTreeDeleteTest {
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    public LSMRTreeDeleteTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
deleted file mode 100644
index cba8cee..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeExamplesTest;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class LSMRTreeExamplesTest extends AbstractRTreeExamplesTest {
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    @Override
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType) throws TreeIndexException {
-        return LSMRTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider(),
-                LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
deleted file mode 100644
index 96485f8..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeInsertTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeInsertTest extends AbstractRTreeInsertTest {
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    public LSMRTreeInsertTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeLifecycleTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeLifecycleTest.java
deleted file mode 100644
index e72b3ca..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeLifecycleTest.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexLifecycleTest;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.rtree.RTreeTestUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-public class LSMRTreeLifecycleTest extends AbstractIndexLifecycleTest {
-
-    @SuppressWarnings("rawtypes")
-    private final ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-    private final IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
-            .createPrimitiveValueProviderFactories(4, IntegerPointable.FACTORY);
-    private final int numKeys = 4;
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-    private final TreeIndexTestUtils titu = new RTreeTestUtils();
-
-    @SuppressWarnings("rawtypes")
-    private IIndexTestContext<? extends CheckTuple> testCtx;
-
-    @Override
-    protected boolean persistentStateExists() throws Exception {
-        // make sure all of the directories exist
-        for (IODeviceHandle handle : harness.getIOManager().getIODevices()) {
-            if (!new FileReference(handle, harness.getFileReference().getFile().getPath()).getFile().exists()) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    @Override
-    protected boolean isEmptyIndex() throws Exception {
-        return ((LSMRTree) index).isEmptyIndex();
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        testCtx = LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-        index = testCtx.getIndex();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        index.deactivate();
-        index.destroy();
-        harness.tearDown();
-    }
-
-    @Override
-    protected void performInsertions() throws Exception {
-        titu.insertIntTuples(testCtx, 10, harness.getRandom());
-    }
-
-    @Override
-    protected void checkInsertions() throws Exception {
-        titu.checkScan(testCtx);
-    }
-
-    @Override
-    protected void clearCheckableInsertions() throws Exception {
-        testCtx.getCheckTuples().clear();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
deleted file mode 100644
index 1d07484..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeMergeTest extends LSMRTreeMergeTestDriver {
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    public LSMRTreeMergeTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTestDriver.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTestDriver.java
deleted file mode 100644
index f272942..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTestDriver.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestDriver;
-import edu.uci.ics.hyracks.storage.am.rtree.RTreeTestUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public abstract class LSMRTreeMergeTestDriver extends AbstractRTreeTestDriver {
-
-    private final RTreeTestUtils rTreeTestUtils;
-
-    public LSMRTreeMergeTestDriver(boolean testRstarPolicy) {
-        super(testRstarPolicy);
-        this.rTreeTestUtils = new RTreeTestUtils();
-    }
-
-    @Override
-    protected void runTest(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key,
-            RTreePolicyType rtreePolicyType) throws Exception {
-
-        AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType);
-        ctx.getIndex().create();
-        ctx.getIndex().activate();
-        // Start off with one tree bulk loaded.
-        // We assume all fieldSerdes are of the same type. Check the first one
-        // to determine which field types to generate.
-        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-            rTreeTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
-        } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
-            rTreeTestUtils.bulkLoadDoubleTuples(ctx, numTuplesToInsert, getRandom());
-        }
-
-        int maxTreesToMerge = AccessMethodTestsConfig.LSM_RTREE_BULKLOAD_ROUNDS;
-        for (int i = 0; i < maxTreesToMerge; i++) {
-            for (int j = 0; j < i; j++) {
-                if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
-                    rTreeTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
-                } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
-                    rTreeTestUtils.bulkLoadDoubleTuples(ctx, numTuplesToInsert, getRandom());
-                }
-            }
-
-            ILSMIndexAccessor accessor = (ILSMIndexAccessor) ctx.getIndexAccessor();
-            accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-
-            rTreeTestUtils.checkScan(ctx);
-            rTreeTestUtils.checkDiskOrderScan(ctx);
-            rTreeTestUtils.checkRangeSearch(ctx, key);
-        }
-        ctx.getIndex().deactivate();
-        ctx.getIndex().destroy();
-    }
-
-    @Override
-    protected String getTestOpName() {
-        return "LSM Merge";
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMultiBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMultiBulkLoadTest.java
deleted file mode 100644
index cc46065..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMultiBulkLoadTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeBulkLoadTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeMultiBulkLoadTest extends AbstractRTreeBulkLoadTest {
-
-    public LSMRTreeMultiBulkLoadTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_BULKLOAD_ROUNDS, AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
-                harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
deleted file mode 100644
index 81a952d..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeWithAntiMatterTuplesTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeBulkLoadTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeWithAntiMatterTuplesBulkLoadTest extends AbstractRTreeBulkLoadTest {
-
-    public LSMRTreeWithAntiMatterTuplesBulkLoadTest() {
-        super(1, AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeWithAntiMatterTuplesTestContext.create(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
-                numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
-                harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
-
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
deleted file mode 100644
index 1ee92d9..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeWithAntiMatterTuplesTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeDeleteTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeWithAntiMatterTuplesDeleteTest extends AbstractRTreeDeleteTest {
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    public LSMRTreeWithAntiMatterTuplesDeleteTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeWithAntiMatterTuplesTestContext.create(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
-                numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
-                harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
-
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
deleted file mode 100644
index 3a2537c..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeExamplesTest;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class LSMRTreeWithAntiMatterTuplesExamplesTest extends AbstractRTreeExamplesTest {
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    @Override
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType) throws TreeIndexException {
-        return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories,
-                btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider(),
-                LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
deleted file mode 100644
index 61d5ce7..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeWithAntiMatterTuplesTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeInsertTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeWithAntiMatterTuplesInsertTest extends AbstractRTreeInsertTest {
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    public LSMRTreeWithAntiMatterTuplesInsertTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeWithAntiMatterTuplesTestContext.create(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
-                numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
-                harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
-
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesLifecycleTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesLifecycleTest.java
deleted file mode 100644
index aee8670..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesLifecycleTest.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexLifecycleTest;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeWithAntiMatterTuples;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeWithAntiMatterTuplesTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.RTreeTestUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-
-public class LSMRTreeWithAntiMatterTuplesLifecycleTest extends AbstractIndexLifecycleTest {
-
-    @SuppressWarnings("rawtypes")
-    private final ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-    private final IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
-            .createPrimitiveValueProviderFactories(4, IntegerPointable.FACTORY);
-    private final int numKeys = 4;
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-    private final TreeIndexTestUtils titu = new RTreeTestUtils();
-
-    @SuppressWarnings("rawtypes")
-    private IIndexTestContext<? extends CheckTuple> testCtx;
-
-    @Override
-    protected boolean persistentStateExists() throws Exception {
-        // make sure all of the directories exist
-        for (IODeviceHandle handle : harness.getIOManager().getIODevices()) {
-            if (!new FileReference(handle, harness.getFileReference().getFile().getPath()).getFile().exists()) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    @Override
-    protected boolean isEmptyIndex() throws Exception {
-        return ((LSMRTreeWithAntiMatterTuples) index).isEmptyIndex();
-    }
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        testCtx = LSMRTreeWithAntiMatterTuplesTestContext.create(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
-                numKeys, RTreePolicyType.RTREE, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
-                harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
-        index = testCtx.getIndex();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        index.deactivate();
-        index.destroy();
-        harness.tearDown();
-    }
-
-    @Override
-    protected void performInsertions() throws Exception {
-        titu.insertIntTuples(testCtx, 10, harness.getRandom());
-    }
-
-    @Override
-    protected void checkInsertions() throws Exception {
-        titu.checkScan(testCtx);
-    }
-
-    @Override
-    protected void clearCheckableInsertions() throws Exception {
-        testCtx.getCheckTuples().clear();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
deleted file mode 100644
index d5fecbf..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeWithAntiMatterTuplesTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeWithAntiMatterTuplesMergeTest extends LSMRTreeMergeTestDriver {
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    public LSMRTreeWithAntiMatterTuplesMergeTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeWithAntiMatterTuplesTestContext.create(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
-                numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
-                harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest.java
deleted file mode 100644
index de5f065..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeWithAntiMatterTuplesTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeBulkLoadTest;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-@SuppressWarnings("rawtypes")
-public class LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest extends AbstractRTreeBulkLoadTest {
-
-    public LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest() {
-        super(AccessMethodTestsConfig.LSM_RTREE_BULKLOAD_ROUNDS, AccessMethodTestsConfig.LSM_RTREE_TEST_RSTAR_POLICY);
-    }
-
-    private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return LSMRTreeWithAntiMatterTuplesTestContext.create(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
-                numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
-                harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
-
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
deleted file mode 100644
index d530b82..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public abstract class AbstractLSMRTreeTestWorker extends AbstractIndexTestWorker {
-
-    protected final ITreeIndex lsmRTree;
-    protected final int numFields;
-    protected final ArrayTupleBuilder rearrangedTb;
-    protected final ArrayTupleReference rearrangedTuple = new ArrayTupleReference();
-
-    public AbstractLSMRTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index,
-            int numBatches) {
-        super(dataGen, opSelector, index, numBatches);
-        lsmRTree = (ITreeIndex) index;
-        numFields = lsmRTree.getFieldCount();
-        rearrangedTb = new ArrayTupleBuilder(numFields);
-    }
-
-    protected void rearrangeTuple(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
-        // Create a tuple with rearranged key values to make sure lower points
-        // have larger coordinates than high points.
-        rearrangedTb.reset();
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            } else {
-                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-            }
-        }
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-            } else {
-                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            }
-        }
-        for (int i = cmp.getKeyFieldCount(); i < numFields; i++) {
-            rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-        }
-        rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
-    }
-
-    protected void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-            }
-        } finally {
-            cursor.close();
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
deleted file mode 100644
index 37be58d..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.ProbabilityHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeMultiThreadTest;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class LSMRTreeMultiThreadTest extends AbstractRTreeMultiThreadTest {
-
-    private LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    private LSMRTreeTestWorkerFactory workerFactory = new LSMRTreeTestWorkerFactory();
-
-    public LSMRTreeMultiThreadTest() {
-        super(false);
-    }
-
-    @Override
-    protected void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @Override
-    protected void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType) throws TreeIndexException {
-        return LSMRTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
-                harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
-                harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider(),
-                LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
-    }
-
-    @Override
-    protected IIndexTestWorkerFactory getWorkerFactory() {
-        return workerFactory;
-    }
-
-    @Override
-    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
-        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
-
-        // Insert only workload.
-        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
-
-        // Insert and merge workload.
-        TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertMergeOps.length)));
-
-        // Inserts mixed with scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
-
-        // Inserts and deletes.
-        TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteOps.length)));
-
-        // Inserts, deletes and merges.
-        TestOperation[] insertDeleteMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteMergeOps.length)));
-
-        // All operations except merge.
-        TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
-                .getUniformProbDist(allNoMergeOps.length)));
-
-        // All operations.
-        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN,
-                TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
-
-        return workloadConfs;
-    }
-
-    @Override
-    protected String getIndexTypeName() {
-        return "LSMRTree";
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
deleted file mode 100644
index 154756e..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree.LSMRTreeAccessor;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-
-public class LSMRTreeTestWorker extends AbstractIndexTestWorker {
-
-    private final LSMRTree lsmRTree;
-    private final int numFields;
-    private final ArrayTupleBuilder rearrangedTb;
-    private final ArrayTupleReference rearrangedTuple = new ArrayTupleReference();
-
-    public LSMRTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index, int numBatches) {
-        super(dataGen, opSelector, index, numBatches);
-        lsmRTree = (LSMRTree) index;
-        numFields = lsmRTree.getFieldCount();
-        rearrangedTb = new ArrayTupleBuilder(numFields);
-    }
-
-    @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
-        LSMRTreeAccessor accessor = (LSMRTreeAccessor) indexAccessor;
-        ITreeIndexCursor searchCursor = accessor.createSearchCursor();
-        MultiComparator cmp = accessor.getMultiComparator();
-        SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
-
-        switch (op) {
-            case INSERT:
-                rearrangeTuple(tuple, cmp);
-                accessor.insert(rearrangedTuple);
-                break;
-
-            case DELETE:
-                rearrangeTuple(tuple, cmp);
-                accessor.delete(rearrangedTuple);
-                break;
-
-            case SCAN:
-                searchCursor.reset();
-                rangePred.setSearchKey(null);
-                accessor.search(searchCursor, rangePred);
-                consumeCursorTuples(searchCursor);
-                break;
-
-            case MERGE:
-                accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-                break;
-
-            default:
-                throw new HyracksDataException("Op " + op.toString() + " not supported.");
-        }
-    }
-
-    private void rearrangeTuple(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
-        // Create a tuple with rearranged key values to make sure lower points
-        // have larger coordinates than high points.
-        rearrangedTb.reset();
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            } else {
-                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-            }
-        }
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-            } else {
-                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            }
-        }
-        for (int i = cmp.getKeyFieldCount(); i < numFields; i++) {
-            rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-        }
-        rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
-    }
-
-    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
-        try {
-            while (cursor.hasNext()) {
-                cursor.next();
-            }
-        } finally {
-            cursor.close();
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorkerFactory.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorkerFactory.java
deleted file mode 100644
index 13e3ab0..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorkerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
-
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class LSMRTreeTestWorkerFactory implements IIndexTestWorkerFactory {
-    @Override
-    public AbstractIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) {
-        return new LSMRTreeTestWorker(dataGen, opSelector, index, numBatches);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
deleted file mode 100644
index af73676..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.ProbabilityHelper;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeMultiThreadTest;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-
-public class LSMRTreeWithAntiMatterTuplesMultiThreadTest extends AbstractRTreeMultiThreadTest {
-
-    private LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
-
-    private LSMRTreeWithAntiMatterTuplesTestWorkerFactory workerFactory = new LSMRTreeWithAntiMatterTuplesTestWorkerFactory();
-
-    public LSMRTreeWithAntiMatterTuplesMultiThreadTest() {
-        super(false);
-    }
-
-    @Override
-    protected void setUp() throws HyracksException {
-        harness.setUp();
-    }
-
-    @Override
-    protected void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType) throws TreeIndexException {
-        return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(harness.getMemBufferCache(),
-                harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories,
-                btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
-                harness.getOperationTrackerFactory(), harness.getIOScheduler(),
-                harness.getIOOperationCallbackProvider(),
-                LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
-
-    }
-
-    @Override
-    protected IIndexTestWorkerFactory getWorkerFactory() {
-        return workerFactory;
-    }
-
-    @Override
-    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
-        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
-
-        // Insert only workload.
-        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
-
-        // Insert and merge workload.
-        TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertMergeOps.length)));
-
-        // Inserts mixed with scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
-
-        // Inserts and deletes.
-        TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteOps.length)));
-
-        // Inserts, deletes and merges.
-        TestOperation[] insertDeleteMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteMergeOps.length)));
-
-        // All operations except merge.
-        TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
-                .getUniformProbDist(allNoMergeOps.length)));
-
-        // All operations.
-        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN,
-                TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
-
-        return workloadConfs;
-    }
-
-    @Override
-    protected String getIndexTypeName() {
-        return "LSMRTree";
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
deleted file mode 100644
index 661138d..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeWithAntiMatterTuples.LSMRTreeWithAntiMatterTuplesAccessor;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-
-public class LSMRTreeWithAntiMatterTuplesTestWorker extends AbstractLSMRTreeTestWorker {
-
-    public LSMRTreeWithAntiMatterTuplesTestWorker(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) {
-        super(dataGen, opSelector, index, numBatches);
-    }
-
-    @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
-        LSMRTreeWithAntiMatterTuplesAccessor accessor = (LSMRTreeWithAntiMatterTuplesAccessor) indexAccessor;
-        ITreeIndexCursor searchCursor = accessor.createSearchCursor();
-        MultiComparator cmp = accessor.getMultiComparator();
-        SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
-
-        switch (op) {
-            case INSERT:
-                rearrangeTuple(tuple, cmp);
-                accessor.insert(rearrangedTuple);
-                break;
-
-            case DELETE:
-                rearrangeTuple(tuple, cmp);
-                accessor.delete(rearrangedTuple);
-                break;
-
-            case SCAN:
-                searchCursor.reset();
-                rangePred.setSearchKey(null);
-                accessor.search(searchCursor, rangePred);
-                consumeCursorTuples(searchCursor);
-                break;
-
-            case MERGE:
-                accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE);
-                break;
-
-            default:
-                throw new HyracksDataException("Op " + op.toString() + " not supported.");
-        }
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorkerFactory.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorkerFactory.java
deleted file mode 100644
index 4e78d82..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorkerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
-
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class LSMRTreeWithAntiMatterTuplesTestWorkerFactory implements IIndexTestWorkerFactory {
-    @Override
-    public AbstractIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) {
-        return new LSMRTreeWithAntiMatterTuplesTestWorker(dataGen, opSelector, index, numBatches);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestContext.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestContext.java
deleted file mode 100644
index 27fb9c8..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestContext.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.util;
-
-import java.util.Collection;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.RTreeCheckTuple;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-@SuppressWarnings("rawtypes")
-public final class LSMRTreeTestContext extends AbstractRTreeTestContext {
-
-    public LSMRTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
-        super(fieldSerdes, treeIndex);
-    }
-
-    @Override
-    public int getKeyFieldCount() {
-        LSMRTree lsmTree = (LSMRTree) index;
-        return lsmTree.getComparatorFactories().length;
-    }
-
-    /**
-     * Override to provide delete semantics for the check tuples.
-     */
-    @Override
-    public void deleteCheckTuple(RTreeCheckTuple checkTuple, Collection<RTreeCheckTuple> checkTuples) {
-        while (checkTuples.remove(checkTuple)) {
-        }
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        LSMRTree lsmTree = (LSMRTree) index;
-        return lsmTree.getComparatorFactories();
-    }
-
-    public static LSMRTreeTestContext create(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeyFields, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
-            throws Exception {
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils
-                .serdesToComparatorFactories(fieldSerdes, numKeyFields);
-        IBinaryComparatorFactory[] btreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
-                fieldSerdes.length);
-        LSMRTree lsmTree = LSMRTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file,
-                diskBufferCache, diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler,
-                ioOpCallbackProvider, LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
-        LSMRTreeTestContext testCtx = new LSMRTreeTestContext(fieldSerdes, lsmTree);
-        return testCtx;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
deleted file mode 100644
index 6fb6f9c..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.util;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.DualIndexInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class LSMRTreeTestHarness {
-    protected static final Logger LOGGER = Logger.getLogger(LSMRTreeTestHarness.class.getName());
-
-    private static final long RANDOM_SEED = 50;
-
-    protected final int diskPageSize;
-    protected final int diskNumPages;
-    protected final int diskMaxOpenFiles;
-    protected final int memPageSize;
-    protected final int memNumPages;
-    protected final int hyracksFrameSize;
-
-    protected IOManager ioManager;
-    protected IBufferCache diskBufferCache;
-    protected IFileMapProvider diskFileMapProvider;
-    protected IInMemoryBufferCache memBufferCache;
-    protected IInMemoryFreePageManager memFreePageManager;
-    protected IHyracksTaskContext ctx;
-    protected ILSMIOOperationScheduler ioScheduler;
-    protected ILSMIOOperationCallbackProvider ioOpCallbackProvider;
-    protected ILSMMergePolicy mergePolicy;
-    protected ILSMOperationTrackerFactory opTrackerFactory;
-
-    protected final Random rnd = new Random();
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String sep = System.getProperty("file.separator");
-    protected String onDiskDir;
-    protected FileReference file;
-
-    public LSMRTreeTestHarness() {
-        this.diskPageSize = AccessMethodTestsConfig.LSM_RTREE_DISK_PAGE_SIZE;
-        this.diskNumPages = AccessMethodTestsConfig.LSM_RTREE_DISK_NUM_PAGES;
-        this.diskMaxOpenFiles = AccessMethodTestsConfig.LSM_RTREE_DISK_MAX_OPEN_FILES;
-        this.memPageSize = AccessMethodTestsConfig.LSM_RTREE_MEM_PAGE_SIZE;
-        this.memNumPages = AccessMethodTestsConfig.LSM_RTREE_MEM_NUM_PAGES;
-        this.hyracksFrameSize = AccessMethodTestsConfig.LSM_RTREE_HYRACKS_FRAME_SIZE;
-        this.ioScheduler = SynchronousScheduler.INSTANCE;
-        this.mergePolicy = NoMergePolicy.INSTANCE;
-        this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
-        this.ioOpCallbackProvider = NoOpIOOperationCallback.INSTANCE;
-    }
-
-    public LSMRTreeTestHarness(int diskPageSize, int diskNumPages, int diskMaxOpenFiles, int memPageSize,
-            int memNumPages, int hyracksFrameSize) {
-        this.diskPageSize = diskPageSize;
-        this.diskNumPages = diskNumPages;
-        this.diskMaxOpenFiles = diskMaxOpenFiles;
-        this.memPageSize = memPageSize;
-        this.memNumPages = memNumPages;
-        this.hyracksFrameSize = hyracksFrameSize;
-        this.ioScheduler = SynchronousScheduler.INSTANCE;
-        this.mergePolicy = NoMergePolicy.INSTANCE;
-        this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
-    }
-
-    public void setUp() throws HyracksException {
-        onDiskDir = "lsm_rtree_" + simpleDateFormat.format(new Date()) + sep;
-        file = new FileReference(new File(onDiskDir));
-        ctx = TestUtils.create(getHyracksFrameSize());
-        TestStorageManagerComponentHolder.init(diskPageSize, diskNumPages, diskMaxOpenFiles);
-        diskBufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        diskFileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        memBufferCache = new DualIndexInMemoryBufferCache(new HeapBufferAllocator(), memPageSize, memNumPages);
-        memFreePageManager = new DualIndexInMemoryFreePageManager(memNumPages, new LIFOMetaDataFrameFactory());
-        ioManager = TestStorageManagerComponentHolder.getIOManager();
-        rnd.setSeed(RANDOM_SEED);
-    }
-
-    public void tearDown() throws HyracksDataException {
-        diskBufferCache.close();
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File dir = new File(dev.getPath(), onDiskDir);
-            FilenameFilter filter = new FilenameFilter() {
-                public boolean accept(File dir, String name) {
-                    return !name.startsWith(".");
-                }
-            };
-            String[] files = dir.list(filter);
-            if (files != null) {
-                for (String fileName : files) {
-                    File file = new File(dir.getPath() + File.separator + fileName);
-                    file.delete();
-                }
-            }
-            dir.delete();
-        }
-    }
-
-    public int getDiskPageSize() {
-        return diskPageSize;
-    }
-
-    public int getDiskNumPages() {
-        return diskNumPages;
-    }
-
-    public int getDiskMaxOpenFiles() {
-        return diskMaxOpenFiles;
-    }
-
-    public int getMemPageSize() {
-        return memPageSize;
-    }
-
-    public int getMemNumPages() {
-        return memNumPages;
-    }
-
-    public int getHyracksFrameSize() {
-        return hyracksFrameSize;
-    }
-
-    public IOManager getIOManager() {
-        return ioManager;
-    }
-
-    public IBufferCache getDiskBufferCache() {
-        return diskBufferCache;
-    }
-
-    public IFileMapProvider getDiskFileMapProvider() {
-        return diskFileMapProvider;
-    }
-
-    public IInMemoryBufferCache getMemBufferCache() {
-        return memBufferCache;
-    }
-
-    public IInMemoryFreePageManager getMemFreePageManager() {
-        return memFreePageManager;
-    }
-
-    public IHyracksTaskContext getHyracksTastContext() {
-        return ctx;
-    }
-
-    public String getOnDiskDir() {
-        return onDiskDir;
-    }
-
-    public FileReference getFileReference() {
-        return file;
-    }
-
-    public Random getRandom() {
-        return rnd;
-    }
-
-    public ILSMIOOperationScheduler getIOScheduler() {
-        return ioScheduler;
-    }
-
-    public ILSMOperationTrackerFactory getOperationTrackerFactory() {
-        return opTrackerFactory;
-    }
-
-    public ILSMMergePolicy getMergePolicy() {
-        return mergePolicy;
-    }
-
-    public ILSMIOOperationCallbackProvider getIOOperationCallbackProvider() {
-        return ioOpCallbackProvider;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeWithAntiMatterTuplesTestContext.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeWithAntiMatterTuplesTestContext.java
deleted file mode 100644
index 5860236..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeWithAntiMatterTuplesTestContext.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.util;
-
-import java.util.Collection;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.control.nc.io.IOManager;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IInMemoryBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeWithAntiMatterTuples;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.RTreeCheckTuple;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-@SuppressWarnings("rawtypes")
-public final class LSMRTreeWithAntiMatterTuplesTestContext extends AbstractRTreeTestContext {
-
-    public LSMRTreeWithAntiMatterTuplesTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
-        super(fieldSerdes, treeIndex);
-    }
-
-    @Override
-    public int getKeyFieldCount() {
-        LSMRTreeWithAntiMatterTuples lsmTree = (LSMRTreeWithAntiMatterTuples) index;
-        return lsmTree.getComparatorFactories().length;
-    }
-
-    /**
-     * Override to provide delete semantics for the check tuples.
-     */
-    @Override
-    public void deleteCheckTuple(RTreeCheckTuple checkTuple, Collection<RTreeCheckTuple> checkTuples) {
-        while (checkTuples.remove(checkTuple)) {
-        }
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        LSMRTreeWithAntiMatterTuples lsmTree = (LSMRTreeWithAntiMatterTuples) index;
-        return lsmTree.getComparatorFactories();
-    }
-
-    public static LSMRTreeWithAntiMatterTuplesTestContext create(IInMemoryBufferCache memBufferCache,
-            IInMemoryFreePageManager memFreePageManager, IOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeyFields, RTreePolicyType rtreePolicyType,
-            ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
-            throws Exception {
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils
-                .serdesToComparatorFactories(fieldSerdes, numKeyFields);
-        IBinaryComparatorFactory[] btreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
-                fieldSerdes.length);
-        LSMRTreeWithAntiMatterTuples lsmTree = LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(memBufferCache,
-                memFreePageManager, ioManager, file, diskBufferCache, diskFileMapProvider, typeTraits,
-                rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, mergePolicy,
-                opTrackerFactory, ioScheduler, ioOpCallbackProvider,
-                LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
-        LSMRTreeWithAntiMatterTuplesTestContext testCtx = new LSMRTreeWithAntiMatterTuplesTestContext(fieldSerdes,
-                lsmTree);
-        return testCtx;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
deleted file mode 100644
index dd12f56..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-am-rtree-test</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-tests</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-rtree</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
deleted file mode 100644
index 4eeec1a..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class RTreeBulkLoadTest extends AbstractRTreeBulkLoadTest {
-
-    public RTreeBulkLoadTest() {
-        super(1, AccessMethodTestsConfig.RTREE_TEST_RSTAR_POLICY);
-    }
-
-    private final RTreeTestHarness harness = new RTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return RTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java
deleted file mode 100644
index f4e45a8..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class RTreeDeleteTest extends AbstractRTreeDeleteTest {
-
-	private final RTreeTestHarness harness = new RTreeTestHarness();
-
-	public RTreeDeleteTest() {
-		super(AccessMethodTestsConfig.RTREE_TEST_RSTAR_POLICY);
-	}
-	
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return RTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java
deleted file mode 100644
index 84c8642..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
-
-public class RTreeExamplesTest extends AbstractRTreeExamplesTest {
-    private final RTreeTestHarness harness = new RTreeTestHarness();
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType) throws TreeIndexException {
-        return RTreeUtils.createRTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits,
-                valueProviderFactories, rtreeCmpFactories, rtreePolicyType, harness.getFileReference());
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java
deleted file mode 100644
index 065f687..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.Random;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
-
-@SuppressWarnings("rawtypes")
-public class RTreeInsertTest extends AbstractRTreeInsertTest {
-
-	private final RTreeTestHarness harness = new RTreeTestHarness();
-
-	public RTreeInsertTest() {
-		super(AccessMethodTestsConfig.RTREE_TEST_RSTAR_POLICY);
-	}
-	
-    @Before
-    public void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @After
-    public void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        return RTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType);
-    }
-
-    @Override
-    protected Random getRandom() {
-        return harness.getRandom();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeLifecycleTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeLifecycleTest.java
deleted file mode 100644
index 69c7e15..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeLifecycleTest.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexLifecycleTest;
-import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestContext;
-import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
-
-public class RTreeLifecycleTest extends AbstractIndexLifecycleTest {
-    private final RTreeTestHarness harness = new RTreeTestHarness();
-    private final TreeIndexTestUtils titu = new RTreeTestUtils();
-
-    @SuppressWarnings("rawtypes")
-    private final ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-    private final IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
-            .createPrimitiveValueProviderFactories(4, IntegerPointable.FACTORY);
-    private final int numKeys = 4;
-
-    @SuppressWarnings("rawtypes")
-    private IIndexTestContext<? extends CheckTuple> testCtx;
-    private ITreeIndexFrame frame = null;
-
-    @Override
-    public void setup() throws Exception {
-        harness.setUp();
-        testCtx = RTreeTestContext.create(harness.getBufferCache(), harness.getFileMapProvider(),
-                harness.getFileReference(), fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE);
-        index = testCtx.getIndex();
-    }
-
-    @Override
-    public void tearDown() throws HyracksDataException {
-        testCtx.getIndex().deactivate();
-        testCtx.getIndex().destroy();
-        harness.tearDown();
-    }
-
-    @Override
-    protected boolean persistentStateExists() {
-        return harness.getFileReference().getFile().exists();
-    }
-
-    @Override
-    protected boolean isEmptyIndex() throws HyracksDataException {
-        RTree rtree = (RTree) testCtx.getIndex();
-        if (frame == null) {
-            frame = rtree.getInteriorFrameFactory().createFrame();
-        }
-        return rtree.isEmptyTree(frame);
-    }
-
-    @Override
-    protected void performInsertions() throws Exception {
-        titu.insertIntTuples(testCtx, 10, harness.getRandom());
-    }
-
-    @Override
-    protected void checkInsertions() throws Exception {
-        titu.checkScan(testCtx);
-    }
-
-    @Override
-    protected void clearCheckableInsertions() throws Exception {
-        testCtx.getCheckTuples().clear();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
deleted file mode 100644
index 06d18bb..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.util.ArrayList;
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.util.HashMultiSet;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.AbstractRTreeTest;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-public class RTreeSearchCursorTest extends AbstractRTreeTest {
-
-    private final RTreeTestUtils rTreeTestUtils;
-    private Random rnd = new Random(50);
-
-    public RTreeSearchCursorTest() {
-        this.rTreeTestUtils = new RTreeTestUtils();
-    }
-
-    @Before
-    public void setUp() throws HyracksDataException {
-        super.setUp();
-    }
-
-    @SuppressWarnings({ "unchecked", "rawtypes" })
-    @Test
-    public void rangeSearchTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
-        }
-
-        IBufferCache bufferCache = harness.getBufferCache();
-
-        // Declare fields.
-        int fieldCount = 5;
-        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
-        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
-        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
-        // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
-        // Declare keys.
-        int keyFieldCount = 4;
-        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
-        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        cmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-
-        // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                cmpFactories.length, IntegerPointable.FACTORY);
-
-        RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
-                valueProviderFactories, RTreePolicyType.RTREE);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
-                valueProviderFactories, RTreePolicyType.RTREE);
-
-        IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
-        IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
-
-        RTree rtree = new RTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
-                leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
-        rtree.create();
-        rtree.activate();
-
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-        ArrayTupleReference tuple = new ArrayTupleReference();
-        ITreeIndexAccessor indexAccessor = rtree.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
-        int numInserts = 10000;
-        ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<RTreeCheckTuple>();
-        for (int i = 0; i < numInserts; i++) {
-            int p1x = rnd.nextInt();
-            int p1y = rnd.nextInt();
-            int p2x = rnd.nextInt();
-            int p2y = rnd.nextInt();
-
-            int pk = rnd.nextInt();;
-
-            TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
-                    Math.max(p1y, p2y), pk);
-            try {
-                indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            }
-            RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
-            checkTuple.appendField(Math.min(p1x, p2x));
-            checkTuple.appendField(Math.min(p1y, p2y));
-            checkTuple.appendField(Math.max(p1x, p2x));
-            checkTuple.appendField(Math.max(p1y, p2y));
-            checkTuple.appendField(pk);
-
-            checkTuples.add(checkTuple);
-        }
-
-        // Build key.
-        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(keyFieldCount);
-        ArrayTupleReference key = new ArrayTupleReference();
-        TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
-
-        MultiComparator cmp = MultiComparator.create(cmpFactories);
-        ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
-        SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
-
-        RTreeCheckTuple keyCheck = (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes,
-                keyFieldCount);
-        HashMultiSet<RTreeCheckTuple> expectedResult = rTreeTestUtils.getRangeSearchExpectedResults(checkTuples,
-                keyCheck);
-
-        rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
-        indexAccessor.search(searchCursor, searchPredicate);
-
-        rTreeTestUtils.checkExpectedResults(searchCursor, expectedResult, fieldSerdes, keyFieldCount, null);
-
-        rtree.deactivate();
-        rtree.destroy();
-    }
-
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
deleted file mode 100644
index ee245e8..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.multithread;
-
-import java.util.ArrayList;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.ProbabilityHelper;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeMultiThreadTest;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
-
-public class RTreeMultiThreadTest extends AbstractRTreeMultiThreadTest {
-
-    public RTreeMultiThreadTest() {
-        super(true);
-    }
-
-    private RTreeTestHarness harness = new RTreeTestHarness();
-
-    private RTreeTestWorkerFactory workerFactory = new RTreeTestWorkerFactory();
-
-    @Override
-    protected void setUp() throws HyracksDataException {
-        harness.setUp();
-    }
-
-    @Override
-    protected void tearDown() throws HyracksDataException {
-        harness.tearDown();
-    }
-
-    @Override
-    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
-            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType) throws TreeIndexException {
-        return RTreeUtils.createRTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits,
-                valueProviderFactories, rtreeCmpFactories, rtreePolicyType, harness.getFileReference());
-
-    }
-
-    @Override
-    protected IIndexTestWorkerFactory getWorkerFactory() {
-        return workerFactory;
-    }
-
-    @Override
-    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
-        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
-
-        // Insert only workload.
-        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
-
-        // Inserts mixed with scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN,
-                TestOperation.DISKORDER_SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
-
-        // Inserts and deletes.
-        TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteOps.length)));
-
-        // All operations mixed.
-        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN,
-                TestOperation.DISKORDER_SCAN };
-        workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
-
-        return workloadConfs;
-    }
-
-    @Override
-    protected String getIndexTypeName() {
-        return "RTree";
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
deleted file mode 100644
index 3d0381b..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.multithread;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-
-public class RTreeTestWorker extends AbstractIndexTestWorker {
-
-    private final RTree rtree;
-    private final int numFields;
-    private final ArrayTupleReference rearrangedTuple = new ArrayTupleReference();
-    private final ArrayTupleBuilder rearrangedTb;
-
-    public RTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index, int numBatches) {
-        super(dataGen, opSelector, index, numBatches);
-        rtree = (RTree) index;
-        numFields = rtree.getFieldCount();
-        rearrangedTb = new ArrayTupleBuilder(numFields);
-    }
-
-    @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
-        RTree.RTreeAccessor accessor = (RTree.RTreeAccessor) indexAccessor;
-        IIndexCursor searchCursor = accessor.createSearchCursor();
-        ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
-        MultiComparator cmp = accessor.getOpContext().cmp;
-        SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
-
-        switch (op) {
-            case INSERT:
-                rearrangeTuple(tuple, cmp);
-                accessor.insert(rearrangedTuple);
-                break;
-
-            case DELETE:
-                rearrangeTuple(tuple, cmp);
-                accessor.delete(rearrangedTuple);
-                break;
-
-            case SCAN:
-                searchCursor.reset();
-                rangePred.setSearchKey(null);
-                accessor.search(searchCursor, rangePred);
-                consumeCursorTuples(searchCursor);
-                break;
-
-            case DISKORDER_SCAN:
-                diskOrderScanCursor.reset();
-                accessor.diskOrderScan(diskOrderScanCursor);
-                consumeCursorTuples(diskOrderScanCursor);
-                break;
-
-            default:
-                throw new HyracksDataException("Op " + op.toString() + " not supported.");
-        }
-    }
-
-    private void rearrangeTuple(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
-        // Create a tuple with rearranged key values to make sure lower points
-        // have larger coordinates than high points.
-        rearrangedTb.reset();
-        int maxFieldPos = cmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            } else {
-                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-            }
-        }
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-            } else {
-                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            }
-        }
-        for (int i = cmp.getKeyFieldCount(); i < numFields; i++) {
-            rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
-        }
-        rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
deleted file mode 100644
index 36b47bb..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.multithread;
-
-import edu.uci.ics.hyracks.storage.am.common.AbstractIndexTestWorker;
-import edu.uci.ics.hyracks.storage.am.common.IIndexTestWorkerFactory;
-import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
-
-public class RTreeTestWorkerFactory implements IIndexTestWorkerFactory {
-    @Override
-    public AbstractIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) {
-        return new RTreeTestWorker(dataGen, opSelector, index, numBatches);
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java
deleted file mode 100644
index 8c1f0aa..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.utils;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-@SuppressWarnings("rawtypes")
-public class RTreeTestContext extends AbstractRTreeTestContext {
-
-    public RTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
-        super(fieldSerdes, treeIndex);
-    }
-
-    @Override
-    public int getKeyFieldCount() {
-        RTree rtree = (RTree) index;
-        return rtree.getComparatorFactories().length;
-    }
-
-    @Override
-    public IBinaryComparatorFactory[] getComparatorFactories() {
-        RTree rtree = (RTree) index;
-        return rtree.getComparatorFactories();
-    }
-
-    public static RTreeTestContext create(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            FileReference file, ISerializerDeserializer[] fieldSerdes,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeyFields, RTreePolicyType rtreePolicyType)
-            throws Exception {
-        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
-        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
-        RTree rtree = RTreeUtils.createRTree(bufferCache, fileMapProvider, typeTraits, valueProviderFactories,
-                cmpFactories, rtreePolicyType, file);
-        RTreeTestContext testCtx = new RTreeTestContext(fieldSerdes, rtree);
-        return testCtx;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
deleted file mode 100644
index e324a7f..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.utils;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.config.AccessMethodTestsConfig;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class RTreeTestHarness {
-
-    private static final long RANDOM_SEED = 50;
-
-    protected final int pageSize;
-    protected final int numPages;
-    protected final int maxOpenFiles;
-    protected final int hyracksFrameSize;
-
-    protected IHyracksTaskContext ctx;
-    protected IBufferCache bufferCache;
-    protected IFileMapProvider fileMapProvider;
-    protected int treeFileId;
-
-    protected final Random rnd = new Random();
-    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final String sep = System.getProperty("file.separator");
-    protected String fileName;
-    protected FileReference file;
-
-    public RTreeTestHarness() {
-        this.pageSize = AccessMethodTestsConfig.RTREE_PAGE_SIZE;
-        this.numPages = AccessMethodTestsConfig.RTREE_NUM_PAGES;
-        this.maxOpenFiles = AccessMethodTestsConfig.RTREE_MAX_OPEN_FILES;
-        this.hyracksFrameSize = AccessMethodTestsConfig.RTREE_HYRACKS_FRAME_SIZE;
-    }
-
-    public RTreeTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
-        this.pageSize = pageSize;
-        this.numPages = numPages;
-        this.maxOpenFiles = maxOpenFiles;
-        this.hyracksFrameSize = hyracksFrameSize;
-    }
-
-    public void setUp() throws HyracksDataException {
-        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-        file = new FileReference(new File(fileName));
-        ctx = TestUtils.create(getHyracksFrameSize());
-        TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
-        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        rnd.setSeed(RANDOM_SEED);
-    }
-
-    public void tearDown() throws HyracksDataException {
-        bufferCache.close();
-        File f = new File(fileName);
-        f.deleteOnExit();
-    }
-
-    public IHyracksTaskContext getHyracksTaskContext() {
-        return ctx;
-    }
-
-    public IBufferCache getBufferCache() {
-        return bufferCache;
-    }
-
-    public IFileMapProvider getFileMapProvider() {
-        return fileMapProvider;
-    }
-
-    public String getFileName() {
-        return fileName;
-    }
-
-    public Random getRandom() {
-        return rnd;
-    }
-
-    public int getPageSize() {
-        return pageSize;
-    }
-
-    public int getNumPages() {
-        return numPages;
-    }
-
-    public int getHyracksFrameSize() {
-        return hyracksFrameSize;
-    }
-
-    public int getMaxOpenFiles() {
-        return maxOpenFiles;
-    }
-
-    public FileReference getFileReference() {
-        return file;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks-tests/hyracks-storage-common-test/pom.xml
deleted file mode 100644
index 9bb0da1..0000000
--- a/hyracks-tests/hyracks-storage-common-test/pom.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-storage-common-test</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-tests</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-tests/pom.xml b/hyracks-tests/pom.xml
deleted file mode 100644
index 4011339..0000000
--- a/hyracks-tests/pom.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks-tests</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.2-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>hyracks-storage-common-test</module>
-    <module>hyracks-storage-am-btree-test</module>
-    <module>hyracks-storage-am-rtree-test</module>
-    <module>hyracks-storage-am-lsm-common-test</module>
-    <module>hyracks-storage-am-lsm-btree-test</module>
-    <module>hyracks-storage-am-lsm-rtree-test</module>
-    <module>hyracks-storage-am-lsm-invertedindex-test</module>
-    <module>hyracks-storage-am-bloomfilter-test</module>
-  </modules>
-</project>
diff --git a/hyracks-yarn/hyracks-yarn-am/pom.xml b/hyracks-yarn/hyracks-yarn-am/pom.xml
deleted file mode 100644
index 9e453a6..0000000
--- a/hyracks-yarn/hyracks-yarn-am/pom.xml
+++ /dev/null
@@ -1,74 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-yarn-am</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-yarn</artifactId>
-    <version>0.2.1-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.yarn.am.HyracksYarnApplicationMaster</mainClass>
-                  <name>hyracks-yarn-am</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>args4j</groupId>
-  	<artifactId>args4j</artifactId>
-  	<version>2.0.16</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-yarn-common</artifactId>
-  	<version>0.2.1-SNAPSHOT</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-yarn/hyracks-yarn-client/pom.xml b/hyracks-yarn/hyracks-yarn-client/pom.xml
deleted file mode 100644
index 08935a7..0000000
--- a/hyracks-yarn/hyracks-yarn-client/pom.xml
+++ /dev/null
@@ -1,85 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-yarn-client</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-yarn</artifactId>
-    <version>0.2.1-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>appassembler-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <configuration>
-              <programs>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.yarn.client.LaunchHyracksApplication</mainClass>
-                  <name>launch-hyracks-application</name>
-                </program>
-                <program>
-                  <mainClass>edu.uci.ics.hyracks.yarn.client.KillHyracksApplication</mainClass>
-                  <name>kill-hyracks-application</name>
-                </program>
-              </programs>
-              <repositoryLayout>flat</repositoryLayout>
-              <repositoryName>lib</repositoryName>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>assemble</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>args4j</groupId>
-  	<artifactId>args4j</artifactId>
-  	<version>2.0.16</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-yarn-common</artifactId>
-  	<version>0.2.1-SNAPSHOT</version>
-  </dependency>
-  <dependency>
-  	<groupId>edu.uci.ics.hyracks</groupId>
-  	<artifactId>hyracks-yarn-am</artifactId>
-  	<version>0.2.1-SNAPSHOT</version>
-    <type>zip</type>
-    <classifier>binary-assembly</classifier>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-yarn/hyracks-yarn-common/pom.xml b/hyracks-yarn/hyracks-yarn-common/pom.xml
deleted file mode 100644
index 3aaf4a2..0000000
--- a/hyracks-yarn/hyracks-yarn-common/pom.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-yarn-common</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-yarn</artifactId>
-    <version>0.2.1-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  <dependency>
-  	<groupId>org.apache.hadoop</groupId>
-  	<artifactId>hadoop-yarn-api</artifactId>
-  	<version>2.0.0-alpha</version>
-  </dependency>
-  <dependency>
-  	<groupId>org.apache.hadoop</groupId>
-  	<artifactId>hadoop-yarn-common</artifactId>
-  	<version>2.0.0-alpha</version>
-  </dependency>
-  <dependency>
-  	<groupId>org.apache.hadoop</groupId>
-  	<artifactId>hadoop-common</artifactId>
-  	<version>2.0.0-alpha</version>
-  </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks-yarn/pom.xml b/hyracks-yarn/pom.xml
deleted file mode 100644
index af4b0b1..0000000
--- a/hyracks-yarn/pom.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>hyracks-yarn</artifactId>
-  <packaging>pom</packaging>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks</artifactId>
-    <version>0.2.1-SNAPSHOT</version>
-  </parent>
-
-  <modules>
-    <module>hyracks-yarn-common</module>
-    <module>hyracks-yarn-client</module>
-    <module>hyracks-yarn-am</module>
-  </modules>
-</project>
diff --git a/hyracks/hyracks-api/pom.xml b/hyracks/hyracks-api/pom.xml
new file mode 100644
index 0000000..09307e4
--- /dev/null
+++ b/hyracks/hyracks-api/pom.xml
@@ -0,0 +1,57 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-api</artifactId>
+  <name>hyracks-api</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>org.json</groupId>
+  		<artifactId>json</artifactId>
+  		<version>20090211</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>org.apache.httpcomponents</groupId>
+  		<artifactId>httpclient</artifactId>
+  		<version>4.1-alpha2</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>args4j</groupId>
+  		<artifactId>args4j</artifactId>
+  		<version>2.0.12</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-ipc</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  	<dependency>
+  		<groupId>org.apache.commons</groupId>
+  		<artifactId>commons-lang3</artifactId>
+  		<version>3.1</version>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IBootstrap.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IBootstrap.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IBootstrap.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IBootstrap.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCBootstrap.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCBootstrap.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCBootstrap.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCBootstrap.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCApplicationContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCApplicationContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCApplicationContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCApplicationContext.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCBootstrap.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCBootstrap.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCBootstrap.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/INCBootstrap.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannelMonitor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannelMonitor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannelMonitor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannelMonitor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/ClusterControllerInfo.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/ClusterControllerInfo.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/ClusterControllerInfo.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/ClusterControllerInfo.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeStatus.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeStatus.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeStatus.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeStatus.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/ActivityClusterGraphBuilder.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IConnectorDescriptorVisitor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IConnectorDescriptorVisitor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IConnectorDescriptorVisitor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IConnectorDescriptorVisitor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IOperatorDescriptorVisitor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IOperatorDescriptorVisitor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IOperatorDescriptorVisitor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/IOperatorDescriptorVisitor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobActivityGraphBuilder.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobActivityGraphBuilder.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobActivityGraphBuilder.java
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java
new file mode 100644
index 0000000..f36b7b3
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java
@@ -0,0 +1,90 @@
+package edu.uci.ics.hyracks.api.client.impl;
+
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.Set;
+
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
+import edu.uci.ics.hyracks.api.constraints.Constraint;
+import edu.uci.ics.hyracks.api.constraints.IConstraintAcceptor;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
+import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGenerator;
+import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
+import edu.uci.ics.hyracks.api.job.JobActivityGraph;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class JobSpecificationActivityClusterGraphGeneratorFactory implements IActivityClusterGraphGeneratorFactory {
+    private static final long serialVersionUID = 1L;
+
+    private final JobSpecification spec;
+
+    public JobSpecificationActivityClusterGraphGeneratorFactory(JobSpecification jobSpec) {
+        this.spec = jobSpec;
+    }
+
+    @Override
+    public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(String appName, JobId jobId,
+            final ICCApplicationContext ccAppCtx, EnumSet<JobFlag> jobFlags) throws HyracksException {
+        final JobActivityGraphBuilder builder = new JobActivityGraphBuilder(spec, jobFlags);
+        PlanUtils.visit(spec, new IConnectorDescriptorVisitor() {
+            @Override
+            public void visit(IConnectorDescriptor conn) throws HyracksException {
+                builder.addConnector(conn);
+            }
+        });
+        PlanUtils.visit(spec, new IOperatorDescriptorVisitor() {
+            @Override
+            public void visit(IOperatorDescriptor op) {
+                op.contributeActivities(builder);
+            }
+        });
+        builder.finish();
+        final JobActivityGraph jag = builder.getActivityGraph();
+        ActivityClusterGraphBuilder acgb = new ActivityClusterGraphBuilder();
+
+        final ActivityClusterGraph acg = acgb.inferActivityClusters(jobId, jag);
+        acg.setFrameSize(spec.getFrameSize());
+        acg.setMaxReattempts(spec.getMaxReattempts());
+        acg.setJobletEventListenerFactory(spec.getJobletEventListenerFactory());
+        acg.setGlobalJobDataFactory(spec.getGlobalJobDataFactory());
+        acg.setConnectorPolicyAssignmentPolicy(spec.getConnectorPolicyAssignmentPolicy());
+        acg.setUseConnectorPolicyForScheduling(spec.isUseConnectorPolicyForScheduling());
+        final Set<Constraint> constraints = new HashSet<Constraint>();
+        final IConstraintAcceptor acceptor = new IConstraintAcceptor() {
+            @Override
+            public void addConstraint(Constraint constraint) {
+                constraints.add(constraint);
+            }
+        };
+        PlanUtils.visit(spec, new IOperatorDescriptorVisitor() {
+            @Override
+            public void visit(IOperatorDescriptor op) {
+                op.contributeSchedulingConstraints(acceptor, ccAppCtx);
+            }
+        });
+        PlanUtils.visit(spec, new IConnectorDescriptorVisitor() {
+            @Override
+            public void visit(IConnectorDescriptor conn) {
+                conn.contributeSchedulingConstraints(acceptor, acg.getConnectorMap().get(conn.getConnectorId()),
+                        ccAppCtx);
+            }
+        });
+        constraints.addAll(spec.getUserConstraints());
+        return new IActivityClusterGraphGenerator() {
+            @Override
+            public ActivityClusterGraph initialize() {
+                return acg;
+            }
+
+            @Override
+            public Set<Constraint> getConstraints() {
+                return constraints;
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/PlanUtils.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/PlanUtils.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/PlanUtils.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/PlanUtils.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameConstants.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameConstants.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameConstants.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameConstants.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameHelper.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameHelper.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameHelper.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/FrameHelper.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameReader.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameReader.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameReader.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameReader.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameTupleAccessor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameTupleAccessor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameTupleAccessor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameTupleAccessor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameWriter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameWriter.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameWriter.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IFrameWriter.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionCollector.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionCollector.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionCollector.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionCollector.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionWriterFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionWriterFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionWriterFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/IPartitionWriterFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/NetworkAddress.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/NetworkAddress.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/NetworkAddress.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/NetworkAddress.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/PartitionChannel.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/PartitionChannel.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/PartitionChannel.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/comm/PartitionChannel.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/Constraint.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/Constraint.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/Constraint.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/Constraint.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/IConstraintAcceptor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/IConstraintAcceptor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/IConstraintAcceptor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/IConstraintAcceptor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/PartitionConstraintHelper.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/PartitionConstraintHelper.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/PartitionConstraintHelper.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/PartitionConstraintHelper.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstantExpression.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstantExpression.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstantExpression.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstantExpression.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstraintExpression.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstraintExpression.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstraintExpression.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/ConstraintExpression.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/LValueConstraintExpression.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/LValueConstraintExpression.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/LValueConstraintExpression.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/LValueConstraintExpression.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionCountExpression.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionCountExpression.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionCountExpression.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionCountExpression.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionLocationExpression.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionLocationExpression.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionLocationExpression.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/constraints/expressions/PartitionLocationExpression.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/ICCContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/ICCContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/ICCContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/ICCContext.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksCommonContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksCommonContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksCommonContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksCommonContext.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksRootContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksRootContext.java
new file mode 100644
index 0000000..c4989c5
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksRootContext.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.context;
+
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+
+public interface IHyracksRootContext {
+    public IIOManager getIOManager();
+
+    public Map<String, NodeControllerInfo> getNodeControllerInfos() throws Exception;
+}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ActivityId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ActivityId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ActivityId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ActivityId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ConnectorDescriptorId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ConnectorDescriptorId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ConnectorDescriptorId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/ConnectorDescriptorId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivity.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivity.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivity.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivity.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivityGraphBuilder.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivityGraphBuilder.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivityGraphBuilder.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IActivityGraphBuilder.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IConnectorDescriptor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IConnectorDescriptor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IConnectorDescriptor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IConnectorDescriptor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataReader.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataReader.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataReader.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataReader.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataWriter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataWriter.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataWriter.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IDataWriter.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataReader.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataReader.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataReader.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataReader.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataWriter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataWriter.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataWriter.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOpenableDataWriter.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorDescriptor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorDescriptor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorDescriptor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorDescriptor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePullable.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePullable.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePullable.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePullable.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePushable.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePushable.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePushable.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/IOperatorNodePushable.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorDescriptorId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorDescriptorId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorDescriptorId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorDescriptorId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorInstanceId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorInstanceId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorInstanceId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/OperatorInstanceId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskAttemptId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskAttemptId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskAttemptId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskAttemptId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/TaskId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicyAssignmentPolicy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicyAssignmentPolicy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicyAssignmentPolicy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/IConnectorPolicyAssignmentPolicy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/PipeliningConnectorPolicy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/PipeliningConnectorPolicy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/PipeliningConnectorPolicy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/PipeliningConnectorPolicy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedBlockingConnectorPolicy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedBlockingConnectorPolicy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedBlockingConnectorPolicy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedBlockingConnectorPolicy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedPipeliningConnectorPolicy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedPipeliningConnectorPolicy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedPipeliningConnectorPolicy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedPipeliningConnectorPolicy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedPipeliningConnectorPolicy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedPipeliningConnectorPolicy.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedPipeliningConnectorPolicy.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/connectors/SendSideMaterializedReceiveSideMaterializedPipeliningConnectorPolicy.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/state/IStateObject.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/state/IStateObject.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/state/IStateObject.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/state/IStateObject.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparator.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparator.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparator.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparator.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparatorFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparatorFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparatorFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryComparatorFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunction.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunction.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunction.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunction.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IBinaryHashFunctionFamily.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparator.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparator.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparator.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparator.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparatorFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparatorFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparatorFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IComparatorFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunction.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunction.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunction.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunction.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunctionFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunctionFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunctionFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IHashFunctionFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputer.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputer.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputer.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputerFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputerFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputerFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INormalizedKeyComputerFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriter.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriter.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriter.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriterFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriterFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriterFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/INullWriterFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IRecordDescriptorProvider.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IRecordDescriptorProvider.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IRecordDescriptorProvider.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IRecordDescriptorProvider.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ISerializerDeserializer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ISerializerDeserializer.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ISerializerDeserializer.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ISerializerDeserializer.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparator.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparator.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparator.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparator.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparatorFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparatorFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparatorFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePairComparatorFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputer.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputer.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputer.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFamily.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFamily.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFamily.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITuplePartitionComputerFamily.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTraits.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTraits.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTraits.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTraits.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/RecordDescriptor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/RecordDescriptor.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/RecordDescriptor.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/RecordDescriptor.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksDataException.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksDataException.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksDataException.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksDataException.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksException.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksException.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksException.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/exceptions/HyracksException.java
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/FileReference.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/FileReference.java
new file mode 100644
index 0000000..9e27077
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/FileReference.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.io;
+
+import java.io.File;
+import java.io.Serializable;
+
+public final class FileReference implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    private final File file;
+
+    public FileReference(IODeviceHandle dev, String devRelPath) {
+        file = new File(dev.getPath(), devRelPath);
+    }
+
+    public FileReference(File file) {
+        this.file = file;
+    }
+
+    public File getFile() {
+        return file;
+    }
+
+    @Override
+    public String toString() {
+        return file.getAbsolutePath();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof FileReference)) {
+            return false;
+        }
+        return file.equals(((FileReference) o).file);
+    }
+
+    @Override
+    public int hashCode() {
+        return file.hashCode();
+    }
+
+    public void delete() {
+        file.delete();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IFileHandle.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IFileHandle.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IFileHandle.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IFileHandle.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOFuture.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOFuture.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOFuture.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOFuture.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOManager.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOManager.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOManager.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IIOManager.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IODeviceHandle.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IODeviceHandle.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IODeviceHandle.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IODeviceHandle.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IWorkspaceFileFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IWorkspaceFileFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IWorkspaceFileFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/io/IWorkspaceFileFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterGraph.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterGraph.java
new file mode 100644
index 0000000..32c93cd
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterGraph.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.job;
+
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
+
+public class ActivityClusterGraph implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    private int version;
+
+    private final Map<ActivityClusterId, ActivityCluster> activityClusterMap;
+
+    private final Map<ActivityId, ActivityCluster> activityMap;
+
+    private final Map<ConnectorDescriptorId, ActivityCluster> connectorMap;
+
+    private int frameSize;
+
+    private int maxReattempts;
+
+    private IJobletEventListenerFactory jobletEventListenerFactory;
+
+    private IGlobalJobDataFactory globalJobDataFactory;
+
+    private IConnectorPolicyAssignmentPolicy connectorPolicyAssignmentPolicy;
+
+    private boolean useConnectorPolicyForScheduling;
+
+    public ActivityClusterGraph() {
+        version = 0;
+        activityClusterMap = new HashMap<ActivityClusterId, ActivityCluster>();
+        activityMap = new HashMap<ActivityId, ActivityCluster>();
+        connectorMap = new HashMap<ConnectorDescriptorId, ActivityCluster>();
+        frameSize = 32768;
+    }
+
+    public Map<ActivityId, ActivityCluster> getActivityMap() {
+        return activityMap;
+    }
+
+    public Map<ConnectorDescriptorId, ActivityCluster> getConnectorMap() {
+        return connectorMap;
+    }
+
+    public Map<ActivityClusterId, ActivityCluster> getActivityClusterMap() {
+        return activityClusterMap;
+    }
+
+    public void addActivityClusters(Collection<ActivityCluster> newActivityClusters) {
+        for (ActivityCluster ac : newActivityClusters) {
+            activityClusterMap.put(ac.getId(), ac);
+            for (ActivityId aid : ac.getActivityMap().keySet()) {
+                activityMap.put(aid, ac);
+            }
+            for (ConnectorDescriptorId cid : ac.getConnectorMap().keySet()) {
+                connectorMap.put(cid, ac);
+            }
+        }
+        ++version;
+    }
+
+    public int getVersion() {
+        return version;
+    }
+
+    public void setFrameSize(int frameSize) {
+        this.frameSize = frameSize;
+    }
+
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+    public void setMaxReattempts(int maxReattempts) {
+        this.maxReattempts = maxReattempts;
+    }
+
+    public int getMaxReattempts() {
+        return maxReattempts;
+    }
+
+    public IJobletEventListenerFactory getJobletEventListenerFactory() {
+        return jobletEventListenerFactory;
+    }
+
+    public void setJobletEventListenerFactory(IJobletEventListenerFactory jobletEventListenerFactory) {
+        this.jobletEventListenerFactory = jobletEventListenerFactory;
+    }
+
+    public IGlobalJobDataFactory getGlobalJobDataFactory() {
+        return globalJobDataFactory;
+    }
+
+    public void setGlobalJobDataFactory(IGlobalJobDataFactory globalJobDataFactory) {
+        this.globalJobDataFactory = globalJobDataFactory;
+    }
+
+    public IConnectorPolicyAssignmentPolicy getConnectorPolicyAssignmentPolicy() {
+        return connectorPolicyAssignmentPolicy;
+    }
+
+    public void setConnectorPolicyAssignmentPolicy(IConnectorPolicyAssignmentPolicy connectorPolicyAssignmentPolicy) {
+        this.connectorPolicyAssignmentPolicy = connectorPolicyAssignmentPolicy;
+    }
+
+    public boolean isUseConnectorPolicyForScheduling() {
+        return useConnectorPolicyForScheduling;
+    }
+
+    public void setUseConnectorPolicyForScheduling(boolean useConnectorPolicyForScheduling) {
+        this.useConnectorPolicyForScheduling = useConnectorPolicyForScheduling;
+    }
+
+    public JSONObject toJSON() throws JSONException {
+        JSONObject acgj = new JSONObject();
+
+        JSONArray acl = new JSONArray();
+        for (ActivityCluster ac : activityClusterMap.values()) {
+            acl.put(ac.toJSON());
+        }
+        acgj.put("version", version);
+        acgj.put("activity-clusters", acl);
+        return acgj;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityClusterId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGenerator.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGenerator.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGenerator.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGenerator.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IConnectorDescriptorRegistry.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IConnectorDescriptorRegistry.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IConnectorDescriptorRegistry.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IConnectorDescriptorRegistry.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IGlobalJobDataFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IGlobalJobDataFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IGlobalJobDataFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IGlobalJobDataFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobLifecycleListener.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobLifecycleListener.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobLifecycleListener.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobLifecycleListener.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListener.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListener.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListener.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListener.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListenerFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListenerFactory.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListenerFactory.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobletEventListenerFactory.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorDescriptorRegistry.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorDescriptorRegistry.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorDescriptorRegistry.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorDescriptorRegistry.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorEnvironment.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorEnvironment.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorEnvironment.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IOperatorEnvironment.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobActivityGraph.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobActivityGraph.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobActivityGraph.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobActivityGraph.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobFlag.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobFlag.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobFlag.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobFlag.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobId.java
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java
new file mode 100644
index 0000000..7c523f1
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java
@@ -0,0 +1,332 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.job;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.tuple.Pair;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.hyracks.api.constraints.Constraint;
+import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+
+public class JobSpecification implements Serializable, IOperatorDescriptorRegistry, IConnectorDescriptorRegistry {
+    private static final long serialVersionUID = 1L;
+
+    private final List<OperatorDescriptorId> roots;
+
+    private final Map<OperatorDescriptorId, IOperatorDescriptor> opMap;
+
+    private final Map<ConnectorDescriptorId, IConnectorDescriptor> connMap;
+
+    private final Map<OperatorDescriptorId, List<IConnectorDescriptor>> opInputMap;
+
+    private final Map<OperatorDescriptorId, List<IConnectorDescriptor>> opOutputMap;
+
+    private final Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> connectorOpMap;
+
+    private final Map<String, Serializable> properties;
+
+    private final Set<Constraint> userConstraints;
+
+    private IConnectorPolicyAssignmentPolicy connectorPolicyAssignmentPolicy;
+
+    private int frameSize;
+
+    private int maxReattempts;
+
+    private IJobletEventListenerFactory jobletEventListenerFactory;
+
+    private IGlobalJobDataFactory globalJobDataFactory;
+
+    private boolean useConnectorPolicyForScheduling;
+
+    private transient int operatorIdCounter;
+
+    private transient int connectorIdCounter;
+
+    public JobSpecification() {
+        roots = new ArrayList<OperatorDescriptorId>();
+        opMap = new HashMap<OperatorDescriptorId, IOperatorDescriptor>();
+        connMap = new HashMap<ConnectorDescriptorId, IConnectorDescriptor>();
+        opInputMap = new HashMap<OperatorDescriptorId, List<IConnectorDescriptor>>();
+        opOutputMap = new HashMap<OperatorDescriptorId, List<IConnectorDescriptor>>();
+        connectorOpMap = new HashMap<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>>();
+        properties = new HashMap<String, Serializable>();
+        userConstraints = new HashSet<Constraint>();
+        operatorIdCounter = 0;
+        connectorIdCounter = 0;
+        frameSize = 32768;
+        maxReattempts = 2;
+        useConnectorPolicyForScheduling = true;
+    }
+
+    @Override
+    public OperatorDescriptorId createOperatorDescriptorId(IOperatorDescriptor op) {
+        OperatorDescriptorId odId = new OperatorDescriptorId(operatorIdCounter++);
+        opMap.put(odId, op);
+        return odId;
+    }
+
+    @Override
+    public ConnectorDescriptorId createConnectorDescriptor(IConnectorDescriptor conn) {
+        ConnectorDescriptorId cdId = new ConnectorDescriptorId(connectorIdCounter++);
+        connMap.put(cdId, conn);
+        return cdId;
+    }
+
+    public void addRoot(IOperatorDescriptor op) {
+        roots.add(op.getOperatorId());
+    }
+
+    public void connect(IConnectorDescriptor conn, IOperatorDescriptor producerOp, int producerPort,
+            IOperatorDescriptor consumerOp, int consumerPort) {
+        insertIntoIndexedMap(opInputMap, consumerOp.getOperatorId(), consumerPort, conn);
+        insertIntoIndexedMap(opOutputMap, producerOp.getOperatorId(), producerPort, conn);
+        connectorOpMap.put(
+                conn.getConnectorId(),
+                Pair.<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> of(
+                        Pair.<IOperatorDescriptor, Integer> of(producerOp, producerPort),
+                        Pair.<IOperatorDescriptor, Integer> of(consumerOp, consumerPort)));
+    }
+
+    public void setProperty(String name, Serializable value) {
+        properties.put(name, value);
+    }
+
+    public Serializable getProperty(String name) {
+        return properties.get(name);
+    }
+
+    private <T> void extend(List<T> list, int index) {
+        int n = list.size();
+        for (int i = n; i <= index; ++i) {
+            list.add(null);
+        }
+    }
+
+    public Map<ConnectorDescriptorId, IConnectorDescriptor> getConnectorMap() {
+        return connMap;
+    }
+
+    public Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> getConnectorOperatorMap() {
+        return connectorOpMap;
+    }
+
+    public RecordDescriptor getConnectorRecordDescriptor(IConnectorDescriptor conn) {
+        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
+                .getConnectorId());
+        return connInfo.getLeft().getLeft().getOutputRecordDescriptors()[connInfo.getLeft().getRight()];
+    }
+
+    public IOperatorDescriptor getConsumer(IConnectorDescriptor conn) {
+        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
+                .getConnectorId());
+        return connInfo.getRight().getLeft();
+    }
+
+    public int getConsumerInputIndex(IConnectorDescriptor conn) {
+        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
+                .getConnectorId());
+        return connInfo.getRight().getRight();
+    }
+
+    public IConnectorDescriptor getInputConnectorDescriptor(IOperatorDescriptor op, int inputIndex) {
+        return getInputConnectorDescriptor(op.getOperatorId(), inputIndex);
+    }
+
+    public IConnectorDescriptor getInputConnectorDescriptor(OperatorDescriptorId odId, int inputIndex) {
+        return opInputMap.get(odId).get(inputIndex);
+    }
+
+    public Map<OperatorDescriptorId, List<IConnectorDescriptor>> getOperatorInputMap() {
+        return opInputMap;
+    }
+
+    public RecordDescriptor getOperatorInputRecordDescriptor(OperatorDescriptorId odId, int inputIndex) {
+        return getConnectorRecordDescriptor(getInputConnectorDescriptor(odId, inputIndex));
+    }
+
+    public Map<OperatorDescriptorId, IOperatorDescriptor> getOperatorMap() {
+        return opMap;
+    }
+
+    public Map<OperatorDescriptorId, List<IConnectorDescriptor>> getOperatorOutputMap() {
+        return opOutputMap;
+    }
+
+    public RecordDescriptor getOperatorOutputRecordDescriptor(OperatorDescriptorId odId, int outputIndex) {
+        return getConnectorRecordDescriptor(getOutputConnectorDescriptor(odId, outputIndex));
+    }
+
+    public IConnectorDescriptor getOutputConnectorDescriptor(IOperatorDescriptor op, int outputIndex) {
+        return getOutputConnectorDescriptor(op.getOperatorId(), outputIndex);
+    }
+
+    public IConnectorDescriptor getOutputConnectorDescriptor(OperatorDescriptorId odId, int outputIndex) {
+        return opOutputMap.get(odId).get(outputIndex);
+    }
+
+    public IOperatorDescriptor getProducer(IConnectorDescriptor conn) {
+        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
+                .getConnectorId());
+        return connInfo.getLeft().getLeft();
+    }
+
+    public int getProducerOutputIndex(IConnectorDescriptor conn) {
+        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo = connectorOpMap.get(conn
+                .getConnectorId());
+        return connInfo.getLeft().getRight();
+    }
+
+    public List<OperatorDescriptorId> getRoots() {
+        return roots;
+    }
+
+    public IConnectorPolicyAssignmentPolicy getConnectorPolicyAssignmentPolicy() {
+        return connectorPolicyAssignmentPolicy;
+    }
+
+    public void setConnectorPolicyAssignmentPolicy(IConnectorPolicyAssignmentPolicy connectorPolicyAssignmentPolicy) {
+        this.connectorPolicyAssignmentPolicy = connectorPolicyAssignmentPolicy;
+    }
+
+    public void setFrameSize(int frameSize) {
+        this.frameSize = frameSize;
+    }
+
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+    public void setMaxReattempts(int maxReattempts) {
+        this.maxReattempts = maxReattempts;
+    }
+
+    public int getMaxReattempts() {
+        return maxReattempts;
+    }
+
+    public void addUserConstraint(Constraint constraint) {
+        userConstraints.add(constraint);
+    }
+
+    public Set<Constraint> getUserConstraints() {
+        return userConstraints;
+    }
+
+    public IJobletEventListenerFactory getJobletEventListenerFactory() {
+        return jobletEventListenerFactory;
+    }
+
+    public void setJobletEventListenerFactory(IJobletEventListenerFactory jobletEventListenerFactory) {
+        this.jobletEventListenerFactory = jobletEventListenerFactory;
+    }
+
+    public IGlobalJobDataFactory getGlobalJobDataFactory() {
+        return globalJobDataFactory;
+    }
+
+    public void setGlobalJobDataFactory(IGlobalJobDataFactory globalJobDataFactory) {
+        this.globalJobDataFactory = globalJobDataFactory;
+    }
+
+    public boolean isUseConnectorPolicyForScheduling() {
+        return useConnectorPolicyForScheduling;
+    }
+
+    public void setUseConnectorPolicyForScheduling(boolean useConnectorPolicyForScheduling) {
+        this.useConnectorPolicyForScheduling = useConnectorPolicyForScheduling;
+    }
+
+    private <K, V> void insertIntoIndexedMap(Map<K, List<V>> map, K key, int index, V value) {
+        List<V> vList = map.get(key);
+        if (vList == null) {
+            vList = new ArrayList<V>();
+            map.put(key, vList);
+        }
+        extend(vList, index);
+        vList.set(index, value);
+    }
+
+    public String toString() {
+        StringBuilder buffer = new StringBuilder();
+
+        for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
+            buffer.append(e.getKey().getId()).append(" : ").append(e.getValue().toString()).append("\n");
+            List<IConnectorDescriptor> inputs = opInputMap.get(e.getKey());
+            if (inputs != null && !inputs.isEmpty()) {
+                buffer.append("   Inputs:\n");
+                for (IConnectorDescriptor c : inputs) {
+                    buffer.append("      ").append(c.getConnectorId().getId()).append(" : ").append(c.toString())
+                            .append("\n");
+                }
+            }
+            List<IConnectorDescriptor> outputs = opOutputMap.get(e.getKey());
+            if (outputs != null && !outputs.isEmpty()) {
+                buffer.append("   Outputs:\n");
+                for (IConnectorDescriptor c : outputs) {
+                    buffer.append("      ").append(c.getConnectorId().getId()).append(" : ").append(c.toString())
+                            .append("\n");
+                }
+            }
+        }
+
+        buffer.append("\n").append("Constraints:\n").append(userConstraints);
+
+        return buffer.toString();
+    }
+
+    public JSONObject toJSON() throws JSONException {
+        JSONObject jjob = new JSONObject();
+
+        JSONArray jopArray = new JSONArray();
+        for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
+            jopArray.put(e.getValue().toJSON());
+        }
+        jjob.put("operators", jopArray);
+
+        JSONArray jcArray = new JSONArray();
+        for (Map.Entry<ConnectorDescriptorId, IConnectorDescriptor> e : connMap.entrySet()) {
+            JSONObject conn = new JSONObject();
+            Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connection = connectorOpMap
+                    .get(e.getKey());
+            if (connection != null) {
+                conn.put("in-operator-id", connection.getLeft().getLeft().getOperatorId().toString());
+                conn.put("in-operator-port", connection.getLeft().getRight().intValue());
+                conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString());
+                conn.put("out-operator-port", connection.getRight().getRight().intValue());
+            }
+            conn.put("connector", e.getValue().toJSON());
+            jcArray.put(conn);
+        }
+        jjob.put("connectors", jcArray);
+
+        return jjob;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobStatus.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobStatus.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobStatus.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobStatus.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounter.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounter.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounter.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounterContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounterContext.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounterContext.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/profiling/counters/ICounterContext.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessage.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessage.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessage.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessage.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessageBroker.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessageBroker.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessageBroker.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/messages/IMessageBroker.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/IPartition.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/IPartition.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/IPartition.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/IPartition.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/PartitionId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/PartitionId.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/PartitionId.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/PartitionId.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatable.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatable.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatable.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatable.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatableRegistry.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatableRegistry.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatableRegistry.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/resources/IDeallocatableRegistry.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/ClusterTopology.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/ClusterTopology.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/ClusterTopology.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/ClusterTopology.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkEndpoint.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkEndpoint.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkEndpoint.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkEndpoint.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkSwitch.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkSwitch.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkSwitch.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkSwitch.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkTerminal.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkTerminal.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkTerminal.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/NetworkTerminal.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/TopologyDefinitionParser.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/TopologyDefinitionParser.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/TopologyDefinitionParser.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/topology/TopologyDefinitionParser.java
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java
similarity index 100%
rename from hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java
rename to hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java
diff --git a/hyracks/hyracks-cli/pom.xml b/hyracks/hyracks-cli/pom.xml
new file mode 100644
index 0000000..0456625
--- /dev/null
+++ b/hyracks/hyracks-cli/pom.xml
@@ -0,0 +1,98 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-cli</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-cli</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>javacc-maven-plugin</artifactId>
+        <version>2.6</version>
+        <executions>
+          <execution>
+            <id>javacc</id>
+            <goals>
+              <goal>javacc</goal>
+            </goals>
+            <configuration>
+              <isStatic>false</isStatic>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.cli.Main</mainClass>
+                  <name>hyrackscli</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>jline</groupId>
+  		<artifactId>jline</artifactId>
+  		<version>0.9.94</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-cli/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-cli/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-cli/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-cli/src/main/assembly/binary-assembly.xml
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CLI.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CLI.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CLI.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CLI.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CommandExecutor.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CommandExecutor.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CommandExecutor.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/CommandExecutor.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Main.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Main.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Main.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Main.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Session.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Session.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Session.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/Session.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/Command.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/Command.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/Command.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/Command.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/ConnectCommand.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/ConnectCommand.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/ConnectCommand.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/ConnectCommand.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/CreateApplicationCommand.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/CreateApplicationCommand.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/CreateApplicationCommand.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/CreateApplicationCommand.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DestroyApplicationCommand.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DestroyApplicationCommand.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DestroyApplicationCommand.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DestroyApplicationCommand.java
diff --git a/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DisconnectCommand.java b/hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DisconnectCommand.java
similarity index 100%
rename from hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DisconnectCommand.java
rename to hyracks/hyracks-cli/src/main/java/edu/uci/ics/hyracks/cli/commands/DisconnectCommand.java
diff --git a/hyracks-cli/src/main/javacc/cli.jj b/hyracks/hyracks-cli/src/main/javacc/cli.jj
similarity index 100%
rename from hyracks-cli/src/main/javacc/cli.jj
rename to hyracks/hyracks-cli/src/main/javacc/cli.jj
diff --git a/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks/hyracks-control/hyracks-control-cc/pom.xml
new file mode 100644
index 0000000..95598da
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -0,0 +1,57 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-control-cc</artifactId>
+  <name>hyracks-control-cc</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-control</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>org.eclipse.jetty</groupId>
+  		<artifactId>jetty-server</artifactId>
+  		<version>8.0.0.RC0</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>org.eclipse.jetty</groupId>
+  		<artifactId>jetty-webapp</artifactId>
+  		<version>8.0.0.RC0</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>org.apache.wicket</groupId>
+  		<artifactId>wicket-core</artifactId>
+  		<version>1.5.2</version>
+  	</dependency>
+  	<dependency>
+  		<groupId>org.slf4j</groupId>
+  		<artifactId>slf4j-jcl</artifactId>
+  		<version>1.6.3</version>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/CCDriver.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/CCDriver.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/CCDriver.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/CCDriver.java
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
new file mode 100644
index 0000000..5a33891
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
@@ -0,0 +1,451 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc;
+
+import java.io.File;
+import java.io.FileReader;
+import java.net.InetSocketAddress;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.xml.sax.InputSource;
+
+import edu.uci.ics.hyracks.api.client.ClusterControllerInfo;
+import edu.uci.ics.hyracks.api.client.HyracksClientInterfaceFunctions;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.context.ICCContext;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobStatus;
+import edu.uci.ics.hyracks.api.topology.ClusterTopology;
+import edu.uci.ics.hyracks.api.topology.TopologyDefinitionParser;
+import edu.uci.ics.hyracks.control.cc.application.CCApplicationContext;
+import edu.uci.ics.hyracks.control.cc.job.JobRun;
+import edu.uci.ics.hyracks.control.cc.web.WebServer;
+import edu.uci.ics.hyracks.control.cc.work.ApplicationCreateWork;
+import edu.uci.ics.hyracks.control.cc.work.ApplicationDestroyWork;
+import edu.uci.ics.hyracks.control.cc.work.ApplicationMessageWork;
+import edu.uci.ics.hyracks.control.cc.work.ApplicationStartWork;
+import edu.uci.ics.hyracks.control.cc.work.ApplicationStateChangeWork;
+import edu.uci.ics.hyracks.control.cc.work.GetIpAddressNodeNameMapWork;
+import edu.uci.ics.hyracks.control.cc.work.GetJobStatusWork;
+import edu.uci.ics.hyracks.control.cc.work.GetNodeControllersInfoWork;
+import edu.uci.ics.hyracks.control.cc.work.JobStartWork;
+import edu.uci.ics.hyracks.control.cc.work.JobletCleanupNotificationWork;
+import edu.uci.ics.hyracks.control.cc.work.NodeHeartbeatWork;
+import edu.uci.ics.hyracks.control.cc.work.RegisterNodeWork;
+import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionAvailibilityWork;
+import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionRequestWork;
+import edu.uci.ics.hyracks.control.cc.work.RemoveDeadNodesWork;
+import edu.uci.ics.hyracks.control.cc.work.ReportProfilesWork;
+import edu.uci.ics.hyracks.control.cc.work.TaskCompleteWork;
+import edu.uci.ics.hyracks.control.cc.work.TaskFailureWork;
+import edu.uci.ics.hyracks.control.cc.work.UnregisterNodeWork;
+import edu.uci.ics.hyracks.control.cc.work.WaitForJobCompletionWork;
+import edu.uci.ics.hyracks.control.common.AbstractRemoteService;
+import edu.uci.ics.hyracks.control.common.context.ServerContext;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions;
+import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions.Function;
+import edu.uci.ics.hyracks.control.common.logs.LogFile;
+import edu.uci.ics.hyracks.control.common.work.IPCResponder;
+import edu.uci.ics.hyracks.control.common.work.IResultCallback;
+import edu.uci.ics.hyracks.control.common.work.WorkQueue;
+import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
+import edu.uci.ics.hyracks.ipc.api.IIPCI;
+import edu.uci.ics.hyracks.ipc.exceptions.IPCException;
+import edu.uci.ics.hyracks.ipc.impl.IPCSystem;
+import edu.uci.ics.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
+
+public class ClusterControllerService extends AbstractRemoteService {
+    private static Logger LOGGER = Logger.getLogger(ClusterControllerService.class.getName());
+
+    private final CCConfig ccConfig;
+
+    private IPCSystem clusterIPC;
+
+    private IPCSystem clientIPC;
+
+    private final LogFile jobLog;
+
+    private final Map<String, NodeControllerState> nodeRegistry;
+
+    private final Map<String, Set<String>> ipAddressNodeNameMap;
+
+    private final Map<String, CCApplicationContext> applications;
+
+    private final ServerContext serverCtx;
+
+    private final WebServer webServer;
+
+    private ClusterControllerInfo info;
+
+    private final Map<JobId, JobRun> activeRunMap;
+
+    private final Map<JobId, JobRun> runMapArchive;
+
+    private final WorkQueue workQueue;
+
+    private final ExecutorService executor;
+
+    private final Timer timer;
+
+    private final ICCContext ccContext;
+
+    private final DeadNodeSweeper sweeper;
+
+    private long jobCounter;
+
+    public ClusterControllerService(final CCConfig ccConfig) throws Exception {
+        this.ccConfig = ccConfig;
+        File jobLogFolder = new File(ccConfig.ccRoot, "logs/jobs");
+        jobLog = new LogFile(jobLogFolder);
+        nodeRegistry = new LinkedHashMap<String, NodeControllerState>();
+        ipAddressNodeNameMap = new HashMap<String, Set<String>>();
+        applications = new Hashtable<String, CCApplicationContext>();
+        serverCtx = new ServerContext(ServerContext.ServerType.CLUSTER_CONTROLLER, new File(ccConfig.ccRoot));
+        executor = Executors.newCachedThreadPool();
+        IIPCI ccIPCI = new ClusterControllerIPCI();
+        clusterIPC = new IPCSystem(new InetSocketAddress(ccConfig.clusterNetPort), ccIPCI,
+                new CCNCFunctions.SerializerDeserializer());
+        IIPCI ciIPCI = new HyracksClientInterfaceIPCI();
+        clientIPC = new IPCSystem(new InetSocketAddress(ccConfig.clientNetIpAddress, ccConfig.clientNetPort), ciIPCI,
+                new JavaSerializationBasedPayloadSerializerDeserializer());
+        webServer = new WebServer(this);
+        activeRunMap = new HashMap<JobId, JobRun>();
+        runMapArchive = new LinkedHashMap<JobId, JobRun>() {
+            private static final long serialVersionUID = 1L;
+
+            protected boolean removeEldestEntry(Map.Entry<JobId, JobRun> eldest) {
+                return size() > ccConfig.jobHistorySize;
+            }
+        };
+        workQueue = new WorkQueue();
+        this.timer = new Timer(true);
+        final ClusterTopology topology = computeClusterTopology(ccConfig);
+        ccContext = new ICCContext() {
+            @Override
+            public void getIPAddressNodeMap(Map<String, Set<String>> map) throws Exception {
+                GetIpAddressNodeNameMapWork ginmw = new GetIpAddressNodeNameMapWork(ClusterControllerService.this, map);
+                workQueue.scheduleAndSync(ginmw);
+            }
+
+            @Override
+            public ClusterControllerInfo getClusterControllerInfo() {
+                return info;
+            }
+
+            @Override
+            public ClusterTopology getClusterTopology() {
+                return topology;
+            }
+        };
+        sweeper = new DeadNodeSweeper();
+        jobCounter = 0;
+    }
+
+    private static ClusterTopology computeClusterTopology(CCConfig ccConfig) throws Exception {
+        if (ccConfig.clusterTopologyDefinition == null) {
+            return null;
+        }
+        FileReader fr = new FileReader(ccConfig.clusterTopologyDefinition);
+        InputSource in = new InputSource(fr);
+        try {
+            return TopologyDefinitionParser.parse(in);
+        } finally {
+            fr.close();
+        }
+    }
+
+    @Override
+    public void start() throws Exception {
+        LOGGER.log(Level.INFO, "Starting ClusterControllerService: " + this);
+        clusterIPC.start();
+        clientIPC.start();
+        webServer.setPort(ccConfig.httpPort);
+        webServer.start();
+        workQueue.start();
+        info = new ClusterControllerInfo(ccConfig.clientNetIpAddress, ccConfig.clientNetPort,
+                webServer.getListeningPort());
+        timer.schedule(sweeper, 0, ccConfig.heartbeatPeriod);
+        jobLog.open();
+        LOGGER.log(Level.INFO, "Started ClusterControllerService");
+    }
+
+    @Override
+    public void stop() throws Exception {
+        LOGGER.log(Level.INFO, "Stopping ClusterControllerService");
+        executor.shutdownNow();
+        webServer.stop();
+        sweeper.cancel();
+        workQueue.stop();
+        jobLog.close();
+        LOGGER.log(Level.INFO, "Stopped ClusterControllerService");
+    }
+
+    public ServerContext getServerContext() {
+        return serverCtx;
+    }
+
+    public ICCContext getCCContext() {
+        return ccContext;
+    }
+
+    public Map<String, CCApplicationContext> getApplicationMap() {
+        return applications;
+    }
+
+    public Map<JobId, JobRun> getActiveRunMap() {
+        return activeRunMap;
+    }
+
+    public Map<JobId, JobRun> getRunMapArchive() {
+        return runMapArchive;
+    }
+
+    public Map<String, Set<String>> getIpAddressNodeNameMap() {
+        return ipAddressNodeNameMap;
+    }
+
+    public LogFile getJobLogFile() {
+        return jobLog;
+    }
+
+    public WorkQueue getWorkQueue() {
+        return workQueue;
+    }
+
+    public Executor getExecutor() {
+        return executor;
+    }
+
+    public Map<String, NodeControllerState> getNodeMap() {
+        return nodeRegistry;
+    }
+
+    public CCConfig getConfig() {
+        return ccConfig;
+    }
+
+    private JobId createJobId() {
+        return new JobId(jobCounter++);
+    }
+
+    public ClusterControllerInfo getClusterControllerInfo() {
+        return info;
+    }
+
+    public CCConfig getCCConfig() {
+        return ccConfig;
+    }
+
+    public IPCSystem getClusterIPC() {
+        return clusterIPC;
+    }
+
+    private class DeadNodeSweeper extends TimerTask {
+        @Override
+        public void run() {
+            workQueue.schedule(new RemoveDeadNodesWork(ClusterControllerService.this));
+        }
+    }
+
+    private class HyracksClientInterfaceIPCI implements IIPCI {
+        @Override
+        public void deliverIncomingMessage(IIPCHandle handle, long mid, long rmid, Object payload, Exception exception) {
+            HyracksClientInterfaceFunctions.Function fn = (HyracksClientInterfaceFunctions.Function) payload;
+            switch (fn.getFunctionId()) {
+                case GET_CLUSTER_CONTROLLER_INFO: {
+                    try {
+                        handle.send(mid, info, null);
+                    } catch (IPCException e) {
+                        e.printStackTrace();
+                    }
+                    return;
+                }
+
+                case CREATE_APPLICATION: {
+                    HyracksClientInterfaceFunctions.CreateApplicationFunction caf = (HyracksClientInterfaceFunctions.CreateApplicationFunction) fn;
+                    workQueue.schedule(new ApplicationCreateWork(ClusterControllerService.this, caf.getAppName(),
+                            new IPCResponder<Object>(handle, mid)));
+                    return;
+                }
+
+                case START_APPLICATION: {
+                    HyracksClientInterfaceFunctions.StartApplicationFunction saf = (HyracksClientInterfaceFunctions.StartApplicationFunction) fn;
+                    workQueue.schedule(new ApplicationStartWork(ClusterControllerService.this, saf.getAppName(),
+                            new IPCResponder<Object>(handle, mid)));
+                    return;
+                }
+
+                case DESTROY_APPLICATION: {
+                    HyracksClientInterfaceFunctions.DestroyApplicationFunction daf = (HyracksClientInterfaceFunctions.DestroyApplicationFunction) fn;
+                    workQueue.schedule(new ApplicationDestroyWork(ClusterControllerService.this, daf.getAppName(),
+                            new IPCResponder<Object>(handle, mid)));
+                    return;
+                }
+
+                case GET_JOB_STATUS: {
+                    HyracksClientInterfaceFunctions.GetJobStatusFunction gjsf = (HyracksClientInterfaceFunctions.GetJobStatusFunction) fn;
+                    workQueue.schedule(new GetJobStatusWork(ClusterControllerService.this, gjsf.getJobId(),
+                            new IPCResponder<JobStatus>(handle, mid)));
+                    return;
+                }
+
+                case START_JOB: {
+                    HyracksClientInterfaceFunctions.StartJobFunction sjf = (HyracksClientInterfaceFunctions.StartJobFunction) fn;
+                    JobId jobId = createJobId();
+                    workQueue.schedule(new JobStartWork(ClusterControllerService.this, sjf.getAppName(), sjf
+                            .getACGGFBytes(), sjf.getJobFlags(), jobId, new IPCResponder<JobId>(handle, mid)));
+                    return;
+                }
+
+                case WAIT_FOR_COMPLETION: {
+                    HyracksClientInterfaceFunctions.WaitForCompletionFunction wfcf = (HyracksClientInterfaceFunctions.WaitForCompletionFunction) fn;
+                    workQueue.schedule(new WaitForJobCompletionWork(ClusterControllerService.this, wfcf.getJobId(),
+                            new IPCResponder<Object>(handle, mid)));
+                    return;
+                }
+
+                case GET_NODE_CONTROLLERS_INFO: {
+                    workQueue.schedule(new GetNodeControllersInfoWork(ClusterControllerService.this,
+                            new IPCResponder<Map<String, NodeControllerInfo>>(handle, mid)));
+                    return;
+                }
+
+                case GET_CLUSTER_TOPOLOGY: {
+                    try {
+                        handle.send(mid, ccContext.getClusterTopology(), null);
+                    } catch (IPCException e) {
+                        e.printStackTrace();
+                    }
+                    return;
+                }
+            }
+            try {
+                handle.send(mid, null, new IllegalArgumentException("Unknown function " + fn.getFunctionId()));
+            } catch (IPCException e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    private class ClusterControllerIPCI implements IIPCI {
+        @Override
+        public void deliverIncomingMessage(final IIPCHandle handle, long mid, long rmid, Object payload,
+                Exception exception) {
+            CCNCFunctions.Function fn = (Function) payload;
+            switch (fn.getFunctionId()) {
+                case REGISTER_NODE: {
+                    CCNCFunctions.RegisterNodeFunction rnf = (CCNCFunctions.RegisterNodeFunction) fn;
+                    workQueue.schedule(new RegisterNodeWork(ClusterControllerService.this, rnf.getNodeRegistration()));
+                    return;
+                }
+
+                case UNREGISTER_NODE: {
+                    CCNCFunctions.UnregisterNodeFunction unf = (CCNCFunctions.UnregisterNodeFunction) fn;
+                    workQueue.schedule(new UnregisterNodeWork(ClusterControllerService.this, unf.getNodeId()));
+                    return;
+                }
+
+                case NODE_HEARTBEAT: {
+                    CCNCFunctions.NodeHeartbeatFunction nhf = (CCNCFunctions.NodeHeartbeatFunction) fn;
+                    workQueue.schedule(new NodeHeartbeatWork(ClusterControllerService.this, nhf.getNodeId(), nhf
+                            .getHeartbeatData()));
+                    return;
+                }
+
+                case NOTIFY_JOBLET_CLEANUP: {
+                    CCNCFunctions.NotifyJobletCleanupFunction njcf = (CCNCFunctions.NotifyJobletCleanupFunction) fn;
+                    workQueue.schedule(new JobletCleanupNotificationWork(ClusterControllerService.this,
+                            njcf.getJobId(), njcf.getNodeId()));
+                    return;
+                }
+
+                case REPORT_PROFILE: {
+                    CCNCFunctions.ReportProfileFunction rpf = (CCNCFunctions.ReportProfileFunction) fn;
+                    workQueue.schedule(new ReportProfilesWork(ClusterControllerService.this, rpf.getProfiles()));
+                    return;
+                }
+
+                case NOTIFY_TASK_COMPLETE: {
+                    CCNCFunctions.NotifyTaskCompleteFunction ntcf = (CCNCFunctions.NotifyTaskCompleteFunction) fn;
+                    workQueue.schedule(new TaskCompleteWork(ClusterControllerService.this, ntcf.getJobId(), ntcf
+                            .getTaskId(), ntcf.getNodeId(), ntcf.getStatistics()));
+                    return;
+                }
+                case NOTIFY_TASK_FAILURE: {
+                    CCNCFunctions.NotifyTaskFailureFunction ntff = (CCNCFunctions.NotifyTaskFailureFunction) fn;
+                    workQueue.schedule(new TaskFailureWork(ClusterControllerService.this, ntff.getJobId(), ntff
+                            .getTaskId(), ntff.getNodeId(), ntff.getDetails())); // nodeId then details (was passing getDetails() twice, dropping the node id)
+                    return;
+                }
+
+                case REGISTER_PARTITION_PROVIDER: {
+                    CCNCFunctions.RegisterPartitionProviderFunction rppf = (CCNCFunctions.RegisterPartitionProviderFunction) fn;
+                    workQueue.schedule(new RegisterPartitionAvailibilityWork(ClusterControllerService.this, rppf
+                            .getPartitionDescriptor()));
+                    return;
+                }
+
+                case REGISTER_PARTITION_REQUEST: {
+                    CCNCFunctions.RegisterPartitionRequestFunction rprf = (CCNCFunctions.RegisterPartitionRequestFunction) fn;
+                    workQueue.schedule(new RegisterPartitionRequestWork(ClusterControllerService.this, rprf
+                            .getPartitionRequest()));
+                    return;
+                }
+
+                case APPLICATION_STATE_CHANGE_RESPONSE: {
+                    CCNCFunctions.ApplicationStateChangeResponseFunction astrf = (CCNCFunctions.ApplicationStateChangeResponseFunction) fn;
+                    workQueue.schedule(new ApplicationStateChangeWork(ClusterControllerService.this, astrf));
+                    return;
+                }
+                case SEND_APPLICATION_MESSAGE: {
+                    CCNCFunctions.SendApplicationMessageFunction rsf = (CCNCFunctions.SendApplicationMessageFunction) fn;
+                    workQueue.schedule(new ApplicationMessageWork(ClusterControllerService.this, rsf.getMessage(), rsf
+                            .getAppName(), rsf.getNodeId()));
+                    return;
+                }
+
+                case GET_NODE_CONTROLLERS_INFO: {
+                    workQueue.schedule(new GetNodeControllersInfoWork(ClusterControllerService.this,
+                            new IResultCallback<Map<String, NodeControllerInfo>>() {
+                                @Override
+                                public void setValue(Map<String, NodeControllerInfo> result) {
+                                    new IPCResponder<CCNCFunctions.GetNodeControllersInfoResponseFunction>(handle, -1)
+                                            .setValue(new CCNCFunctions.GetNodeControllersInfoResponseFunction(result));
+                                }
+
+                                @Override
+                                public void setException(Exception e) {
+
+                                }
+                            }));
+                    return;
+                }
+            }
+            LOGGER.warning("Unknown function: " + fn.getFunctionId());
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/HyracksAdminConsoleApplication.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/HyracksAdminConsoleApplication.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/HyracksAdminConsoleApplication.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/HyracksAdminConsoleApplication.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityClusterPlan.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityClusterPlan.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityClusterPlan.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityClusterPlan.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityPlan.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityPlan.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityPlan.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/ActivityPlan.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/IJobStatusConditionVariable.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/IJobStatusConditionVariable.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/IJobStatusConditionVariable.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/IJobStatusConditionVariable.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/Task.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/Task.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/Task.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/Task.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskCluster.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskCluster.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskCluster.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskCluster.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterAttempt.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterAttempt.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterAttempt.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterAttempt.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterId.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterId.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterId.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskClusterId.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionMatchMaker.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionMatchMaker.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionMatchMaker.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionMatchMaker.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionUtils.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionUtils.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionUtils.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/partitions/PartitionUtils.java
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityClusterPlanner.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityClusterPlanner.java
new file mode 100644
index 0000000..8879627
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityClusterPlanner.java
@@ -0,0 +1,427 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.scheduler;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import edu.uci.ics.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.PartitionCountExpression;
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.TaskId;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.PipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.ActivityCluster;
+import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
+import edu.uci.ics.hyracks.api.partitions.PartitionId;
+import edu.uci.ics.hyracks.control.cc.job.ActivityClusterPlan;
+import edu.uci.ics.hyracks.control.cc.job.ActivityPlan;
+import edu.uci.ics.hyracks.control.cc.job.JobRun;
+import edu.uci.ics.hyracks.control.cc.job.Task;
+import edu.uci.ics.hyracks.control.cc.job.TaskCluster;
+import edu.uci.ics.hyracks.control.cc.job.TaskClusterId;
+
+public class ActivityClusterPlanner {
    private static final Logger LOGGER = Logger.getLogger(ActivityClusterPlanner.class.getName());

    // Scheduler that owns this planner; supplies the JobRun and constraint solver.
    private final JobScheduler scheduler;

    // For every partition that crosses a task-cluster boundary, remembers which
    // task cluster produces it. Populated by computeTaskClusters() and exposed
    // via getPartitionProducingTaskClusterMap().
    private final Map<PartitionId, TaskCluster> partitionProducingTaskClusterMap;

    public ActivityClusterPlanner(JobScheduler newJobScheduler) {
        this.scheduler = newJobScheduler;
        partitionProducingTaskClusterMap = new HashMap<PartitionId, TaskCluster>();
    }
+
+    public ActivityClusterPlan planActivityCluster(ActivityCluster ac) throws HyracksException {
+        JobRun jobRun = scheduler.getJobRun();
+        Map<ActivityId, ActivityPartitionDetails> pcMap = computePartitionCounts(ac);
+
+        Map<ActivityId, ActivityPlan> activityPlanMap = buildActivityPlanMap(ac, jobRun, pcMap);
+
+        assignConnectorPolicy(ac, activityPlanMap);
+
+        TaskCluster[] taskClusters = computeTaskClusters(ac, jobRun, activityPlanMap);
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Plan for " + ac);
+            LOGGER.info("Built " + taskClusters.length + " Task Clusters");
+            for (TaskCluster tc : taskClusters) {
+                LOGGER.info("Tasks: " + Arrays.toString(tc.getTasks()));
+            }
+        }
+
+        return new ActivityClusterPlan(taskClusters, activityPlanMap);
+    }
+
    /**
     * Creates one ActivityPlan per activity, with one Task per partition, and
     * wires dependency/dependent links between each task and the same-partition
     * tasks of the activities that block it.
     *
     * NOTE(review): dependency activities are assumed to have been planned
     * already (their plans present in jobRun's activity-cluster-plan map) and to
     * use the same partition count as the dependent — the asserts below check
     * both; confirm the scheduler guarantees this ordering.
     */
    private Map<ActivityId, ActivityPlan> buildActivityPlanMap(ActivityCluster ac, JobRun jobRun,
            Map<ActivityId, ActivityPartitionDetails> pcMap) {
        Map<ActivityId, ActivityPlan> activityPlanMap = new HashMap<ActivityId, ActivityPlan>();
        Set<ActivityId> depAnIds = new HashSet<ActivityId>();
        for (ActivityId anId : ac.getActivityMap().keySet()) {
            // Reused scratch set: activities that block anId.
            depAnIds.clear();
            getDependencyActivityIds(depAnIds, anId, ac);
            ActivityPartitionDetails apd = pcMap.get(anId);
            Task[] tasks = new Task[apd.getPartitionCount()];
            ActivityPlan activityPlan = new ActivityPlan(apd);
            for (int i = 0; i < tasks.length; ++i) {
                TaskId tid = new TaskId(anId, i);
                tasks[i] = new Task(tid, activityPlan);
                for (ActivityId danId : depAnIds) {
                    ActivityCluster dAC = ac.getActivityClusterGraph().getActivityMap().get(danId);
                    ActivityClusterPlan dACP = jobRun.getActivityClusterPlanMap().get(dAC.getId());
                    assert dACP != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for dependency AC: Encountered no plan for ActivityID "
                            + danId;
                    Task[] dATasks = dACP.getActivityPlanMap().get(danId).getTasks();
                    assert dATasks != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for dependency AC: Encountered no plan for ActivityID "
                            + danId;
                    assert dATasks.length == tasks.length : "Dependency activity partitioned differently from dependent: "
                            + dATasks.length + " != " + tasks.length;
                    // Link partition i of the dependent to partition i of the dependency.
                    Task dTask = dATasks[i];
                    TaskId dTaskId = dTask.getTaskId();
                    tasks[i].getDependencies().add(dTaskId);
                    dTask.getDependents().add(tid);
                }
            }
            activityPlan.setTasks(tasks);
            activityPlanMap.put(anId, activityPlan);
        }
        return activityPlanMap;
    }
+
    /**
     * Groups the planned tasks into task clusters — one co-scheduled group per
     * cluster when connector policies drive scheduling, otherwise a single
     * all-tasks cluster — then records inter-cluster partition traffic and lifts
     * task-level dependencies to task-cluster-level dependency edges.
     * Side effect: fills partitionProducingTaskClusterMap.
     */
    private TaskCluster[] computeTaskClusters(ActivityCluster ac, JobRun jobRun,
            Map<ActivityId, ActivityPlan> activityPlanMap) {
        Set<ActivityId> activities = ac.getActivityMap().keySet();
        Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = computeTaskConnectivity(jobRun,
                activityPlanMap, activities);

        TaskCluster[] taskClusters = ac.getActivityClusterGraph().isUseConnectorPolicyForScheduling() ? buildConnectorPolicyAwareTaskClusters(
                ac, activityPlanMap, taskConnectivity) : buildConnectorPolicyUnawareTaskClusters(ac, activityPlanMap);

        for (TaskCluster tc : taskClusters) {
            Set<TaskCluster> tcDependencyTaskClusters = tc.getDependencyTaskClusters();
            for (Task ts : tc.getTasks()) {
                TaskId tid = ts.getTaskId();
                // Every connectivity edge that crosses a task-cluster boundary
                // becomes a partition this cluster produces and the target
                // cluster requires.
                List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(tid);
                if (cInfoList != null) {
                    for (Pair<TaskId, ConnectorDescriptorId> p : cInfoList) {
                        Task targetTS = activityPlanMap.get(p.getLeft().getActivityId()).getTasks()[p.getLeft()
                                .getPartition()];
                        TaskCluster targetTC = targetTS.getTaskCluster();
                        if (targetTC != tc) {
                            ConnectorDescriptorId cdId = p.getRight();
                            PartitionId pid = new PartitionId(jobRun.getJobId(), cdId, tid.getPartition(), p.getLeft()
                                    .getPartition());
                            tc.getProducedPartitions().add(pid);
                            targetTC.getRequiredPartitions().add(pid);
                            partitionProducingTaskClusterMap.put(pid, tc);
                        }
                    }
                }

                // Blocker-activity dependencies become edges between task clusters.
                for (TaskId dTid : ts.getDependencies()) {
                    TaskCluster dTC = getTaskCluster(dTid);
                    dTC.getDependentTaskClusters().add(tc);
                    tcDependencyTaskClusters.add(dTC);
                }
            }
        }
        return taskClusters;
    }
+
+    private TaskCluster[] buildConnectorPolicyUnawareTaskClusters(ActivityCluster ac,
+            Map<ActivityId, ActivityPlan> activityPlanMap) {
+        List<Task> taskStates = new ArrayList<Task>();
+        for (ActivityId anId : ac.getActivityMap().keySet()) {
+            ActivityPlan ap = activityPlanMap.get(anId);
+            Task[] tasks = ap.getTasks();
+            for (Task t : tasks) {
+                taskStates.add(t);
+            }
+        }
+        TaskCluster tc = new TaskCluster(new TaskClusterId(ac.getId(), 0), ac, taskStates.toArray(new Task[taskStates
+                .size()]));
+        for (Task t : tc.getTasks()) {
+            t.setTaskCluster(tc);
+        }
+        return new TaskCluster[] { tc };
+    }
+
+    private Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> computeTaskConnectivity(JobRun jobRun,
+            Map<ActivityId, ActivityPlan> activityPlanMap, Set<ActivityId> activities) {
+        Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = new HashMap<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>>();
+        ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
+        BitSet targetBitmap = new BitSet();
+        for (ActivityId ac1 : activities) {
+            ActivityCluster ac = acg.getActivityMap().get(ac1);
+            Task[] ac1TaskStates = activityPlanMap.get(ac1).getTasks();
+            int nProducers = ac1TaskStates.length;
+            List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(ac1);
+            if (outputConns != null) {
+                for (IConnectorDescriptor c : outputConns) {
+                    ConnectorDescriptorId cdId = c.getConnectorId();
+                    ActivityId ac2 = ac.getConsumerActivity(cdId);
+                    Task[] ac2TaskStates = activityPlanMap.get(ac2).getTasks();
+                    int nConsumers = ac2TaskStates.length;
+                    for (int i = 0; i < nProducers; ++i) {
+                        c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
+                        List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(ac1TaskStates[i]
+                                .getTaskId());
+                        if (cInfoList == null) {
+                            cInfoList = new ArrayList<Pair<TaskId, ConnectorDescriptorId>>();
+                            taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
+                        }
+                        for (int j = targetBitmap.nextSetBit(0); j >= 0; j = targetBitmap.nextSetBit(j + 1)) {
+                            TaskId targetTID = ac2TaskStates[j].getTaskId();
+                            cInfoList.add(Pair.<TaskId, ConnectorDescriptorId> of(targetTID, cdId));
+                        }
+                    }
+                }
+            }
+        }
+        return taskConnectivity;
+    }
+
    /**
     * Groups tasks into task clusters honoring connector policies: two tasks
     * joined by a connector whose policy requires producer/consumer
     * co-scheduling must land in the same task cluster. Connected components of
     * that (symmetric) relation become the task clusters.
     */
    private TaskCluster[] buildConnectorPolicyAwareTaskClusters(ActivityCluster ac,
            Map<ActivityId, ActivityPlan> activityPlanMap,
            Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity) {
        // Seed: every task starts in a singleton cluster containing itself.
        Map<TaskId, Set<TaskId>> taskClusterMap = new HashMap<TaskId, Set<TaskId>>();
        for (ActivityId anId : ac.getActivityMap().keySet()) {
            ActivityPlan ap = activityPlanMap.get(anId);
            Task[] tasks = ap.getTasks();
            for (Task t : tasks) {
                Set<TaskId> cluster = new HashSet<TaskId>();
                TaskId tid = t.getTaskId();
                cluster.add(tid);
                taskClusterMap.put(tid, cluster);
            }
        }

        // Merge step: a producer's set absorbs each consumer task reached over a
        // connector whose policy demands co-scheduling.
        JobRun jobRun = scheduler.getJobRun();
        Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = jobRun.getConnectorPolicyMap();
        for (Map.Entry<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> e : taskConnectivity.entrySet()) {
            Set<TaskId> cluster = taskClusterMap.get(e.getKey());
            for (Pair<TaskId, ConnectorDescriptorId> p : e.getValue()) {
                IConnectorPolicy cPolicy = connectorPolicies.get(p.getRight());
                if (cPolicy.requiresProducerConsumerCoscheduling()) {
                    cluster.add(p.getLeft());
                }
            }
        }

        /*
         * taskClusterMap contains for every TID x, x -> { coscheduled consumer TIDs U x }
         * We compute the transitive closure of this relation to find the largest set of
         * tasks that need to be co-scheduled
         */
        // Assign each task an ordinal so the relation can be held in BitSets.
        int counter = 0;
        TaskId[] ordinalList = new TaskId[taskClusterMap.size()];
        Map<TaskId, Integer> ordinalMap = new HashMap<TaskId, Integer>();
        for (TaskId tid : taskClusterMap.keySet()) {
            ordinalList[counter] = tid;
            ordinalMap.put(tid, counter);
            ++counter;
        }

        // Build a symmetric adjacency matrix: paths[i] holds every ordinal
        // reachable from i in one step (and i itself, since each set contains
        // its own key).
        int n = ordinalList.length;
        BitSet[] paths = new BitSet[n];
        for (Map.Entry<TaskId, Set<TaskId>> e : taskClusterMap.entrySet()) {
            int i = ordinalMap.get(e.getKey());
            BitSet bsi = paths[i];
            if (bsi == null) {
                bsi = new BitSet(n);
                paths[i] = bsi;
            }
            for (TaskId ttid : e.getValue()) {
                int j = ordinalMap.get(ttid);
                paths[i].set(j);
                BitSet bsj = paths[j];
                if (bsj == null) {
                    bsj = new BitSet(n);
                    paths[j] = bsj;
                }
                bsj.set(i);
            }
        }
        // Closure: propagate k's neighbors to every i adjacent to k, updating
        // both directions to keep the relation symmetric.
        // NOTE(review): this is a single pass over intermediate nodes k, not a
        // full Floyd-Warshall; it appears to rely on the symmetric updates for
        // completeness — confirm against upstream scheduler tests.
        for (int k = 0; k < n; ++k) {
            for (int i = paths[k].nextSetBit(0); i >= 0; i = paths[k].nextSetBit(i + 1)) {
                for (int j = paths[i].nextClearBit(0); j < n && j >= 0; j = paths[i].nextClearBit(j + 1)) {
                    paths[i].set(j, paths[k].get(j));
                    paths[j].set(i, paths[i].get(j));
                }
            }
        }
        // Extract connected components. pending tracks tasks not yet assigned;
        // nextSetBit(i) (not i + 1) is safe because paths[i] always contains i,
        // so bit i is cleared inside the inner loop and the scan advances.
        BitSet pending = new BitSet(n);
        pending.set(0, n);
        List<List<TaskId>> clusters = new ArrayList<List<TaskId>>();
        for (int i = pending.nextSetBit(0); i >= 0; i = pending.nextSetBit(i)) {
            List<TaskId> cluster = new ArrayList<TaskId>();
            for (int j = paths[i].nextSetBit(0); j >= 0; j = paths[i].nextSetBit(j + 1)) {
                cluster.add(ordinalList[j]);
                pending.clear(j);
            }
            clusters.add(cluster);
        }

        // Materialize one TaskCluster per component and back-link its tasks.
        List<TaskCluster> tcSet = new ArrayList<TaskCluster>();
        counter = 0;
        for (List<TaskId> cluster : clusters) {
            List<Task> taskStates = new ArrayList<Task>();
            for (TaskId tid : cluster) {
                taskStates.add(activityPlanMap.get(tid.getActivityId()).getTasks()[tid.getPartition()]);
            }
            TaskCluster tc = new TaskCluster(new TaskClusterId(ac.getId(), counter++), ac,
                    taskStates.toArray(new Task[taskStates.size()]));
            tcSet.add(tc);
            for (TaskId tid : cluster) {
                activityPlanMap.get(tid.getActivityId()).getTasks()[tid.getPartition()].setTaskCluster(tc);
            }
        }
        TaskCluster[] taskClusters = tcSet.toArray(new TaskCluster[tcSet.size()]);
        return taskClusters;
    }
+
+    private TaskCluster getTaskCluster(TaskId tid) {
+        JobRun run = scheduler.getJobRun();
+        ActivityCluster ac = run.getActivityClusterGraph().getActivityMap().get(tid.getActivityId());
+        ActivityClusterPlan acp = run.getActivityClusterPlanMap().get(ac.getId());
+        Task[] tasks = acp.getActivityPlanMap().get(tid.getActivityId()).getTasks();
+        Task task = tasks[tid.getPartition()];
+        assert task.getTaskId().equals(tid);
+        return task.getTaskCluster();
+    }
+
+    private void getDependencyActivityIds(Set<ActivityId> depAnIds, ActivityId anId, ActivityCluster ac) {
+        Set<ActivityId> blockers = ac.getBlocked2BlockerMap().get(anId);
+        if (blockers != null) {
+            depAnIds.addAll(blockers);
+        }
+    }
+
+    private void assignConnectorPolicy(ActivityCluster ac, Map<ActivityId, ActivityPlan> taskMap) {
+        Map<ConnectorDescriptorId, IConnectorPolicy> cPolicyMap = new HashMap<ConnectorDescriptorId, IConnectorPolicy>();
+        Set<ActivityId> activities = ac.getActivityMap().keySet();
+        BitSet targetBitmap = new BitSet();
+        for (ActivityId a1 : activities) {
+            Task[] ac1TaskStates = taskMap.get(a1).getTasks();
+            int nProducers = ac1TaskStates.length;
+            List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(a1);
+            if (outputConns != null) {
+                for (IConnectorDescriptor c : outputConns) {
+                    ConnectorDescriptorId cdId = c.getConnectorId();
+                    ActivityId a2 = ac.getConsumerActivity(cdId);
+                    Task[] ac2TaskStates = taskMap.get(a2).getTasks();
+                    int nConsumers = ac2TaskStates.length;
+
+                    int[] fanouts = new int[nProducers];
+                    for (int i = 0; i < nProducers; ++i) {
+                        c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
+                        fanouts[i] = targetBitmap.cardinality();
+                    }
+                    IConnectorPolicy cp = assignConnectorPolicy(ac, c, nProducers, nConsumers, fanouts);
+                    cPolicyMap.put(cdId, cp);
+                }
+            }
+        }
+        scheduler.getJobRun().getConnectorPolicyMap().putAll(cPolicyMap);
+    }
+
+    private IConnectorPolicy assignConnectorPolicy(ActivityCluster ac, IConnectorDescriptor c, int nProducers,
+            int nConsumers, int[] fanouts) {
+        IConnectorPolicyAssignmentPolicy cpap = ac.getConnectorPolicyAssignmentPolicy();
+        if (cpap != null) {
+            return cpap.getConnectorPolicyAssignment(c, nProducers, nConsumers, fanouts);
+        }
+        cpap = ac.getActivityClusterGraph().getConnectorPolicyAssignmentPolicy();
+        if (cpap != null) {
+            return cpap.getConnectorPolicyAssignment(c, nProducers, nConsumers, fanouts);
+        }
+        return new PipeliningConnectorPolicy();
+    }
+
+    private Map<ActivityId, ActivityPartitionDetails> computePartitionCounts(ActivityCluster ac)
+            throws HyracksException {
+        PartitionConstraintSolver solver = scheduler.getSolver();
+        Set<LValueConstraintExpression> lValues = new HashSet<LValueConstraintExpression>();
+        for (ActivityId anId : ac.getActivityMap().keySet()) {
+            lValues.add(new PartitionCountExpression(anId.getOperatorDescriptorId()));
+        }
+        solver.solve(lValues);
+        Map<OperatorDescriptorId, Integer> nPartMap = new HashMap<OperatorDescriptorId, Integer>();
+        for (LValueConstraintExpression lv : lValues) {
+            Object value = solver.getValue(lv);
+            if (value == null) {
+                throw new HyracksException("No value found for " + lv);
+            }
+            if (!(value instanceof Number)) {
+                throw new HyracksException("Unexpected type of value bound to " + lv + ": " + value.getClass() + "("
+                        + value + ")");
+            }
+            int nParts = ((Number) value).intValue();
+            if (nParts <= 0) {
+                throw new HyracksException("Unsatisfiable number of partitions for " + lv + ": " + nParts);
+            }
+            nPartMap.put(((PartitionCountExpression) lv).getOperatorDescriptorId(), Integer.valueOf(nParts));
+        }
+        Map<ActivityId, ActivityPartitionDetails> activityPartsMap = new HashMap<ActivityId, ActivityPartitionDetails>();
+        for (ActivityId anId : ac.getActivityMap().keySet()) {
+            int nParts = nPartMap.get(anId.getOperatorDescriptorId());
+            int[] nInputPartitions = null;
+            List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(anId);
+            if (inputs != null) {
+                nInputPartitions = new int[inputs.size()];
+                for (int i = 0; i < nInputPartitions.length; ++i) {
+                    ConnectorDescriptorId cdId = inputs.get(i).getConnectorId();
+                    ActivityId aid = ac.getProducerActivity(cdId);
+                    Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
+                    nInputPartitions[i] = nPartInt;
+                }
+            }
+            int[] nOutputPartitions = null;
+            List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(anId);
+            if (outputs != null) {
+                nOutputPartitions = new int[outputs.size()];
+                for (int i = 0; i < nOutputPartitions.length; ++i) {
+                    ConnectorDescriptorId cdId = outputs.get(i).getConnectorId();
+                    ActivityId aid = ac.getConsumerActivity(cdId);
+                    Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
+                    nOutputPartitions[i] = nPartInt;
+                }
+            }
+            ActivityPartitionDetails apd = new ActivityPartitionDetails(nParts, nInputPartitions, nOutputPartitions);
+            activityPartsMap.put(anId, apd);
+        }
+        return activityPartsMap;
+    }
+
    /**
     * Returns the map from each inter-task-cluster partition to the task cluster
     * that produces it, populated as a side effect of planActivityCluster().
     */
    public Map<? extends PartitionId, ? extends TaskCluster> getPartitionProducingTaskClusterMap() {
        return partitionProducingTaskClusterMap;
    }
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityPartitionDetails.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityPartitionDetails.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityPartitionDetails.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/ActivityPartitionDetails.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/PartitionConstraintSolver.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/PartitionConstraintSolver.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/PartitionConstraintSolver.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/PartitionConstraintSolver.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/RankedRunnableTaskCluster.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/RankedRunnableTaskCluster.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/RankedRunnableTaskCluster.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/RankedRunnableTaskCluster.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/Runnability.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/Runnability.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/Runnability.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/Runnability.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/ApplicationInstallationHandler.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/ApplicationInstallationHandler.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/ApplicationInstallationHandler.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/ApplicationInstallationHandler.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/JobsRESTAPIFunction.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/JobsRESTAPIFunction.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/JobsRESTAPIFunction.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/JobsRESTAPIFunction.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/NodesRESTAPIFunction.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/NodesRESTAPIFunction.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/NodesRESTAPIFunction.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/NodesRESTAPIFunction.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/IJSONOutputFunction.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/IJSONOutputFunction.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/IJSONOutputFunction.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/IJSONOutputFunction.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONOutputRequestHandler.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONUtils.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONUtils.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONUtils.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/JSONUtils.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/RoutingHandler.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/RoutingHandler.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/RoutingHandler.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/util/RoutingHandler.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/AbstractTaskLifecycleWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationCreateWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationCreateWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationCreateWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationCreateWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationDestroyWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationDestroyWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationDestroyWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationDestroyWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStartWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStartWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStartWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStartWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStateChangeWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStateChangeWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStateChangeWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationStateChangeWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetIpAddressNodeNameMapWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetIpAddressNodeNameMapWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetIpAddressNodeNameMapWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetIpAddressNodeNameMapWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobRunJSONWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobRunJSONWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobRunJSONWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobRunJSONWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobStatusWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobStatusWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobStatusWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobStatusWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobSummariesJSONWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobSummariesJSONWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobSummariesJSONWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetJobSummariesJSONWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeDetailsJSONWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeSummariesJSONWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeSummariesJSONWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeSummariesJSONWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeSummariesJSONWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/NodeHeartbeatWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/NodeHeartbeatWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/NodeHeartbeatWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/NodeHeartbeatWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionRequestWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionRequestWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionRequestWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterPartitionRequestWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportProfilesWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportProfilesWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportProfilesWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportProfilesWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskCompleteWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskCompleteWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskCompleteWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskCompleteWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/UnregisterNodeWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/UnregisterNodeWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/UnregisterNodeWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/UnregisterNodeWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.html b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.html
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.html
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/AbstractPage.html
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.html b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.html
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.html
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/IndexPage.html
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.html b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.html
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.html
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/JobDetailsPage.html
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.html b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.html
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.html
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/edu/uci/ics/hyracks/control/cc/adminconsole/pages/NodeDetailsPage.html
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/Graphs.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/Graphs.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/Graphs.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/Graphs.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/JobDetailsPage.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/JobDetailsPage.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/JobDetailsPage.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/JobDetailsPage.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/excanvas.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/excanvas.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/excanvas.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/excanvas.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.colorhelpers.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.colorhelpers.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.colorhelpers.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.colorhelpers.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.crosshair.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.crosshair.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.crosshair.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.crosshair.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.fillbetween.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.fillbetween.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.fillbetween.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.fillbetween.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.image.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.image.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.image.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.image.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.navigate.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.navigate.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.navigate.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.navigate.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.pie.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.pie.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.pie.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.pie.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.resize.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.resize.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.resize.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.resize.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.selection.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.selection.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.selection.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.selection.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.stack.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.stack.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.stack.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.stack.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.symbol.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.symbol.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.symbol.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.symbol.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.threshold.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.threshold.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.threshold.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/flot/jquery.flot.threshold.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/jquery.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/jquery.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/jquery.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/jquery.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery-ui.min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery-ui.min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery-ui.min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery-ui.min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.getParams.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.getParams.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.getParams.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.getParams.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.timer.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.timer.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.timer.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jquery/plugins/jquery.timer.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jsplumb/jquery.jsPlumb-1.3.5-all-min.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jsplumb/jquery.jsPlumb-1.3.5-all-min.js
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jsplumb/jquery.jsPlumb-1.3.5-all-min.js
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/jsplumb/jquery.jsPlumb-1.3.5-all-min.js
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/adminconsole.css b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/adminconsole.css
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/adminconsole.css
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/adminconsole.css
diff --git a/hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/jquery-ui/themes/base/jquery-ui.css b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/jquery-ui/themes/base/jquery-ui.css
similarity index 100%
rename from hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/jquery-ui/themes/base/jquery-ui.css
rename to hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/stylesheet/jquery-ui/themes/base/jquery-ui.css
diff --git a/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks/hyracks-control/hyracks-control-common/pom.xml
new file mode 100644
index 0000000..2efdd42
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -0,0 +1,43 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-control-common</artifactId>
+  <name>hyracks-control-common</name>
+  <version>0.2.3-SNAPSHOT</version>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-control</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>commons-io</groupId>
+  		<artifactId>commons-io</artifactId>
+  		<version>1.4</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/AbstractRemoteService.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/AbstractRemoteService.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/AbstractRemoteService.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/AbstractRemoteService.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationStatus.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationStatus.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationStatus.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationStatus.java
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
new file mode 100644
index 0000000..0c5bb2f
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.common.base;
+
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
+import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
+import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
+import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
+import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
+import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
+import edu.uci.ics.hyracks.control.common.job.profiling.om.TaskProfile;
+
+public interface IClusterController {
+    public void registerNode(NodeRegistration reg) throws Exception;
+
+    public void unregisterNode(String nodeId) throws Exception;
+
+    public void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
+            throws Exception;
+
+    public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, String details) throws Exception;
+
+    public void notifyJobletCleanup(JobId jobId, String nodeId) throws Exception;
+
+    public void nodeHeartbeat(String id, HeartbeatData hbData) throws Exception;
+
+    public void reportProfile(String id, List<JobProfile> profiles) throws Exception;
+
+    public void registerPartitionProvider(PartitionDescriptor partitionDescriptor) throws Exception;
+
+    public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception;
+
+    public void notifyApplicationStateChange(String nodeId, String appName, ApplicationStatus status) throws Exception;
+
+    public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception;
+
+    public void getNodeControllerInfos() throws Exception;
+}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/context/ServerContext.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/context/ServerContext.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/context/ServerContext.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/context/ServerContext.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/CCConfig.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/CCConfig.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/CCConfig.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/CCConfig.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeParameters.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeParameters.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeParameters.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeParameters.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatSchema.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatSchema.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatSchema.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatSchema.java
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
new file mode 100644
index 0000000..557a8cb
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
@@ -0,0 +1,876 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.common.ipc;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
+import edu.uci.ics.hyracks.api.dataflow.TaskId;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobStatus;
+import edu.uci.ics.hyracks.api.partitions.PartitionId;
+import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
+import edu.uci.ics.hyracks.control.common.controllers.NodeParameters;
+import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
+import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
+import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
+import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
+import edu.uci.ics.hyracks.control.common.job.PartitionState;
+import edu.uci.ics.hyracks.control.common.job.TaskAttemptDescriptor;
+import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
+import edu.uci.ics.hyracks.control.common.job.profiling.om.TaskProfile;
+import edu.uci.ics.hyracks.ipc.api.IPayloadSerializerDeserializer;
+import edu.uci.ics.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
+
+/**
+ * Defines the function (message) objects exchanged over IPC between the
+ * cluster controller (CC) and the node controllers (NC), together with an
+ * {@link IPayloadSerializerDeserializer} that uses hand-coded binary
+ * encodings for the high-frequency partition-related functions and falls
+ * back to Java serialization for everything else.
+ */
+public class CCNCFunctions {
+    private static final Logger LOGGER = Logger.getLogger(CCNCFunctions.class.getName());
+
+    // Every serialized message is prefixed with a one-byte FunctionId ordinal.
+    private static final int FID_CODE_SIZE = 1;
+
+    /**
+     * Identifies the message type on the wire. The ordinal is written as the
+     * one-byte message prefix, so the declaration order here is part of the
+     * wire format — do not reorder or insert values in the middle.
+     * NOTE(review): the two groups appear to be NC-to-CC messages followed by
+     * CC-to-NC messages — inferred from the names; confirm against the callers.
+     */
+    public enum FunctionId {
+        REGISTER_NODE,
+        UNREGISTER_NODE,
+        NOTIFY_JOBLET_CLEANUP,
+        NOTIFY_TASK_COMPLETE,
+        NOTIFY_TASK_FAILURE,
+        NODE_HEARTBEAT,
+        REPORT_PROFILE,
+        REGISTER_PARTITION_PROVIDER,
+        REGISTER_PARTITION_REQUEST,
+        APPLICATION_STATE_CHANGE_RESPONSE,
+
+        NODE_REGISTRATION_RESULT,
+        START_TASKS,
+        ABORT_TASKS,
+        CLEANUP_JOBLET,
+        CREATE_APPLICATION,
+        DESTROY_APPLICATION,
+        REPORT_PARTITION_AVAILABILITY,
+        SEND_APPLICATION_MESSAGE,
+        GET_NODE_CONTROLLERS_INFO,
+        GET_NODE_CONTROLLERS_INFO_RESPONSE,
+
+        OTHER
+    }
+
+    /** Carries an opaque, pre-serialized application message to a target node. */
+    public static class SendApplicationMessageFunction extends Function {
+        private static final long serialVersionUID = 1L;
+        private byte[] serializedMessage;
+        private String nodeId;
+        private String appName;
+
+        public String getNodeId() {
+            return nodeId;
+        }
+
+        public void setNodeId(String nodeId) {
+            this.nodeId = nodeId;
+        }
+
+        public byte[] getMessage() {
+            return serializedMessage;
+        }
+
+        public SendApplicationMessageFunction(byte[] data, String appName, String nodeId) {
+            super();
+            this.serializedMessage = data;
+            this.nodeId = nodeId;
+            this.appName = appName;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.SEND_APPLICATION_MESSAGE;
+        }
+
+        public String getAppName() {
+            return appName;
+        }
+
+    }
+
+    /** Base type for all CC/NC messages; each subclass reports its FunctionId. */
+    public static abstract class Function implements Serializable {
+        private static final long serialVersionUID = 1L;
+
+        public abstract FunctionId getFunctionId();
+    }
+
+    public static class RegisterNodeFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final NodeRegistration reg;
+
+        public RegisterNodeFunction(NodeRegistration reg) {
+            this.reg = reg;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REGISTER_NODE;
+        }
+
+        public NodeRegistration getNodeRegistration() {
+            return reg;
+        }
+    }
+
+    public static class UnregisterNodeFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final String nodeId;
+
+        public UnregisterNodeFunction(String nodeId) {
+            this.nodeId = nodeId;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.UNREGISTER_NODE;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+    }
+
+    public static class NotifyTaskCompleteFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+        private final TaskAttemptId taskId;
+        private final String nodeId;
+        private final TaskProfile statistics;
+
+        public NotifyTaskCompleteFunction(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics) {
+            this.jobId = jobId;
+            this.taskId = taskId;
+            this.nodeId = nodeId;
+            this.statistics = statistics;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.NOTIFY_TASK_COMPLETE;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public TaskAttemptId getTaskId() {
+            return taskId;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+
+        public TaskProfile getStatistics() {
+            return statistics;
+        }
+    }
+
+    public static class NotifyTaskFailureFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+        private final TaskAttemptId taskId;
+        private final String nodeId;
+        private final String details;
+
+        public NotifyTaskFailureFunction(JobId jobId, TaskAttemptId taskId, String nodeId, String details) {
+            this.jobId = jobId;
+            this.taskId = taskId;
+            this.nodeId = nodeId;
+            this.details = details;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.NOTIFY_TASK_FAILURE;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public TaskAttemptId getTaskId() {
+            return taskId;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+
+        public String getDetails() {
+            return details;
+        }
+    }
+
+    public static class NotifyJobletCleanupFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+        private final String nodeId;
+
+        public NotifyJobletCleanupFunction(JobId jobId, String nodeId) {
+            this.jobId = jobId;
+            this.nodeId = nodeId;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.NOTIFY_JOBLET_CLEANUP;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+    }
+
+    public static class NodeHeartbeatFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final String nodeId;
+        private final HeartbeatData hbData;
+
+        public NodeHeartbeatFunction(String nodeId, HeartbeatData hbData) {
+            this.nodeId = nodeId;
+            this.hbData = hbData;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.NODE_HEARTBEAT;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+
+        public HeartbeatData getHeartbeatData() {
+            return hbData;
+        }
+    }
+
+    public static class ReportProfileFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final String nodeId;
+        private final List<JobProfile> profiles;
+
+        public ReportProfileFunction(String nodeId, List<JobProfile> profiles) {
+            this.nodeId = nodeId;
+            this.profiles = profiles;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REPORT_PROFILE;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+
+        public List<JobProfile> getProfiles() {
+            return profiles;
+        }
+    }
+
+    /**
+     * Announces an available partition. This message is on the data-transfer
+     * hot path, so it has a hand-coded binary encoding (see
+     * {@link #serialize(OutputStream, Object)} / {@link #deserialize(ByteBuffer, int)})
+     * instead of Java serialization.
+     */
+    public static class RegisterPartitionProviderFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final PartitionDescriptor partitionDescriptor;
+
+        public RegisterPartitionProviderFunction(PartitionDescriptor partitionDescriptor) {
+            this.partitionDescriptor = partitionDescriptor;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REGISTER_PARTITION_PROVIDER;
+        }
+
+        public PartitionDescriptor getPartitionDescriptor() {
+            return partitionDescriptor;
+        }
+
+        public static Object deserialize(ByteBuffer buffer, int length) throws Exception {
+            ByteArrayInputStream bais = new ByteArrayInputStream(buffer.array(), buffer.position(), length);
+            DataInputStream dis = new DataInputStream(bais);
+
+            // Read PartitionId
+            PartitionId pid = readPartitionId(dis);
+
+            // Read nodeId
+            String nodeId = dis.readUTF();
+
+            // Read TaskAttemptId
+            TaskAttemptId taId = readTaskAttemptId(dis);
+
+            // Read reusable flag
+            boolean reusable = dis.readBoolean();
+
+            // Read Partition State
+            PartitionState state = readPartitionState(dis);
+
+            PartitionDescriptor pd = new PartitionDescriptor(pid, nodeId, taId, reusable);
+            pd.setState(state);
+            return new RegisterPartitionProviderFunction(pd);
+        }
+
+        public static void serialize(OutputStream out, Object object) throws Exception {
+            RegisterPartitionProviderFunction fn = (RegisterPartitionProviderFunction) object;
+
+            DataOutputStream dos = new DataOutputStream(out);
+
+            PartitionDescriptor pd = fn.getPartitionDescriptor();
+
+            // Write PartitionId
+            writePartitionId(dos, pd.getPartitionId());
+
+            // Write nodeId
+            dos.writeUTF(pd.getNodeId());
+
+            // Write TaskAttemptId
+            writeTaskAttemptId(dos, pd.getProducingTaskAttemptId());
+
+            // Write reusable flag
+            dos.writeBoolean(pd.isReusable());
+
+            // Write Partition State
+            writePartitionState(dos, pd.getState());
+        }
+    }
+
+    /**
+     * Requests a partition. Like RegisterPartitionProviderFunction, this is a
+     * hot-path message with a hand-coded binary encoding.
+     */
+    public static class RegisterPartitionRequestFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final PartitionRequest partitionRequest;
+
+        public RegisterPartitionRequestFunction(PartitionRequest partitionRequest) {
+            this.partitionRequest = partitionRequest;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REGISTER_PARTITION_REQUEST;
+        }
+
+        public PartitionRequest getPartitionRequest() {
+            return partitionRequest;
+        }
+
+        public static Object deserialize(ByteBuffer buffer, int length) throws Exception {
+            ByteArrayInputStream bais = new ByteArrayInputStream(buffer.array(), buffer.position(), length);
+            DataInputStream dis = new DataInputStream(bais);
+
+            // Read PartitionId
+            PartitionId pid = readPartitionId(dis);
+
+            // Read nodeId
+            String nodeId = dis.readUTF();
+
+            // Read TaskAttemptId
+            TaskAttemptId taId = readTaskAttemptId(dis);
+
+            // Read Partition State
+            PartitionState state = readPartitionState(dis);
+
+            PartitionRequest pr = new PartitionRequest(pid, nodeId, taId, state);
+            return new RegisterPartitionRequestFunction(pr);
+        }
+
+        public static void serialize(OutputStream out, Object object) throws Exception {
+            RegisterPartitionRequestFunction fn = (RegisterPartitionRequestFunction) object;
+
+            DataOutputStream dos = new DataOutputStream(out);
+
+            PartitionRequest pr = fn.getPartitionRequest();
+
+            // Write PartitionId
+            writePartitionId(dos, pr.getPartitionId());
+
+            // Write nodeId
+            dos.writeUTF(pr.getNodeId());
+
+            // Write TaskAttemptId
+            writeTaskAttemptId(dos, pr.getRequestingTaskAttemptId());
+
+            // Write Partition State
+            writePartitionState(dos, pr.getMinimumState());
+        }
+    }
+
+    public static class ApplicationStateChangeResponseFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final String nodeId;
+        private final String appName;
+        private final ApplicationStatus status;
+
+        public ApplicationStateChangeResponseFunction(String nodeId, String appName, ApplicationStatus status) {
+            this.nodeId = nodeId;
+            this.appName = appName;
+            this.status = status;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.APPLICATION_STATE_CHANGE_RESPONSE;
+        }
+
+        public String getNodeId() {
+            return nodeId;
+        }
+
+        public String getApplicationName() {
+            return appName;
+        }
+
+        public ApplicationStatus getStatus() {
+            return status;
+        }
+    }
+
+    /**
+     * CC's reply to a node registration: either the node parameters on
+     * success, or the exception that caused the registration to fail.
+     */
+    public static class NodeRegistrationResult extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final NodeParameters params;
+
+        private final Exception exception;
+
+        public NodeRegistrationResult(NodeParameters params, Exception exception) {
+            this.params = params;
+            this.exception = exception;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.NODE_REGISTRATION_RESULT;
+        }
+
+        public NodeParameters getNodeParameters() {
+            return params;
+        }
+
+        public Exception getException() {
+            return exception;
+        }
+    }
+
+    public static class StartTasksFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final String appName;
+        private final JobId jobId;
+        private final byte[] planBytes;
+        private final List<TaskAttemptDescriptor> taskDescriptors;
+        private final Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies;
+        private final EnumSet<JobFlag> flags;
+
+        public StartTasksFunction(String appName, JobId jobId, byte[] planBytes,
+                List<TaskAttemptDescriptor> taskDescriptors,
+                Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies, EnumSet<JobFlag> flags) {
+            this.appName = appName;
+            this.jobId = jobId;
+            this.planBytes = planBytes;
+            this.taskDescriptors = taskDescriptors;
+            this.connectorPolicies = connectorPolicies;
+            this.flags = flags;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.START_TASKS;
+        }
+
+        public String getAppName() {
+            return appName;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public byte[] getPlanBytes() {
+            return planBytes;
+        }
+
+        public List<TaskAttemptDescriptor> getTaskDescriptors() {
+            return taskDescriptors;
+        }
+
+        public Map<ConnectorDescriptorId, IConnectorPolicy> getConnectorPolicies() {
+            return connectorPolicies;
+        }
+
+        public EnumSet<JobFlag> getFlags() {
+            return flags;
+        }
+    }
+
+    public static class AbortTasksFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+        private final List<TaskAttemptId> tasks;
+
+        public AbortTasksFunction(JobId jobId, List<TaskAttemptId> tasks) {
+            this.jobId = jobId;
+            this.tasks = tasks;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.ABORT_TASKS;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public List<TaskAttemptId> getTasks() {
+            return tasks;
+        }
+    }
+
+    public static class CleanupJobletFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+        private final JobStatus status;
+
+        public CleanupJobletFunction(JobId jobId, JobStatus status) {
+            this.jobId = jobId;
+            this.status = status;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.CLEANUP_JOBLET;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public JobStatus getStatus() {
+            return status;
+        }
+    }
+
+    public static class CreateApplicationFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final String appName;
+        private final boolean deployHar;
+        private final byte[] serializedDistributedState;
+
+        public CreateApplicationFunction(String appName, boolean deployHar, byte[] serializedDistributedState) {
+            this.appName = appName;
+            this.deployHar = deployHar;
+            this.serializedDistributedState = serializedDistributedState;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.CREATE_APPLICATION;
+        }
+
+        public String getAppName() {
+            return appName;
+        }
+
+        public boolean isDeployHar() {
+            return deployHar;
+        }
+
+        public byte[] getSerializedDistributedState() {
+            return serializedDistributedState;
+        }
+    }
+
+    public static class DestroyApplicationFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final String appName;
+
+        public DestroyApplicationFunction(String appName) {
+            this.appName = appName;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.DESTROY_APPLICATION;
+        }
+
+        public String getAppName() {
+            return appName;
+        }
+    }
+
+    /** Parameterless request; answered with GetNodeControllersInfoResponseFunction. */
+    public static class GetNodeControllersInfoFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.GET_NODE_CONTROLLERS_INFO;
+        }
+    }
+
+    public static class GetNodeControllersInfoResponseFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final Map<String, NodeControllerInfo> ncInfos;
+
+        public GetNodeControllersInfoResponseFunction(Map<String, NodeControllerInfo> ncInfos) {
+            this.ncInfos = ncInfos;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.GET_NODE_CONTROLLERS_INFO_RESPONSE;
+        }
+
+        public Map<String, NodeControllerInfo> getNodeControllerInfos() {
+            return ncInfos;
+        }
+    }
+
+    /**
+     * Tells an NC where a partition can be fetched from. Hot-path message
+     * with a hand-coded binary encoding.
+     */
+    public static class ReportPartitionAvailabilityFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final PartitionId pid;
+        private final NetworkAddress networkAddress;
+
+        public ReportPartitionAvailabilityFunction(PartitionId pid, NetworkAddress networkAddress) {
+            this.pid = pid;
+            this.networkAddress = networkAddress;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REPORT_PARTITION_AVAILABILITY;
+        }
+
+        public PartitionId getPartitionId() {
+            return pid;
+        }
+
+        public NetworkAddress getNetworkAddress() {
+            return networkAddress;
+        }
+
+        public static Object deserialize(ByteBuffer buffer, int length) throws Exception {
+            ByteArrayInputStream bais = new ByteArrayInputStream(buffer.array(), buffer.position(), length);
+            DataInputStream dis = new DataInputStream(bais);
+
+            // Read PartitionId
+            PartitionId pid = readPartitionId(dis);
+
+            // Read NetworkAddress
+            NetworkAddress networkAddress = readNetworkAddress(dis);
+
+            return new ReportPartitionAvailabilityFunction(pid, networkAddress);
+        }
+
+        public static void serialize(OutputStream out, Object object) throws Exception {
+            ReportPartitionAvailabilityFunction fn = (ReportPartitionAvailabilityFunction) object;
+
+            DataOutputStream dos = new DataOutputStream(out);
+
+            // Write PartitionId
+            writePartitionId(dos, fn.getPartitionId());
+
+            // Write NetworkAddress
+            writeNetworkAddress(dos, fn.getNetworkAddress());
+        }
+    }
+
+    /**
+     * Payload codec for the CC/NC IPC channel. Messages are a one-byte
+     * FunctionId ordinal followed by either a hand-coded encoding (for the
+     * three hot-path partition functions) or a Java-serialized payload.
+     */
+    public static class SerializerDeserializer implements IPayloadSerializerDeserializer {
+        private final JavaSerializationBasedPayloadSerializerDeserializer javaSerde;
+
+        public SerializerDeserializer() {
+            javaSerde = new JavaSerializationBasedPayloadSerializerDeserializer();
+        }
+
+        @Override
+        public Object deserializeObject(ByteBuffer buffer, int length) throws Exception {
+            if (length < FID_CODE_SIZE) {
+                throw new IllegalStateException("Message size too small: " + length);
+            }
+            byte fid = buffer.get();
+            return deserialize(fid, buffer, length - FID_CODE_SIZE);
+        }
+
+        @Override
+        public Exception deserializeException(ByteBuffer buffer, int length) throws Exception {
+            if (length < FID_CODE_SIZE) {
+                throw new IllegalStateException("Message size too small: " + length);
+            }
+            byte fid = buffer.get();
+            // Exceptions are always sent with the OTHER fid (Java serialization).
+            if (fid != FunctionId.OTHER.ordinal()) {
+                throw new IllegalStateException("Expected FID for OTHER, found: " + fid);
+            }
+            return (Exception) deserialize(fid, buffer, length - FID_CODE_SIZE);
+        }
+
+        @Override
+        public byte[] serializeObject(Object object) throws Exception {
+            if (object instanceof Function) {
+                Function fn = (Function) object;
+                return serialize(object, (byte) fn.getFunctionId().ordinal());
+            } else {
+                return serialize(object, (byte) FunctionId.OTHER.ordinal());
+            }
+        }
+
+        @Override
+        public byte[] serializeException(Exception object) throws Exception {
+            return serialize(object, (byte) FunctionId.OTHER.ordinal());
+        }
+
+        private byte[] serialize(Object object, byte fid) throws Exception {
+            ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            baos.write(fid);
+            try {
+                serialize(baos, object, fid);
+            } catch (Exception e) {
+                LOGGER.log(Level.SEVERE, "Error serializing " + object, e);
+                throw e;
+            }
+            baos.close();
+            return baos.toByteArray();
+        }
+
+        private void serialize(OutputStream out, Object object, byte fid) throws Exception {
+            // Hand-coded fast paths; all other function ids fall through to
+            // Java serialization.
+            switch (FunctionId.values()[fid]) {
+                case REGISTER_PARTITION_PROVIDER:
+                    RegisterPartitionProviderFunction.serialize(out, object);
+                    return;
+
+                case REGISTER_PARTITION_REQUEST:
+                    RegisterPartitionRequestFunction.serialize(out, object);
+                    return;
+
+                case REPORT_PARTITION_AVAILABILITY:
+                    ReportPartitionAvailabilityFunction.serialize(out, object);
+                    return;
+            }
+            JavaSerializationBasedPayloadSerializerDeserializer.serialize(out, object);
+        }
+
+        private Object deserialize(byte fid, ByteBuffer buffer, int length) throws Exception {
+            // Must mirror the dispatch in serialize(OutputStream, Object, byte).
+            switch (FunctionId.values()[fid]) {
+                case REGISTER_PARTITION_PROVIDER:
+                    return RegisterPartitionProviderFunction.deserialize(buffer, length);
+
+                case REGISTER_PARTITION_REQUEST:
+                    return RegisterPartitionRequestFunction.deserialize(buffer, length);
+
+                case REPORT_PARTITION_AVAILABILITY:
+                    return ReportPartitionAvailabilityFunction.deserialize(buffer, length);
+            }
+
+            return javaSerde.deserializeObject(buffer, length);
+        }
+    }
+
+    // --- Hand-coded encodings for the types embedded in the hot-path
+    // --- messages. Each readX must consume exactly what writeX produced.
+
+    private static PartitionId readPartitionId(DataInputStream dis) throws IOException {
+        long jobId = dis.readLong();
+        int cdid = dis.readInt();
+        int senderIndex = dis.readInt();
+        int receiverIndex = dis.readInt();
+        PartitionId pid = new PartitionId(new JobId(jobId), new ConnectorDescriptorId(cdid), senderIndex, receiverIndex);
+        return pid;
+    }
+
+    private static void writePartitionId(DataOutputStream dos, PartitionId pid) throws IOException {
+        dos.writeLong(pid.getJobId().getId());
+        dos.writeInt(pid.getConnectorDescriptorId().getId());
+        dos.writeInt(pid.getSenderIndex());
+        dos.writeInt(pid.getReceiverIndex());
+    }
+
+    private static TaskAttemptId readTaskAttemptId(DataInputStream dis) throws IOException {
+        int odid = dis.readInt();
+        int aid = dis.readInt();
+        int partition = dis.readInt();
+        int attempt = dis.readInt();
+        TaskAttemptId taId = new TaskAttemptId(new TaskId(new ActivityId(new OperatorDescriptorId(odid), aid),
+                partition), attempt);
+        return taId;
+    }
+
+    private static void writeTaskAttemptId(DataOutputStream dos, TaskAttemptId taId) throws IOException {
+        TaskId tid = taId.getTaskId();
+        ActivityId aid = tid.getActivityId();
+        OperatorDescriptorId odId = aid.getOperatorDescriptorId();
+        dos.writeInt(odId.getId());
+        dos.writeInt(aid.getLocalId());
+        dos.writeInt(tid.getPartition());
+        dos.writeInt(taId.getAttempt());
+    }
+
+    private static PartitionState readPartitionState(DataInputStream dis) throws IOException {
+        PartitionState state = PartitionState.values()[dis.readInt()];
+        return state;
+    }
+
+    private static void writePartitionState(DataOutputStream dos, PartitionState state) throws IOException {
+        dos.writeInt(state.ordinal());
+    }
+
+    private static NetworkAddress readNetworkAddress(DataInputStream dis) throws IOException {
+        int bLen = dis.readInt();
+        byte[] ipAddress = new byte[bLen];
+        // readFully, not read: InputStream.read(byte[]) may return before the
+        // buffer is filled, which would silently truncate the address bytes.
+        dis.readFully(ipAddress);
+        int port = dis.readInt();
+        NetworkAddress networkAddress = new NetworkAddress(ipAddress, port);
+        return networkAddress;
+    }
+
+    private static void writeNetworkAddress(DataOutputStream dos, NetworkAddress networkAddress) throws IOException {
+        byte[] ipAddress = networkAddress.getIpAddress();
+        dos.writeInt(ipAddress.length);
+        dos.write(ipAddress);
+        dos.writeInt(networkAddress.getPort());
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
new file mode 100644
index 0000000..bbaab4e
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.common.ipc;
+
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
+import edu.uci.ics.hyracks.control.common.base.IClusterController;
+import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
+import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
+import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
+import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
+import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
+import edu.uci.ics.hyracks.control.common.job.profiling.om.TaskProfile;
+import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
+
+/**
+ * Node-controller-side stub for talking to the cluster controller. Every
+ * method wraps its arguments in the matching {@link CCNCFunctions} function
+ * object and hands it to {@code ipcHandle.send(-1, fn, null)}; no reply is
+ * read here (NOTE(review): the -1 id presumably marks a one-way message and
+ * any response arrives through a separate IPC callback -- confirm against
+ * IIPCHandle's contract).
+ */
+public class ClusterControllerRemoteProxy implements IClusterController {
+    /** IPC connection to the cluster controller; all calls are sent over it. */
+    private final IIPCHandle ipcHandle;
+
+    public ClusterControllerRemoteProxy(IIPCHandle ipcHandle) {
+        this.ipcHandle = ipcHandle;
+    }
+
+    @Override
+    public void registerNode(NodeRegistration reg) throws Exception {
+        CCNCFunctions.RegisterNodeFunction fn = new CCNCFunctions.RegisterNodeFunction(reg);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void unregisterNode(String nodeId) throws Exception {
+        CCNCFunctions.UnregisterNodeFunction fn = new CCNCFunctions.UnregisterNodeFunction(nodeId);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
+            throws Exception {
+        CCNCFunctions.NotifyTaskCompleteFunction fn = new CCNCFunctions.NotifyTaskCompleteFunction(jobId, taskId,
+                nodeId, statistics);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, String details) throws Exception {
+        CCNCFunctions.NotifyTaskFailureFunction fn = new CCNCFunctions.NotifyTaskFailureFunction(jobId, taskId, nodeId,
+                details);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void notifyJobletCleanup(JobId jobId, String nodeId) throws Exception {
+        CCNCFunctions.NotifyJobletCleanupFunction fn = new CCNCFunctions.NotifyJobletCleanupFunction(jobId, nodeId);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void nodeHeartbeat(String id, HeartbeatData hbData) throws Exception {
+        CCNCFunctions.NodeHeartbeatFunction fn = new CCNCFunctions.NodeHeartbeatFunction(id, hbData);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void reportProfile(String id, List<JobProfile> profiles) throws Exception {
+        CCNCFunctions.ReportProfileFunction fn = new CCNCFunctions.ReportProfileFunction(id, profiles);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void registerPartitionProvider(PartitionDescriptor partitionDescriptor) throws Exception {
+        CCNCFunctions.RegisterPartitionProviderFunction fn = new CCNCFunctions.RegisterPartitionProviderFunction(
+                partitionDescriptor);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception {
+        CCNCFunctions.RegisterPartitionRequestFunction fn = new CCNCFunctions.RegisterPartitionRequestFunction(
+                partitionRequest);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void notifyApplicationStateChange(String nodeId, String appName, ApplicationStatus status) throws Exception {
+        CCNCFunctions.ApplicationStateChangeResponseFunction fn = new CCNCFunctions.ApplicationStateChangeResponseFunction(
+                nodeId, appName, status);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception {
+        CCNCFunctions.SendApplicationMessageFunction fn = new CCNCFunctions.SendApplicationMessageFunction(data,
+                appName, nodeId);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    // Despite the "get" name this returns void: the request is sent one-way
+    // and the CC's answer must be delivered through a separate callback path.
+    @Override
+    public void getNodeControllerInfos() throws Exception {
+        ipcHandle.send(-1, new CCNCFunctions.GetNodeControllersInfoFunction(), null);
+    }
+}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionDescriptor.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionDescriptor.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionDescriptor.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionDescriptor.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionRequest.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionRequest.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionRequest.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionRequest.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionState.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionState.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionState.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/PartitionState.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/TaskAttemptDescriptor.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/TaskAttemptDescriptor.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/TaskAttemptDescriptor.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/TaskAttemptDescriptor.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/Counter.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/Counter.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/Counter.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/Counter.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/MultiResolutionEventProfiler.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/MultiResolutionEventProfiler.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/MultiResolutionEventProfiler.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/counters/MultiResolutionEventProfiler.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/AbstractProfile.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/AbstractProfile.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/AbstractProfile.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/AbstractProfile.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobProfile.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobProfile.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobProfile.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobProfile.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobletProfile.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobletProfile.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobletProfile.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/JobletProfile.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/PartitionProfile.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/PartitionProfile.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/PartitionProfile.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/PartitionProfile.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/TaskProfile.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/TaskProfile.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/TaskProfile.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/job/profiling/om/TaskProfile.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/logs/LogFile.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/logs/LogFile.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/logs/LogFile.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/logs/LogFile.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/AbstractService.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/AbstractService.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/AbstractService.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/AbstractService.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/IService.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/IService.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/IService.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/service/IService.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/AbstractWork.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/AbstractWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/AbstractWork.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/AbstractWork.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/FutureValue.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/FutureValue.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/FutureValue.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/FutureValue.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IPCResponder.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IPCResponder.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IPCResponder.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IPCResponder.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IResultCallback.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IResultCallback.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IResultCallback.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/IResultCallback.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/SynchronizableWork.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/SynchronizableWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/SynchronizableWork.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/SynchronizableWork.java
diff --git a/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/WorkQueue.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/WorkQueue.java
similarity index 100%
rename from hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/WorkQueue.java
rename to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/work/WorkQueue.java
diff --git a/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks/hyracks-control/hyracks-control-nc/pom.xml
new file mode 100644
index 0000000..c662a75
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -0,0 +1,53 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-control-nc</artifactId>
+  <name>hyracks-control-nc</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-control</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.dcache</groupId>
+  		<artifactId>dcache-client</artifactId>
+  		<version>0.0.1</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-net</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  </dependencies>
+  <reporting>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <version>2.0.1</version>
+      </plugin>
+    </plugins>
+  </reporting>
+</project>
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NCDriver.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NCDriver.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NCDriver.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NCDriver.java
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
new file mode 100644
index 0000000..0195143
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
@@ -0,0 +1,465 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc;
+
+import java.io.File;
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryUsage;
+import java.lang.management.OperatingSystemMXBean;
+import java.lang.management.RuntimeMXBean;
+import java.lang.management.ThreadMXBean;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Hashtable;
+import java.util.List;
+import java.util.Map;
+import java.util.StringTokenizer;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
+import edu.uci.ics.hyracks.api.io.IODeviceHandle;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.common.AbstractRemoteService;
+import edu.uci.ics.hyracks.control.common.base.IClusterController;
+import edu.uci.ics.hyracks.control.common.context.ServerContext;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NodeParameters;
+import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
+import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
+import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatSchema;
+import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions;
+import edu.uci.ics.hyracks.control.common.ipc.ClusterControllerRemoteProxy;
+import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
+import edu.uci.ics.hyracks.control.common.work.FutureValue;
+import edu.uci.ics.hyracks.control.common.work.WorkQueue;
+import edu.uci.ics.hyracks.control.nc.application.NCApplicationContext;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.control.nc.net.NetworkManager;
+import edu.uci.ics.hyracks.control.nc.partitions.PartitionManager;
+import edu.uci.ics.hyracks.control.nc.runtime.RootHyracksContext;
+import edu.uci.ics.hyracks.control.nc.work.AbortTasksWork;
+import edu.uci.ics.hyracks.control.nc.work.ApplicationMessageWork;
+import edu.uci.ics.hyracks.control.nc.work.BuildJobProfilesWork;
+import edu.uci.ics.hyracks.control.nc.work.CleanupJobletWork;
+import edu.uci.ics.hyracks.control.nc.work.CreateApplicationWork;
+import edu.uci.ics.hyracks.control.nc.work.DestroyApplicationWork;
+import edu.uci.ics.hyracks.control.nc.work.ReportPartitionAvailabilityWork;
+import edu.uci.ics.hyracks.control.nc.work.StartTasksWork;
+import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
+import edu.uci.ics.hyracks.ipc.api.IIPCI;
+import edu.uci.ics.hyracks.ipc.api.IPCPerformanceCounters;
+import edu.uci.ics.hyracks.ipc.impl.IPCSystem;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+
+public class NodeControllerService extends AbstractRemoteService {
+    private static Logger LOGGER = Logger.getLogger(NodeControllerService.class.getName());
+
+    private NCConfig ncConfig;
+
+    private final String id;
+
+    private final IHyracksRootContext ctx;
+
+    private final IPCSystem ipc;
+
+    private final PartitionManager partitionManager;
+
+    private final NetworkManager netManager;
+
+    private final WorkQueue queue;
+
+    private final Timer timer;
+
+    private boolean registrationPending;
+
+    private Exception registrationException;
+
+    private IClusterController ccs;
+
+    private final Map<JobId, Joblet> jobletMap;
+
+    private final ExecutorService executor;
+
+    private NodeParameters nodeParameters;
+
+    private HeartbeatTask heartbeatTask;
+
+    private final ServerContext serverCtx;
+
+    private final Map<String, NCApplicationContext> applications;
+
+    private final MemoryMXBean memoryMXBean;
+
+    private final List<GarbageCollectorMXBean> gcMXBeans;
+
+    private final ThreadMXBean threadMXBean;
+
+    private final RuntimeMXBean runtimeMXBean;
+
+    private final OperatingSystemMXBean osMXBean;
+
+    private final Mutable<FutureValue<Map<String, NodeControllerInfo>>> getNodeControllerInfosAcceptor;
+
+    /**
+     * Builds the node controller from its configuration: IPC system bound to
+     * the cluster-network address (ephemeral port 0), IO manager over the
+     * configured devices, partition and network managers, work queue, and the
+     * JVM MXBeans sampled by the heartbeat. Networking is not started here;
+     * see start().
+     */
+    public NodeControllerService(NCConfig ncConfig) throws Exception {
+        this.ncConfig = ncConfig;
+        id = ncConfig.nodeId;
+        executor = Executors.newCachedThreadPool();
+        NodeControllerIPCI ipci = new NodeControllerIPCI();
+        ipc = new IPCSystem(new InetSocketAddress(ncConfig.clusterNetIPAddress, 0), ipci,
+                new CCNCFunctions.SerializerDeserializer());
+        this.ctx = new RootHyracksContext(this, new IOManager(getDevices(ncConfig.ioDevices), executor));
+        // NOTE(review): the null-id check runs after the IPC system and IO
+        // manager are already constructed -- consider validating first.
+        if (id == null) {
+            throw new Exception("id not set");
+        }
+        partitionManager = new PartitionManager(this);
+        netManager = new NetworkManager(getIpAddress(ncConfig), partitionManager, ncConfig.nNetThreads);
+
+        queue = new WorkQueue();
+        // Hashtable gives coarse synchronized access; presumably chosen for
+        // cross-thread reads from work items -- confirm before replacing.
+        jobletMap = new Hashtable<JobId, Joblet>();
+        timer = new Timer(true);
+        serverCtx = new ServerContext(ServerContext.ServerType.NODE_CONTROLLER, new File(new File(
+                NodeControllerService.class.getName()), id));
+        applications = new Hashtable<String, NCApplicationContext>();
+        memoryMXBean = ManagementFactory.getMemoryMXBean();
+        gcMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
+        threadMXBean = ManagementFactory.getThreadMXBean();
+        runtimeMXBean = ManagementFactory.getRuntimeMXBean();
+        osMXBean = ManagementFactory.getOperatingSystemMXBean();
+        registrationPending = true;
+        getNodeControllerInfosAcceptor = new MutableObject<FutureValue<Map<String, NodeControllerInfo>>>();
+    }
+
+    /** Root Hyracks context (IO manager access) for tasks on this node. */
+    public IHyracksRootContext getRootContext() {
+        return ctx;
+    }
+
+    /**
+     * Parses the comma-separated io-devices config string into one
+     * IODeviceHandle per path (each with workspace subdirectory ".").
+     * Whitespace around each path is trimmed.
+     */
+    private static List<IODeviceHandle> getDevices(String ioDevices) {
+        List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
+        StringTokenizer tok = new StringTokenizer(ioDevices, ",");
+        while (tok.hasMoreElements()) {
+            String devPath = tok.nextToken().trim();
+            devices.add(new IODeviceHandle(new File(devPath), "."));
+        }
+        return devices;
+    }
+
+    /**
+     * Records the CC's answer to registerNode (parameters on success, the
+     * exception on failure), clears registrationPending, and wakes the
+     * start() thread blocked in wait().
+     */
+    private synchronized void setNodeRegistrationResult(NodeParameters parameters, Exception exception) {
+        this.nodeParameters = parameters;
+        this.registrationException = exception;
+        this.registrationPending = false;
+        notifyAll();
+    }
+
+    /**
+     * Synchronously fetches the cluster's node-controller map from the CC.
+     * Only one request may be outstanding at a time: callers queue on the
+     * acceptor's monitor until the slot (its value) is free, park a
+     * FutureValue there, fire the one-way RPC, and block on the future until
+     * setNodeControllersInfo delivers the reply.
+     */
+    public Map<String, NodeControllerInfo> getNodeControllersInfo() throws Exception {
+        FutureValue<Map<String, NodeControllerInfo>> fv = new FutureValue<Map<String, NodeControllerInfo>>();
+        synchronized (getNodeControllerInfosAcceptor) {
+            while (getNodeControllerInfosAcceptor.getValue() != null) {
+                getNodeControllerInfosAcceptor.wait();
+            }
+            getNodeControllerInfosAcceptor.setValue(fv);
+        }
+        ccs.getNodeControllerInfos();
+        return fv.get();
+    }
+
+    /**
+     * Completes the pending getNodeControllersInfo() future with the CC's
+     * reply and frees the single-request slot, waking any queued callers.
+     * NOTE(review): if this is ever invoked with no request pending, fv is
+     * null and fv.setValue throws NPE -- confirm the CC only sends this as a
+     * response.
+     */
+    private void setNodeControllersInfo(Map<String, NodeControllerInfo> ncInfos) {
+        FutureValue<Map<String, NodeControllerInfo>> fv;
+        synchronized (getNodeControllerInfosAcceptor) {
+            fv = getNodeControllerInfosAcceptor.getValue();
+            getNodeControllerInfosAcceptor.setValue(null);
+            getNodeControllerInfosAcceptor.notifyAll();
+        }
+        fv.setValue(ncInfos);
+    }
+
+    /**
+     * Starts the node controller: brings up IPC and the data network,
+     * connects to the CC, registers this node (OS/JVM/heartbeat metadata
+     * included), blocks until the CC acknowledges, then starts the work queue
+     * and schedules the heartbeat (and, if configured, profile-dump) timer
+     * tasks. Statement order matters: registration needs IPC and the network
+     * address already up.
+     *
+     * @throws Exception if registration fails (rethrows the CC-reported
+     *             exception) or any subsystem fails to start
+     */
+    @Override
+    public void start() throws Exception {
+        LOGGER.log(Level.INFO, "Starting NodeControllerService");
+        ipc.start();
+        netManager.start();
+        IIPCHandle ccIPCHandle = ipc.getHandle(new InetSocketAddress(ncConfig.ccHost, ncConfig.ccPort));
+        this.ccs = new ClusterControllerRemoteProxy(ccIPCHandle);
+        HeartbeatSchema.GarbageCollectorInfo[] gcInfos = new HeartbeatSchema.GarbageCollectorInfo[gcMXBeans.size()];
+        for (int i = 0; i < gcInfos.length; ++i) {
+            gcInfos[i] = new HeartbeatSchema.GarbageCollectorInfo(gcMXBeans.get(i).getName());
+        }
+        HeartbeatSchema hbSchema = new HeartbeatSchema(gcInfos);
+        ccs.registerNode(new NodeRegistration(ipc.getSocketAddress(), id, ncConfig, netManager.getNetworkAddress(),
+                osMXBean.getName(), osMXBean.getArch(), osMXBean.getVersion(), osMXBean.getAvailableProcessors(),
+                runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(), runtimeMXBean
+                        .getClassPath(), runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(),
+                runtimeMXBean.getInputArguments(), runtimeMXBean.getSystemProperties(), hbSchema));
+
+        // Block until setNodeRegistrationResult is called with the CC's reply.
+        synchronized (this) {
+            while (registrationPending) {
+                wait();
+            }
+        }
+        if (registrationException != null) {
+            throw registrationException;
+        }
+
+        queue.start();
+
+        heartbeatTask = new HeartbeatTask(ccs);
+
+        // Schedule heartbeat generator.
+        timer.schedule(heartbeatTask, 0, nodeParameters.getHeartbeatPeriod());
+
+        if (nodeParameters.getProfileDumpPeriod() > 0) {
+            // Schedule profile dump generator.
+            timer.schedule(new ProfileDumpTask(ccs), 0, nodeParameters.getProfileDumpPeriod());
+        }
+
+        LOGGER.log(Level.INFO, "Started NodeControllerService");
+    }
+
+    /**
+     * Stops the node controller: forcibly shuts down the task executor,
+     * closes the partition manager, cancels the heartbeat, and stops the
+     * network manager and work queue. Roughly the reverse of start();
+     * NOTE(review): does not unregister from the CC or stop the IPC system
+     * or timer -- confirm that is intentional.
+     */
+    @Override
+    public void stop() throws Exception {
+        LOGGER.log(Level.INFO, "Stopping NodeControllerService");
+        executor.shutdownNow();
+        partitionManager.close();
+        heartbeatTask.cancel();
+        netManager.stop();
+        queue.stop();
+        LOGGER.log(Level.INFO, "Stopped NodeControllerService");
+    }
+
+    // --- Plain accessors over state created in the constructor / start(). ---
+
+    /** This node's id (ncConfig.nodeId). */
+    public String getId() {
+        return id;
+    }
+
+    public ServerContext getServerContext() {
+        return serverCtx;
+    }
+
+    /** Live map of deployed applications by name (synchronized Hashtable). */
+    public Map<String, NCApplicationContext> getApplications() {
+        return applications;
+    }
+
+    /** Live map of active joblets by job id (synchronized Hashtable). */
+    public Map<JobId, Joblet> getJobletMap() {
+        return jobletMap;
+    }
+
+    public NetworkManager getNetworkManager() {
+        return netManager;
+    }
+
+    public PartitionManager getPartitionManager() {
+        return partitionManager;
+    }
+
+    /** CC proxy; null until start() has connected. */
+    public IClusterController getClusterController() {
+        return ccs;
+    }
+
+    /** Parameters returned by the CC at registration; null before start(). */
+    public NodeParameters getNodeParameters() {
+        return nodeParameters;
+    }
+
+    public Executor getExecutor() {
+        return executor;
+    }
+
+    public NCConfig getConfiguration() {
+        return ncConfig;
+    }
+
+    public WorkQueue getWorkQueue() {
+        return queue;
+    }
+
+    /**
+     * Parses ncConfig.dataIPAddress (dotted-quad IPv4 only) into an
+     * InetAddress for the data network.
+     *
+     * @throws Exception if the string is not a syntactically valid IPv4
+     *             address or any octet is outside 0-255
+     */
+    private static InetAddress getIpAddress(NCConfig ncConfig) throws Exception {
+        String ipaddrStr = ncConfig.dataIPAddress;
+        ipaddrStr = ipaddrStr.trim();
+        Pattern pattern = Pattern.compile("(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})");
+        Matcher m = pattern.matcher(ipaddrStr);
+        if (!m.matches()) {
+            throw new Exception(MessageFormat.format(
+                    "Connection Manager IP Address String %s does is not a valid IP Address.", ipaddrStr));
+        }
+        byte[] ipBytes = new byte[4];
+        for (int i = 0; i < 4; ++i) {
+            int octet = Integer.parseInt(m.group(i + 1));
+            if (octet > 255) {
+                // \d{1,3} admits values like 999, which the (byte) cast used
+                // to narrow silently into a wrong address; reject instead.
+                throw new Exception(MessageFormat.format(
+                        "Connection Manager IP Address String %s does is not a valid IP Address.", ipaddrStr));
+            }
+            ipBytes[i] = (byte) octet;
+        }
+        return InetAddress.getByAddress(ipBytes);
+    }
+
+    /**
+     * Periodic task that samples JVM (memory, thread, GC), network and IPC
+     * statistics into a reusable HeartbeatData and reports it to the CC.
+     */
+    private class HeartbeatTask extends TimerTask {
+        // Target of the heartbeat reports; never reassigned, so final.
+        private final IClusterController cc;
+
+        // Reused across runs to avoid per-heartbeat allocation.
+        private final HeartbeatData hbData;
+
+        public HeartbeatTask(IClusterController cc) {
+            this.cc = cc;
+            hbData = new HeartbeatData();
+            hbData.gcCollectionCounts = new long[gcMXBeans.size()];
+            hbData.gcCollectionTimes = new long[gcMXBeans.size()];
+        }
+
+        @Override
+        public void run() {
+            MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
+            hbData.heapInitSize = heapUsage.getInit();
+            hbData.heapUsedSize = heapUsage.getUsed();
+            hbData.heapCommittedSize = heapUsage.getCommitted();
+            hbData.heapMaxSize = heapUsage.getMax();
+            MemoryUsage nonheapUsage = memoryMXBean.getNonHeapMemoryUsage();
+            hbData.nonheapInitSize = nonheapUsage.getInit();
+            hbData.nonheapUsedSize = nonheapUsage.getUsed();
+            hbData.nonheapCommittedSize = nonheapUsage.getCommitted();
+            hbData.nonheapMaxSize = nonheapUsage.getMax();
+            hbData.threadCount = threadMXBean.getThreadCount();
+            hbData.peakThreadCount = threadMXBean.getPeakThreadCount();
+            hbData.totalStartedThreadCount = threadMXBean.getTotalStartedThreadCount();
+            hbData.systemLoadAverage = osMXBean.getSystemLoadAverage();
+            int gcN = gcMXBeans.size();
+            for (int i = 0; i < gcN; ++i) {
+                GarbageCollectorMXBean gcMXBean = gcMXBeans.get(i);
+                hbData.gcCollectionCounts[i] = gcMXBean.getCollectionCount();
+                hbData.gcCollectionTimes[i] = gcMXBean.getCollectionTime();
+            }
+
+            MuxDemuxPerformanceCounters netPC = netManager.getPerformanceCounters();
+            hbData.netPayloadBytesRead = netPC.getPayloadBytesRead();
+            hbData.netPayloadBytesWritten = netPC.getPayloadBytesWritten();
+            hbData.netSignalingBytesRead = netPC.getSignalingBytesRead();
+            hbData.netSignalingBytesWritten = netPC.getSignalingBytesWritten();
+
+            IPCPerformanceCounters ipcPC = ipc.getPerformanceCounters();
+            hbData.ipcMessagesSent = ipcPC.getMessageSentCount();
+            hbData.ipcMessageBytesSent = ipcPC.getMessageBytesSent();
+            hbData.ipcMessagesReceived = ipcPC.getMessageReceivedCount();
+            hbData.ipcMessageBytesReceived = ipcPC.getMessageBytesReceived();
+
+            try {
+                cc.nodeHeartbeat(id, hbData);
+            } catch (Exception e) {
+                // A missed heartbeat is non-fatal; log it rather than dumping
+                // the stack trace to stderr.
+                LOGGER.log(Level.WARNING, "Exception sending heartbeat", e);
+            }
+        }
+    }
+
+    /**
+     * Periodic task that collects job profiles (via a BuildJobProfilesWork
+     * scheduled synchronously on the work queue) and reports any non-empty
+     * result to the CC.
+     */
+    private class ProfileDumpTask extends TimerTask {
+        // Target of the profile reports; never reassigned, so final.
+        private final IClusterController cc;
+
+        public ProfileDumpTask(IClusterController cc) {
+            this.cc = cc;
+        }
+
+        @Override
+        public void run() {
+            try {
+                FutureValue<List<JobProfile>> fv = new FutureValue<List<JobProfile>>();
+                BuildJobProfilesWork bjpw = new BuildJobProfilesWork(NodeControllerService.this, fv);
+                queue.scheduleAndSync(bjpw);
+                List<JobProfile> profiles = fv.get();
+                if (!profiles.isEmpty()) {
+                    cc.reportProfile(id, profiles);
+                }
+            } catch (Exception e) {
+                // Profile dumps are best-effort; log instead of printing the
+                // stack trace to stderr.
+                LOGGER.log(Level.WARNING, "Exception reporting profiles", e);
+            }
+        }
+    }
+
+    /**
+     * IPC endpoint for messages arriving from the cluster controller.
+     * Each known function is translated into a Work object and scheduled on
+     * the work queue; only the registration result and node-controllers-info
+     * response are handled inline on the IPC delivery thread.
+     */
+    private final class NodeControllerIPCI implements IIPCI {
+        @Override
+        public void deliverIncomingMessage(IIPCHandle handle, long mid, long rmid, Object payload, Exception exception) {
+            CCNCFunctions.Function fn = (CCNCFunctions.Function) payload;
+            switch (fn.getFunctionId()) {
+                case SEND_APPLICATION_MESSAGE: {
+                    CCNCFunctions.SendApplicationMessageFunction amf = (CCNCFunctions.SendApplicationMessageFunction) fn;
+                    queue.schedule(new ApplicationMessageWork(NodeControllerService.this, amf.getMessage(), amf
+                            .getAppName(), amf.getNodeId()));
+                    return;
+                }
+                case START_TASKS: {
+                    CCNCFunctions.StartTasksFunction stf = (CCNCFunctions.StartTasksFunction) fn;
+                    queue.schedule(new StartTasksWork(NodeControllerService.this, stf.getAppName(), stf.getJobId(), stf
+                            .getPlanBytes(), stf.getTaskDescriptors(), stf.getConnectorPolicies(), stf.getFlags()));
+                    return;
+                }
+
+                case ABORT_TASKS: {
+                    CCNCFunctions.AbortTasksFunction atf = (CCNCFunctions.AbortTasksFunction) fn;
+                    queue.schedule(new AbortTasksWork(NodeControllerService.this, atf.getJobId(), atf.getTasks()));
+                    return;
+                }
+
+                case CLEANUP_JOBLET: {
+                    CCNCFunctions.CleanupJobletFunction cjf = (CCNCFunctions.CleanupJobletFunction) fn;
+                    queue.schedule(new CleanupJobletWork(NodeControllerService.this, cjf.getJobId(), cjf.getStatus()));
+                    return;
+                }
+
+                case CREATE_APPLICATION: {
+                    CCNCFunctions.CreateApplicationFunction caf = (CCNCFunctions.CreateApplicationFunction) fn;
+                    queue.schedule(new CreateApplicationWork(NodeControllerService.this, caf.getAppName(), caf
+                            .isDeployHar(), caf.getSerializedDistributedState()));
+                    return;
+                }
+
+                case DESTROY_APPLICATION: {
+                    CCNCFunctions.DestroyApplicationFunction daf = (CCNCFunctions.DestroyApplicationFunction) fn;
+                    queue.schedule(new DestroyApplicationWork(NodeControllerService.this, daf.getAppName()));
+                    return;
+                }
+
+                case REPORT_PARTITION_AVAILABILITY: {
+                    CCNCFunctions.ReportPartitionAvailabilityFunction rpaf = (CCNCFunctions.ReportPartitionAvailabilityFunction) fn;
+                    queue.schedule(new ReportPartitionAvailabilityWork(NodeControllerService.this, rpaf
+                            .getPartitionId(), rpaf.getNetworkAddress()));
+                    return;
+                }
+
+                // Handled inline rather than queued.
+                case NODE_REGISTRATION_RESULT: {
+                    CCNCFunctions.NodeRegistrationResult nrrf = (CCNCFunctions.NodeRegistrationResult) fn;
+                    setNodeRegistrationResult(nrrf.getNodeParameters(), nrrf.getException());
+                    return;
+                }
+
+                case GET_NODE_CONTROLLERS_INFO_RESPONSE: {
+                    CCNCFunctions.GetNodeControllersInfoResponseFunction gncirf = (CCNCFunctions.GetNodeControllersInfoResponseFunction) fn;
+                    setNodeControllersInfo(gncirf.getNodeControllerInfos());
+                    return;
+                }
+            }
+            throw new IllegalArgumentException("Unknown function: " + fn.getFunctionId());
+
+        }
+    }
+
+    /** Forwards an application-level message to the cluster controller. */
+    public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception {
+        ccs.sendApplicationMessageToCC(data, appName, nodeId);
+    }
+}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/application/NCApplicationContext.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/application/NCApplicationContext.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/application/NCApplicationContext.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/application/NCApplicationContext.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/FileHandle.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/FileHandle.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/FileHandle.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/FileHandle.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/IOManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/IOManager.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/IOManager.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/IOManager.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/WorkspaceFileFactory.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/WorkspaceFileFactory.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/WorkspaceFileFactory.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/io/WorkspaceFileFactory.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkInputChannel.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkInputChannel.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkInputChannel.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkInputChannel.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkOutputChannel.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkOutputChannel.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkOutputChannel.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkOutputChannel.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartition.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartition.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartition.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartition.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionWriter.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PipelinedPartition.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PipelinedPartition.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PipelinedPartition.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PipelinedPartition.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/ReceiveSideMaterializingCollector.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/ReceiveSideMaterializingCollector.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/ReceiveSideMaterializingCollector.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/ReceiveSideMaterializingCollector.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorSenderProfilingFrameWriter.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorSenderProfilingFrameWriter.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorSenderProfilingFrameWriter.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ConnectorSenderProfilingFrameWriter.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ProfilingPartitionWriterFactory.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ProfilingPartitionWriterFactory.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ProfilingPartitionWriterFactory.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/profiling/ProfilingPartitionWriterFactory.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/resources/DefaultDeallocatableRegistry.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/resources/DefaultDeallocatableRegistry.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/resources/DefaultDeallocatableRegistry.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/resources/DefaultDeallocatableRegistry.java
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/runtime/RootHyracksContext.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/runtime/RootHyracksContext.java
new file mode 100644
index 0000000..4651149
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/runtime/RootHyracksContext.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc.runtime;
+
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+/** Root context delegating to the owning NC service and its I/O manager. */
+public class RootHyracksContext implements IHyracksRootContext {
+    private final NodeControllerService nodeController;
+    private final IIOManager ioManager;
+
+    public RootHyracksContext(NodeControllerService ncs, IIOManager ioManager) {
+        this.nodeController = ncs;
+        this.ioManager = ioManager;
+    }
+
+    @Override
+    public Map<String, NodeControllerInfo> getNodeControllerInfos() throws Exception {
+        return nodeController.getNodeControllersInfo();
+    }
+
+    @Override
+    public IIOManager getIOManager() {
+        return ioManager;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/BuildJobProfilesWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/BuildJobProfilesWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/BuildJobProfilesWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/BuildJobProfilesWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CleanupJobletWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CleanupJobletWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CleanupJobletWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CleanupJobletWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CreateApplicationWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CreateApplicationWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CreateApplicationWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/CreateApplicationWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/DestroyApplicationWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/DestroyApplicationWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/DestroyApplicationWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/DestroyApplicationWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskCompleteWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskCompleteWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskCompleteWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskCompleteWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
diff --git a/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java
similarity index 100%
rename from hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java
rename to hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java
diff --git a/hyracks/hyracks-control/pom.xml b/hyracks/hyracks-control/pom.xml
new file mode 100644
index 0000000..13ebf41
--- /dev/null
+++ b/hyracks/hyracks-control/pom.xml
@@ -0,0 +1,18 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-control</artifactId>
+  <packaging>pom</packaging>
+  <name>hyracks-control</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hyracks-control-common</module>
+    <module>hyracks-control-cc</module>
+    <module>hyracks-control-nc</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-data/hyracks-data-std/pom.xml b/hyracks/hyracks-data/hyracks-data-std/pom.xml
new file mode 100644
index 0000000..85c8fd5
--- /dev/null
+++ b/hyracks/hyracks-data/hyracks-data-std/pom.xml
@@ -0,0 +1,32 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-data-std</artifactId>
+  <name>hyracks-data-std</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-data</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryComparatorFactory.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryComparatorFactory.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryComparatorFactory.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryComparatorFactory.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryHashFunctionFactory.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryHashFunctionFactory.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryHashFunctionFactory.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/PointableBinaryHashFunctionFactory.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/UTF8StringBinaryHashFunctionFamily.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/UTF8StringBinaryHashFunctionFamily.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/UTF8StringBinaryHashFunctionFamily.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/UTF8StringBinaryHashFunctionFamily.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/algorithms/BinarySearchAlgorithm.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/algorithms/BinarySearchAlgorithm.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/algorithms/BinarySearchAlgorithm.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/algorithms/BinarySearchAlgorithm.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/AbstractPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/AbstractPointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/AbstractPointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/AbstractPointable.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IComparable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IComparable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IComparable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IComparable.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IDataOutputProvider.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IDataOutputProvider.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IDataOutputProvider.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IDataOutputProvider.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IHashable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IHashable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IHashable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IHashable.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IMutableValueStorage.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IMutableValueStorage.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IMutableValueStorage.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IMutableValueStorage.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/INumeric.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/INumeric.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/INumeric.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/INumeric.java
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointable.java
new file mode 100644
index 0000000..d3accba
--- /dev/null
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointable.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.data.std.api;
+
+public interface IPointable extends IValueReference {
+    public void set(byte[] bytes, int start, int length);
+
+    public void set(IValueReference pointer);
+}
\ No newline at end of file
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointableFactory.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointableFactory.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointableFactory.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IPointableFactory.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IValueReference.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IValueReference.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IValueReference.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/api/IValueReference.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/collections/api/IValueReferenceVector.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/collections/api/IValueReferenceVector.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/collections/api/IValueReferenceVector.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/collections/api/IValueReferenceVector.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BooleanPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BooleanPointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BooleanPointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BooleanPointable.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BytePointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BytePointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BytePointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/BytePointable.java
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/DoublePointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/DoublePointable.java
new file mode 100644
index 0000000..5267086
--- /dev/null
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/DoublePointable.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.data.std.primitive;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.api.AbstractPointable;
+import edu.uci.ics.hyracks.data.std.api.IComparable;
+import edu.uci.ics.hyracks.data.std.api.IHashable;
+import edu.uci.ics.hyracks.data.std.api.INumeric;
+import edu.uci.ics.hyracks.data.std.api.IPointable;
+import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
+
+public final class DoublePointable extends AbstractPointable implements IHashable, IComparable, INumeric {
+    public static final ITypeTraits TYPE_TRAITS = new ITypeTraits() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public boolean isFixedLength() {
+            return true;
+        }
+
+        @Override
+        public int getFixedLength() {
+            return 8;
+        }
+    };
+
+    public static final IPointableFactory FACTORY = new IPointableFactory() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public IPointable createPointable() {
+            return new DoublePointable();
+        }
+
+        @Override
+        public ITypeTraits getTypeTraits() {
+            return TYPE_TRAITS;
+        }
+    };
+
+    public static long getLongBits(byte[] bytes, int start) {
+        return LongPointable.getLong(bytes, start);
+    }
+
+    public static double getDouble(byte[] bytes, int start) {
+        long bits = getLongBits(bytes, start);
+        return Double.longBitsToDouble(bits);
+    }
+
+    public static void setDouble(byte[] bytes, int start, double value) {
+        long bits = Double.doubleToLongBits(value);
+        LongPointable.setLong(bytes, start, bits);
+    }
+
+    public double getDouble() {
+        return getDouble(bytes, start);
+    }
+
+    public void setDouble(double value) {
+        setDouble(bytes, start, value);
+    }
+
+    public double preIncrement() {
+        double v = getDouble();
+        ++v;
+        setDouble(v);
+        return v;
+    }
+
+    public double postIncrement() {
+        double v = getDouble();
+        double ov = v++;
+        setDouble(v);
+        return ov;
+    }
+
+    @Override
+    public int compareTo(IPointable pointer) {
+        return compareTo(pointer.getByteArray(), pointer.getStartOffset(), pointer.getLength());
+    }
+
+    @Override
+    public int compareTo(byte[] bytes, int start, int length) {
+        double v = getDouble();
+        double ov = getDouble(bytes, start);
+        return v < ov ? -1 : (v > ov ? 1 : 0);
+    }
+
+    @Override
+    public int hash() {
+        long bits = getLongBits(bytes, start);
+        return (int) (bits ^ (bits >>> 32));
+    }
+
+    @Override
+    public byte byteValue() {
+        return (byte) getDouble();
+    }
+
+    @Override
+    public short shortValue() {
+        return (short) getDouble();
+    }
+
+    @Override
+    public int intValue() {
+        return (int) getDouble();
+    }
+
+    @Override
+    public long longValue() {
+        return (long) getDouble();
+    }
+
+    @Override
+    public float floatValue() {
+        return (float) getDouble();
+    }
+
+    @Override
+    public double doubleValue() {
+        return getDouble();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/FloatPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/FloatPointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/FloatPointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/FloatPointable.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/IntegerPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/IntegerPointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/IntegerPointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/IntegerPointable.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/LongPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/LongPointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/LongPointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/LongPointable.java
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/RawUTF8StringPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/RawUTF8StringPointable.java
new file mode 100644
index 0000000..c90ce5a
--- /dev/null
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/RawUTF8StringPointable.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.data.std.primitive;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.api.AbstractPointable;
+import edu.uci.ics.hyracks.data.std.api.IComparable;
+import edu.uci.ics.hyracks.data.std.api.IHashable;
+import edu.uci.ics.hyracks.data.std.api.IPointable;
+import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
+
+/**
+ * This class provides the raw bytes-based comparison and hash function for UTF8 strings.
+ * Note that the comparison may not deliver the correct ordering for certain languages that include 2 or 3 bytes characters.
+ * But it works for single-byte character languages.
+ */
+public final class RawUTF8StringPointable extends AbstractPointable implements IHashable, IComparable {
+    public static final ITypeTraits TYPE_TRAITS = new ITypeTraits() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public boolean isFixedLength() {
+            return false;
+        }
+
+        @Override
+        public int getFixedLength() {
+            return 0;
+        }
+    };
+
+    public static final IPointableFactory FACTORY = new IPointableFactory() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public IPointable createPointable() {
+            return new RawUTF8StringPointable();
+        }
+
+        @Override
+        public ITypeTraits getTypeTraits() {
+            return TYPE_TRAITS;
+        }
+    };
+
+    @Override
+    public int compareTo(IPointable pointer) {
+        return compareTo(pointer.getByteArray(), pointer.getStartOffset(), pointer.getLength());
+    }
+
+    @Override
+    public int compareTo(byte[] bytes, int start, int length) {
+        int utflen1 = UTF8StringPointable.getUTFLength(this.bytes, this.start);
+        int utflen2 = UTF8StringPointable.getUTFLength(bytes, start);
+
+        int c1 = 0;
+        int c2 = 0;
+
+        int s1Start = this.start + 2;
+        int s2Start = start + 2;
+
+        while (c1 < utflen1 && c2 < utflen2) {
+            char ch1 = (char) this.bytes[s1Start + c1];
+            char ch2 = (char) bytes[s2Start + c2];
+
+            if (ch1 != ch2) {
+                return ch1 - ch2;
+            }
+            c1++;
+            c2++;
+        }
+        return utflen1 - utflen2;
+    }
+
+    @Override
+    public int hash() {
+        int h = 0;
+        int utflen = UTF8StringPointable.getUTFLength(bytes, start);
+        int sStart = start + 2;
+        int c = 0;
+
+        while (c < utflen) {
+            char ch = (char) bytes[sStart + c];
+            h = 31 * h + ch;
+            c++;
+        }
+        return h;
+    }
+
+    public void toString(StringBuilder buffer) {
+        UTF8StringPointable.toString(buffer, bytes, start);
+    }
+}
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/ShortPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/ShortPointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/ShortPointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/ShortPointable.java
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java
new file mode 100644
index 0000000..866ebb0
--- /dev/null
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java
@@ -0,0 +1,219 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.data.std.primitive;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.api.AbstractPointable;
+import edu.uci.ics.hyracks.data.std.api.IComparable;
+import edu.uci.ics.hyracks.data.std.api.IHashable;
+import edu.uci.ics.hyracks.data.std.api.IPointable;
+import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
+
+public final class UTF8StringPointable extends AbstractPointable implements IHashable, IComparable {
+    public static final ITypeTraits TYPE_TRAITS = new ITypeTraits() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public boolean isFixedLength() {
+            return false;
+        }
+
+        @Override
+        public int getFixedLength() {
+            return 0;
+        }
+    };
+
+    public static final IPointableFactory FACTORY = new IPointableFactory() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public IPointable createPointable() {
+            return new UTF8StringPointable();
+        }
+
+        @Override
+        public ITypeTraits getTypeTraits() {
+            return TYPE_TRAITS;
+        }
+    };
+
+    /**
+     * Returns the character at the given byte offset. The caller is responsible for making sure that
+     * the provided offset is within bounds and points to the beginning of a valid UTF8 character.
+     * 
+     * @param offset
+     *            - Byte offset
+     * @return Character at the given offset.
+     */
+    public char charAt(int offset) {
+        return charAt(bytes, start + offset);
+    }
+
+    public static char charAt(byte[] b, int s) {
+        int c = b[s] & 0xff;
+        switch (c >> 4) {
+            case 0:
+            case 1:
+            case 2:
+            case 3:
+            case 4:
+            case 5:
+            case 6:
+            case 7:
+                return (char) c;
+
+            case 12:
+            case 13:
+                return (char) (((c & 0x1F) << 6) | ((b[s + 1]) & 0x3F));
+
+            case 14:
+                return (char) (((c & 0x0F) << 12) | (((b[s + 1]) & 0x3F) << 6) | (((b[s + 2]) & 0x3F) << 0));
+
+            default:
+                throw new IllegalArgumentException();
+        }
+    }
+
+    public int charSize(int offset) {
+        return charSize(bytes, start + offset);
+    }
+
+    public static int charSize(byte[] b, int s) {
+        int c = b[s] & 0xff;
+        switch (c >> 4) {
+            case 0:
+            case 1:
+            case 2:
+            case 3:
+            case 4:
+            case 5:
+            case 6:
+            case 7:
+                return 1;
+
+            case 12:
+            case 13:
+                return 2;
+
+            case 14:
+                return 3;
+        }
+        throw new IllegalStateException();
+    }
+
+    public static int getModifiedUTF8Len(char c) {
+        if (c >= 0x0000 && c <= 0x007F) {
+            return 1;
+        } else if (c <= 0x07FF) {
+            return 2;
+        } else {
+            return 3;
+        }
+    }
+
+    /**
+     * Gets the length of the string in characters.
+     * 
+     * @return length of string in characters
+     */
+    public int getStringLength() {
+        return getStringLength(bytes, start);
+    }
+
+    public static int getStringLength(byte[] b, int s) {
+        int pos = s + 2;
+        int end = pos + getUTFLength(b, s);
+        int charCount = 0;
+        while (pos < end) {
+            charCount++;
+            pos += charSize(b, pos);
+        }
+        return charCount;
+    }
+
+    /**
+     * Gets the length of the UTF-8 encoded string in bytes.
+     * 
+     * @return length of UTF-8 encoded string in bytes
+     */
+    public int getUTFLength() {
+        return getUTFLength(bytes, start);
+    }
+
+    public static int getUTFLength(byte[] b, int s) {
+        return ((b[s] & 0xff) << 8) + ((b[s + 1] & 0xff) << 0);
+    }
+
+    @Override
+    public int compareTo(IPointable pointer) {
+        return compareTo(pointer.getByteArray(), pointer.getStartOffset(), pointer.getLength());
+    }
+
+    @Override
+    public int compareTo(byte[] bytes, int start, int length) {
+        int utflen1 = getUTFLength(this.bytes, this.start);
+        int utflen2 = getUTFLength(bytes, start);
+
+        int c1 = 0;
+        int c2 = 0;
+
+        int s1Start = this.start + 2;
+        int s2Start = start + 2;
+
+        while (c1 < utflen1 && c2 < utflen2) {
+            char ch1 = charAt(this.bytes, s1Start + c1);
+            char ch2 = charAt(bytes, s2Start + c2);
+
+            if (ch1 != ch2) {
+                return ch1 - ch2;
+            }
+            c1 += charSize(this.bytes, s1Start + c1);
+            c2 += charSize(bytes, s2Start + c2);
+        }
+        return utflen1 - utflen2;
+    }
+
+    @Override
+    public int hash() {
+        int h = 0;
+        int utflen = getUTFLength(bytes, start);
+        int sStart = start + 2;
+        int c = 0;
+
+        while (c < utflen) {
+            char ch = charAt(bytes, sStart + c);
+            h = 31 * h + ch;
+            c += charSize(bytes, sStart + c);
+        }
+        return h;
+    }
+
+    public static void toString(StringBuilder buffer, byte[] bytes, int start) {
+        int utfLen = getUTFLength(bytes, start);
+        int offset = 2;
+        while (utfLen > 0) {
+            char c = charAt(bytes, start + offset);
+            buffer.append(c);
+            int cLen = UTF8StringPointable.getModifiedUTF8Len(c);
+            offset += cLen;
+            utfLen -= cLen;
+        }
+    }
+
+    public void toString(StringBuilder buffer) {
+        toString(buffer, bytes, start);
+    }
+}
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringWriter.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringWriter.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringWriter.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringWriter.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/VoidPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/VoidPointable.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/VoidPointable.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/VoidPointable.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java
diff --git a/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/GrowableArray.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/GrowableArray.java
similarity index 100%
rename from hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/GrowableArray.java
rename to hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/GrowableArray.java
diff --git a/hyracks/hyracks-data/pom.xml b/hyracks/hyracks-data/pom.xml
new file mode 100644
index 0000000..db4d8e9
--- /dev/null
+++ b/hyracks/hyracks-data/pom.xml
@@ -0,0 +1,16 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-data</artifactId>
+  <packaging>pom</packaging>
+  <name>hyracks-data</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hyracks-data-std</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-dataflow-common/pom.xml b/hyracks/hyracks-dataflow-common/pom.xml
new file mode 100644
index 0000000..393e97f
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/pom.xml
@@ -0,0 +1,38 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-dataflow-common</artifactId>
+  <name>hyracks-dataflow-common</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-data-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleReference.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleReference.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleReference.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleReference.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameConstants.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameConstants.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameConstants.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameConstants.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializer.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataReader.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataReader.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataReader.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataReader.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataWriter.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataWriter.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataWriter.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameDeserializingDataWriter.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAccessor.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAccessor.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAccessor.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAccessor.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAppender.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAppender.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAppender.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTupleAppender.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTuplePairComparator.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTuplePairComparator.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTuplePairComparator.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameTuplePairComparator.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/ByteBufferInputStream.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/ByteBufferInputStream.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/ByteBufferInputStream.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/ByteBufferInputStream.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/FrameUtils.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/FrameUtils.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/FrameUtils.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/util/FrameUtils.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleFieldValueReference.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleFieldValueReference.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleFieldValueReference.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleFieldValueReference.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/IFrameTupleReference.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/IFrameTupleReference.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/IFrameTupleReference.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/IFrameTupleReference.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/ITupleReference.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/ITupleReference.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/ITupleReference.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/ITupleReference.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/BooleanSerializerDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/BooleanSerializerDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/BooleanSerializerDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/BooleanSerializerDeserializer.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/DoubleSerializerDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/DoubleSerializerDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/DoubleSerializerDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/DoubleSerializerDeserializer.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/FloatSerializerDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/FloatSerializerDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/FloatSerializerDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/FloatSerializerDeserializer.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntArraySerializerDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntArraySerializerDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntArraySerializerDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntArraySerializerDeserializer.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/Integer64SerializerDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/Integer64SerializerDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/Integer64SerializerDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/Integer64SerializerDeserializer.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntegerSerializerDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntegerSerializerDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntegerSerializerDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/IntegerSerializerDeserializer.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/UTF8StringSerializerDeserializer.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/UTF8StringSerializerDeserializer.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/UTF8StringSerializerDeserializer.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/UTF8StringSerializerDeserializer.java
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/DoubleNormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/DoubleNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..e95e9c2
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/DoubleNormalizedKeyComputerFactory.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+public class DoubleNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int prefix = IntegerSerializerDeserializer.getInt(bytes, start);
+                if (prefix >= 0) {
+                    return prefix ^ Integer.MIN_VALUE;
+                } else {
+                    return (int) ((long) 0xffffffff - (long) prefix);
+                }
+            }
+
+        };
+    }
+
+}
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/FloatNormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/FloatNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..d58afc1
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/FloatNormalizedKeyComputerFactory.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+public class FloatNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int prefix = IntegerSerializerDeserializer.getInt(bytes, start);
+                if (prefix >= 0) {
+                    return prefix ^ Integer.MIN_VALUE;
+                } else {
+                    return (int) ((long) 0xffffffff - (long) prefix);
+                }
+            }
+
+        };
+    }
+
+}
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/Integer64NormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/Integer64NormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..4589909
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/Integer64NormalizedKeyComputerFactory.java
@@ -0,0 +1,55 @@
+package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
+
+public class Integer64NormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 8735044913496854551L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private static final int POSTIVE_LONG_MASK = (3 << 30);
+            private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
+            private static final int NEGATIVE_LONG_MASK = (0 << 30);
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                long value = Integer64SerializerDeserializer.getLong(bytes, start);
+                int highValue = (int) (value >> 32);
+                if (highValue > 0) {
+                    /**
+                     * larger than Integer.MAX
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= POSTIVE_LONG_MASK;
+                    return highNmk;
+                } else if (highValue == 0) {
+                    /**
+                     * smaller than Integer.MAX but >=0
+                     */
+                    int lowNmk = (int) value;
+                    lowNmk >>= 2;
+                    lowNmk |= NON_NEGATIVE_INT_MASK;
+                    return lowNmk;
+                } else {
+                    /**
+                     * less than 0: have not optimized for that
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= NEGATIVE_LONG_MASK;
+                    return highNmk;
+                }
+            }
+
+            private int getKey(int value) {
+                return value ^ Integer.MIN_VALUE;
+            }
+
+        };
+    }
+}
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..6a01842
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+public class IntegerNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int value = IntegerSerializerDeserializer.getInt(bytes, start);
+                return value ^Integer.MIN_VALUE;
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/UTF8StringNormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/UTF8StringNormalizedKeyComputerFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/UTF8StringNormalizedKeyComputerFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/UTF8StringNormalizedKeyComputerFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/DoubleParserFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/DoubleParserFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/DoubleParserFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/DoubleParserFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/FloatParserFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/FloatParserFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/FloatParserFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/FloatParserFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParser.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParser.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParser.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParser.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParserFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParserFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParserFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IValueParserFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IntegerParserFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IntegerParserFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IntegerParserFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/IntegerParserFactory.java
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java
new file mode 100644
index 0000000..d9191c7
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java
@@ -0,0 +1,114 @@
/*
 * Copyright 2009-2010 by The Regents of the University of California
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * you may obtain a copy of the License from
 * 
 *     http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.uci.ics.hyracks.dataflow.common.data.parsers;

import java.io.DataOutput;
import java.io.IOException;

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

/**
 * Parses a character range as an optionally-negative decimal long (with
 * optional leading/trailing whitespace) and writes the value to a
 * {@link DataOutput} via {@code writeLong}.
 */
public class LongParserFactory implements IValueParserFactory {
    public static final IValueParserFactory INSTANCE = new LongParserFactory();

    private static final long serialVersionUID = 1L;

    private LongParserFactory() {
    }

    @Override
    public IValueParser createValueParser() {
        return new IValueParser() {
            /**
             * Parses buffer[start .. start+length) as a long.
             *
             * @throws HyracksDataException on any character that is neither a
             *         digit, a leading '-', nor surrounding whitespace.
             */
            @Override
            public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
                // Accumulate in a long: the original used an int, which silently
                // overflowed for any value outside the int range.
                // NOTE(review): Long.MIN_VALUE itself still overflows the
                // magnitude-then-negate scheme, as in the original.
                long n = 0;
                int sign = 1;
                int i = 0;
                // Phase 1: leading whitespace, an optional '-', up to the first digit.
                boolean pre = true;
                for (; pre && i < length; ++i) {
                    char ch = buffer[i + start];
                    switch (ch) {
                        case ' ':
                        case '\t':
                        case '\n':
                        case '\r':
                        case '\f':
                            break;

                        case '-':
                            sign = -1;
                            pre = false;
                            break;

                        case '0':
                        case '1':
                        case '2':
                        case '3':
                        case '4':
                        case '5':
                        case '6':
                        case '7':
                        case '8':
                        case '9':
                            pre = false;
                            n = n * 10 + (ch - '0');
                            break;

                        default:
                            throw new HyracksDataException("Encountered " + ch);
                    }
                }
                // Phase 2: remaining digits. Stop at the first non-digit so it is
                // validated by phase 3 — the original switch silently skipped
                // non-digits here, accepting garbage like "12x34" as 1234 and
                // leaving the trailing-whitespace loop unreachable.
                boolean digitsDone = false;
                for (; !digitsDone && i < length; ++i) {
                    char ch = buffer[i + start];
                    if (ch >= '0' && ch <= '9') {
                        n = n * 10 + (ch - '0');
                    } else {
                        digitsDone = true;
                        --i; // re-examine this character in phase 3
                    }
                }
                // Phase 3: only whitespace may follow the digits.
                for (; i < length; ++i) {
                    char ch = buffer[i + start];
                    switch (ch) {
                        case ' ':
                        case '\t':
                        case '\n':
                        case '\r':
                        case '\f':
                            break;

                        default:
                            throw new HyracksDataException("Encountered " + ch);
                    }
                }

                try {
                    out.writeLong(n * sign);
                } catch (IOException e) {
                    throw new HyracksDataException(e);
                }
            }
        };
    }
}
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/UTF8StringParserFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/UTF8StringParserFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/UTF8StringParserFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/UTF8StringParserFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerGeneratorFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerGeneratorFactory.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerGeneratorFactory.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/RepartitionComputerGeneratorFactory.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileReader.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileReader.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileReader.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileReader.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileWriter.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileWriter.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileWriter.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/io/RunFileWriter.java
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/ReflectionUtils.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/ReflectionUtils.java
similarity index 100%
rename from hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/ReflectionUtils.java
rename to hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/ReflectionUtils.java
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
new file mode 100644
index 0000000..00575f4
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.common.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.BooleanPointable;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.LongPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.BooleanSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+
+@SuppressWarnings("rawtypes")
+public class SerdeUtils {
+	public static class PayloadTypeTraits implements ITypeTraits {
+		private static final long serialVersionUID = 1L;
+		final int payloadSize;
+		
+		public PayloadTypeTraits(int payloadSize) {
+			this.payloadSize = payloadSize;
+		}
+		
+		@Override
+		public boolean isFixedLength() {
+			return true;
+		}
+
+		@Override
+		public int getFixedLength() {
+			return payloadSize;
+		}
+	}
+	
+	public static ITypeTraits[] serdesToTypeTraits(ISerializerDeserializer[] serdes) {
+        ITypeTraits[] typeTraits = new ITypeTraits[serdes.length];
+        for (int i = 0; i < serdes.length; i++) {
+            typeTraits[i] = serdeToTypeTrait(serdes[i]);
+        }
+        return typeTraits;
+    }
+    
+    public static ITypeTraits[] serdesToTypeTraits(ISerializerDeserializer[] serdes, int payloadSize) {
+        ITypeTraits[] typeTraits = new ITypeTraits[serdes.length + 1];
+        for (int i = 0; i < serdes.length; i++) {
+            typeTraits[i] = serdeToTypeTrait(serdes[i]);
+        }
+        typeTraits[serdes.length] = new PayloadTypeTraits(payloadSize);
+        return typeTraits;
+    }
+
+    public static ITypeTraits serdeToTypeTrait(ISerializerDeserializer serde) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return IntegerPointable.TYPE_TRAITS;
+        }
+        if (serde instanceof Integer64SerializerDeserializer) {
+            return LongPointable.TYPE_TRAITS;
+        }
+        if (serde instanceof FloatSerializerDeserializer) {
+            return FloatPointable.TYPE_TRAITS;
+        }
+        if (serde instanceof DoubleSerializerDeserializer) {
+            return DoublePointable.TYPE_TRAITS;
+        }
+        if (serde instanceof BooleanSerializerDeserializer) {
+            return BooleanPointable.TYPE_TRAITS;
+        }
+        return UTF8StringPointable.TYPE_TRAITS;
+    }
+
+    public static IBinaryComparator[] serdesToComparators(ISerializerDeserializer[] serdes, int numSerdes) {
+        IBinaryComparator[] comparators = new IBinaryComparator[numSerdes];
+        for (int i = 0; i < numSerdes; i++) {
+            comparators[i] = serdeToComparator(serdes[i]);
+        }
+        return comparators;
+    }
+
+    public static IBinaryComparator serdeToComparator(ISerializerDeserializer serde) {
+        IBinaryComparatorFactory f = serdeToComparatorFactory(serde);
+        return f.createBinaryComparator();
+    }
+
+    public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes, int numSerdes) {
+        IBinaryComparatorFactory[] comparatorsFactories = new IBinaryComparatorFactory[numSerdes];
+        for (int i = 0; i < numSerdes; i++) {
+            comparatorsFactories[i] = serdeToComparatorFactory(serdes[i]);
+        }
+        return comparatorsFactories;
+    }
+
+    public static IBinaryComparatorFactory serdeToComparatorFactory(ISerializerDeserializer serde) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        }
+        if (serde instanceof Integer64SerializerDeserializer) {
+            return PointableBinaryComparatorFactory.of(LongPointable.FACTORY);
+        }
+        if (serde instanceof FloatSerializerDeserializer) {
+            return PointableBinaryComparatorFactory.of(FloatPointable.FACTORY);
+        }
+        if (serde instanceof DoubleSerializerDeserializer) {
+            return PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        }
+        if (serde instanceof BooleanSerializerDeserializer) {
+            throw new UnsupportedOperationException("Binary comparator factory for Boolean not implemented.");
+        }
+        if (serde instanceof UTF8StringSerializerDeserializer) {
+            return PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+        }
+        throw new UnsupportedOperationException("Binary comparator for + " + serde.toString() + " not implemented.");
+    }
+}
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java
new file mode 100644
index 0000000..b35dd75
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.common.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+@SuppressWarnings("rawtypes")
+/**
+ * Static helpers for building, printing, deserializing, and copying Hyracks
+ * tuples ({@link ITupleReference}) via {@link ArrayTupleBuilder} /
+ * {@link ArrayTupleReference}.
+ */
+public class TupleUtils {
+    /**
+     * Serializes {@code fields} into {@code tupleBuilder} using the matching
+     * {@code fieldSerdes} and points {@code tuple} at the result. Only
+     * min(builder field count, fields.length) fields are written.
+     */
+    @SuppressWarnings("unchecked")
+    public static void createTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
+            ISerializerDeserializer[] fieldSerdes, final Object... fields) throws HyracksDataException {
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        int numFields = Math.min(tupleBuilder.getFieldEndOffsets().length, fields.length);
+        for (int i = 0; i < numFields; i++) {
+            fieldSerdes[i].serialize(fields[i], dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+
+    /**
+     * Convenience overload: allocates a fresh builder/reference and returns the
+     * populated tuple.
+     */
+    public static ITupleReference createTuple(ISerializerDeserializer[] fieldSerdes, final Object... fields)
+            throws HyracksDataException {
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        createTuple(tupleBuilder, tuple, fieldSerdes, fields);
+        return tuple;
+    }
+
+    /**
+     * Serializes the given ints into {@code tupleBuilder} and points
+     * {@code tuple} at the result.
+     */
+    public static void createIntegerTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
+            final int... fields) throws HyracksDataException {
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        for (final int i : fields) {
+            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+
+    /** Convenience overload: builds and returns an all-integer tuple. */
+    public static ITupleReference createIntegerTuple(final int... fields) throws HyracksDataException {
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        createIntegerTuple(tupleBuilder, tuple, fields);
+        return tuple;
+    }
+
+    /**
+     * Serializes the given doubles into {@code tupleBuilder} and points
+     * {@code tuple} at the result.
+     */
+    public static void createDoubleTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
+            final double... fields) throws HyracksDataException {
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        for (final double i : fields) {
+            DoubleSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+
+    /** Convenience overload: builds and returns an all-double tuple. */
+    public static ITupleReference createDoubleTuple(final double... fields) throws HyracksDataException {
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        createDoubleTuple(tupleBuilder, tuple, fields);
+        return tuple;
+    }
+
+    /**
+     * Deserializes each field with its serde and renders the tuple as a
+     * space-separated string. Only min(tuple field count, fields.length)
+     * fields are printed.
+     */
+    public static String printTuple(ITupleReference tuple, ISerializerDeserializer[] fields)
+            throws HyracksDataException {
+        StringBuilder strBuilder = new StringBuilder();
+        int numPrintFields = Math.min(tuple.getFieldCount(), fields.length);
+        for (int i = 0; i < numPrintFields; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Object o = fields[i].deserialize(dataIn);
+            strBuilder.append(o.toString());
+            // Bug fix: separator bound must track the printed count, not
+            // fields.length; otherwise a tuple with fewer fields than serdes
+            // got a trailing space after its last field.
+            if (i != numPrintFields - 1) {
+                strBuilder.append(" ");
+            }
+        }
+        return strBuilder.toString();
+    }
+
+    /**
+     * Deserializes each field with its serde and returns the resulting Java
+     * objects. Only min(tuple field count, fields.length) fields are read.
+     */
+    public static Object[] deserializeTuple(ITupleReference tuple, ISerializerDeserializer[] fields)
+            throws HyracksDataException {
+        int numFields = Math.min(tuple.getFieldCount(), fields.length);
+        Object[] objs = new Object[numFields];
+        for (int i = 0; i < numFields; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            objs[i] = fields[i].deserialize(dataIn);
+        }
+        return objs;
+    }
+
+    /**
+     * Returns a deep copy of {@code tuple}: field bytes are copied into a new
+     * builder, so the copy is independent of the source buffer.
+     */
+    public static ITupleReference copyTuple(ITupleReference tuple) throws HyracksDataException {
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(tuple.getFieldCount());
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            tupleBuilder.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+        }
+        ArrayTupleReference tupleCopy = new ArrayTupleReference();
+        tupleCopy.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+        return tupleCopy;
+    }
+}
diff --git a/hyracks/hyracks-dataflow-hadoop/pom.xml b/hyracks/hyracks-dataflow-hadoop/pom.xml
new file mode 100644
index 0000000..c01a6a7
--- /dev/null
+++ b/hyracks/hyracks-dataflow-hadoop/pom.xml
@@ -0,0 +1,62 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-dataflow-hadoop</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-dataflow-hadoop</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>org.apache.hadoop</groupId>
+  		<artifactId>hadoop-core</artifactId>
+  		<version>0.20.2</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.dcache</groupId>
+  		<artifactId>dcache-client</artifactId>
+  		<version>0.0.1</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/AbstractHadoopOperatorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopMapperOperatorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReducerOperatorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopWriteOperatorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/AbstractClassBasedDelegate.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/AbstractClassBasedDelegate.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/AbstractClassBasedDelegate.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/AbstractClassBasedDelegate.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopHashTuplePartitionComputerFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopHashTuplePartitionComputerFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopHashTuplePartitionComputerFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopHashTuplePartitionComputerFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopNewPartitionerTuplePartitionComputerFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopNewPartitionerTuplePartitionComputerFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopNewPartitionerTuplePartitionComputerFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopNewPartitionerTuplePartitionComputerFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopPartitionerTuplePartitionComputerFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopPartitionerTuplePartitionComputerFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopPartitionerTuplePartitionComputerFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/HadoopPartitionerTuplePartitionComputerFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyBinaryComparatorFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyBinaryComparatorFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyBinaryComparatorFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyBinaryComparatorFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyComparatorFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyComparatorFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyComparatorFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/KeyComparatorFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/RawComparingComparatorFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/RawComparingComparatorFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/RawComparingComparatorFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/RawComparingComparatorFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/WritableComparingBinaryComparatorFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/WritableComparingBinaryComparatorFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/WritableComparingBinaryComparatorFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/data/WritableComparingBinaryComparatorFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopHelper.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopTools.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopTools.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopTools.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HadoopTools.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HashPartitioningShuffleConnectorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HashPartitioningShuffleConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HashPartitioningShuffleConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/HashPartitioningShuffleConnectorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProvider.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProvider.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProvider.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProvider.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProviderFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProviderFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProviderFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/IInputSplitProviderFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/InputFileSplit.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/InputFileSplit.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/InputFileSplit.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/InputFileSplit.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/KVIterator.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/KVIterator.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/KVIterator.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/KVIterator.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MarshalledWritable.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MarshalledWritable.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MarshalledWritable.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MarshalledWritable.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReducerOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReducerOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReducerOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReducerOperatorDescriptor.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/ClasspathBasedHadoopClassFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/ClasspathBasedHadoopClassFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/ClasspathBasedHadoopClassFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/ClasspathBasedHadoopClassFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DatatypeHelper.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DatatypeHelper.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DatatypeHelper.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DatatypeHelper.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DuplicateKeyMapper.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DuplicateKeyMapper.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DuplicateKeyMapper.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/DuplicateKeyMapper.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/IHadoopClassFactory.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/IHadoopClassFactory.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/IHadoopClassFactory.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/IHadoopClassFactory.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/InputSplitsProxy.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/InputSplitsProxy.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/InputSplitsProxy.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/InputSplitsProxy.java
diff --git a/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/PreappendLongWritableMapper.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/PreappendLongWritableMapper.java
similarity index 100%
rename from hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/PreappendLongWritableMapper.java
rename to hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/util/PreappendLongWritableMapper.java
diff --git a/hyracks/hyracks-dataflow-std/pom.xml b/hyracks/hyracks-dataflow-std/pom.xml
new file mode 100644
index 0000000..0cb5516
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/pom.xml
@@ -0,0 +1,50 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-dataflow-std</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-dataflow-std</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.2</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractActivityNode.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractActivityNode.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractActivityNode.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractActivityNode.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractMToNConnectorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractMToNConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractMToNConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractMToNConnectorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractSingleActivityOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractSingleActivityOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractSingleActivityOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractSingleActivityOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractStateObject.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractStateObject.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractStateObject.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractStateObject.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputSinkOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputSinkOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputSinkOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputSinkOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputUnaryOutputOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputUnaryOutputOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputUnaryOutputOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryInputUnaryOutputOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputSourceOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputSourceOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputSourceOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/AbstractUnaryOutputSourceOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/IOpenableDataWriterOperator.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/IOpenableDataWriterOperator.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/IOpenableDataWriterOperator.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/base/IOpenableDataWriterOperator.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/AbstractPartitionCollector.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/AbstractPartitionCollector.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/AbstractPartitionCollector.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/AbstractPartitionCollector.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionAcceptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionAcceptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionAcceptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionAcceptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionBatchManager.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionBatchManager.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionBatchManager.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/IPartitionBatchManager.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/InputChannelFrameReader.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/InputChannelFrameReader.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/InputChannelFrameReader.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/InputChannelFrameReader.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicFrameReader.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicFrameReader.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicFrameReader.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicFrameReader.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicPartitionBatchManager.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicPartitionBatchManager.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicPartitionBatchManager.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/NonDeterministicPartitionBatchManager.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/PartitionCollector.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/PartitionCollector.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/PartitionCollector.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/PartitionCollector.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/SortMergeFrameReader.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/GlobalHashingLocalityMap.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/GlobalHashingLocalityMap.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/GlobalHashingLocalityMap.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/GlobalHashingLocalityMap.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/HashtableLocalityMap.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/HashtableLocalityMap.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/HashtableLocalityMap.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/HashtableLocalityMap.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/ILocalityMap.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/ILocalityMap.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/ILocalityMap.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/ILocalityMap.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNReplicatingConnectorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNReplicatingConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNReplicatingConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/MToNReplicatingConnectorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractDeserializedFileScanOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractDeserializedFileScanOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractDeserializedFileScanOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractDeserializedFileScanOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ConstantFileSplitProvider.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ConstantFileSplitProvider.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ConstantFileSplitProvider.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ConstantFileSplitProvider.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileScanOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileScanOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileScanOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileScanOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FrameFileWriterOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FrameFileWriterOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FrameFileWriterOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FrameFileWriterOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IFileSplitProvider.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IFileSplitProvider.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IFileSplitProvider.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IFileSplitProvider.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordReader.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordReader.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordReader.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordReader.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordWriter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordWriter.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordWriter.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/IRecordWriter.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParser.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParser.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParser.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParser.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParserFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParserFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParserFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/ITupleParserFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/PlainFileWriterOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/PlainFileWriterOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/PlainFileWriterOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/PlainFileWriterOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordWriter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordWriter.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordWriter.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/RecordWriter.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/AggregateState.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/AggregateState.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/AggregateState.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/AggregateState.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/DeserializedPreclusteredGroupOperator.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/DeserializedPreclusteredGroupOperator.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/DeserializedPreclusteredGroupOperator.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/DeserializedPreclusteredGroupOperator.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/FrameToolsForGroupers.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/FrameToolsForGroupers.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/FrameToolsForGroupers.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/FrameToolsForGroupers.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IAggregatorDescriptorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IFieldAggregateDescriptorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IGroupAggregator.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IGroupAggregator.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IGroupAggregator.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/IGroupAggregator.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTableFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTableFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTableFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/ISpillableTableFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/CountFieldAggregatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/CountFieldAggregatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/CountFieldAggregatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/CountFieldAggregatorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupMergeOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupMergeOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupMergeOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupMergeOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupState.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupState.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupState.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/external/ExternalGroupState.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/GroupingHashTable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/GroupingHashTable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/GroupingHashTable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/GroupingHashTable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupBuildOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupBuildOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupBuildOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupBuildOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOutputOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOutputOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOutputOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupOutputOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupState.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupState.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupState.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/hash/HashGroupState.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionBuildOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionBuildOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionBuildOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionBuildOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionState.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionState.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionState.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/GraceHashJoinPartitionState.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
new file mode 100644
index 0000000..d86f1d5
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.std.join;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTuplePairComparator;
+import edu.uci.ics.hyracks.dataflow.std.structures.ISerializableTable;
+import edu.uci.ics.hyracks.dataflow.std.structures.TuplePointer;
+
+public class InMemoryHashJoin {
+	
+    private final List<ByteBuffer> buffers;
+    private final FrameTupleAccessor accessorBuild;
+    private final ITuplePartitionComputer tpcBuild;
+    private final FrameTupleAccessor accessorProbe;
+    private final ITuplePartitionComputer tpcProbe;
+    private final FrameTupleAppender appender;
+    private final FrameTuplePairComparator tpComparator;
+    private final ByteBuffer outBuffer;
+    private final boolean isLeftOuter;
+    private final ArrayTupleBuilder nullTupleBuild;
+    private final ISerializableTable table;
+	private final int tableSize;
+    private final TuplePointer storedTuplePointer;
+    private final boolean reverseOutputOrder;	//Should we reverse the order of tuples, we are writing in output
+    
+    public InMemoryHashJoin(IHyracksTaskContext ctx, int tableSize, FrameTupleAccessor accessor0,
+            ITuplePartitionComputer tpc0, FrameTupleAccessor accessor1, ITuplePartitionComputer tpc1,
+            FrameTuplePairComparator comparator, boolean isLeftOuter, INullWriter[] nullWriters1, ISerializableTable table)
+            throws HyracksDataException {
+    	this(ctx, tableSize, accessor0, tpc0, accessor1, tpc1, comparator, isLeftOuter, nullWriters1, table, false);
+    }
+    
+    public InMemoryHashJoin(IHyracksTaskContext ctx, int tableSize, FrameTupleAccessor accessor0,
+            ITuplePartitionComputer tpc0, FrameTupleAccessor accessor1, ITuplePartitionComputer tpc1,
+            FrameTuplePairComparator comparator, boolean isLeftOuter, INullWriter[] nullWriters1, ISerializableTable table, boolean reverse) throws HyracksDataException {
+    	this.tableSize = tableSize;
+       	this.table = table;
+       	storedTuplePointer = new TuplePointer();
+       	buffers = new ArrayList<ByteBuffer>();
+        this.accessorBuild = accessor1;
+        this.tpcBuild = tpc1;
+        this.accessorProbe = accessor0;
+        this.tpcProbe = tpc0;
+        appender = new FrameTupleAppender(ctx.getFrameSize());
+        tpComparator = comparator;
+        outBuffer = ctx.allocateFrame();
+        appender.reset(outBuffer, true);
+        this.isLeftOuter = isLeftOuter;
+        if (isLeftOuter) {
+            int fieldCountOuter = accessor1.getFieldCount();
+            nullTupleBuild = new ArrayTupleBuilder(fieldCountOuter);
+            DataOutput out = nullTupleBuild.getDataOutput();
+            for (int i = 0; i < fieldCountOuter; i++) {
+                nullWriters1[i].writeNull(out);
+                nullTupleBuild.addFieldEndOffset();
+            }
+        } else {
+            nullTupleBuild = null;
+        }
+    	reverseOutputOrder = reverse;
+    }
+
+    public void build(ByteBuffer buffer) throws HyracksDataException {
+        buffers.add(buffer);
+        int bIndex = buffers.size() - 1;
+        accessorBuild.reset(buffer);
+        int tCount = accessorBuild.getTupleCount();
+        for (int i = 0; i < tCount; ++i) {
+            int entry = tpcBuild.partition(accessorBuild, i, tableSize);
+            storedTuplePointer.frameIndex = bIndex;
+            storedTuplePointer.tupleIndex = i;
+            table.insert(entry, storedTuplePointer);
+        }
+    }
+
+    public void join(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
+        accessorProbe.reset(buffer);
+        int tupleCount0 = accessorProbe.getTupleCount();
+        for (int i = 0; i < tupleCount0; ++i) {
+            int entry = tpcProbe.partition(accessorProbe, i, tableSize);
+            boolean matchFound = false;
+            int offset = 0;
+            do {
+                table.getTuplePointer(entry, offset++, storedTuplePointer);
+                if (storedTuplePointer.frameIndex < 0)
+                    break;
+                int bIndex = storedTuplePointer.frameIndex;
+                int tIndex = storedTuplePointer.tupleIndex;
+                accessorBuild.reset(buffers.get(bIndex));
+                int c = tpComparator.compare(accessorProbe, i, accessorBuild, tIndex);
+                if (c == 0) {
+                    matchFound = true;
+                    appendToResult(i, tIndex, writer);
+                }
+            } while (true);
+
+            if (!matchFound && isLeftOuter) {
+                
+            	if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
+                        nullTupleBuild.getByteArray(), 0, nullTupleBuild.getSize())) {
+                    flushFrame(outBuffer, writer);
+                    appender.reset(outBuffer, true);
+                    if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
+                            nullTupleBuild.getByteArray(), 0, nullTupleBuild.getSize())) {
+                        throw new IllegalStateException();
+                    }
+                }
+                
+            }
+        }
+    }
+
+    public void closeJoin(IFrameWriter writer) throws HyracksDataException {
+        if (appender.getTupleCount() > 0) {
+            flushFrame(outBuffer, writer);
+        }
+    }
+
+    private void flushFrame(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
+        buffer.position(0);
+        buffer.limit(buffer.capacity());
+        writer.nextFrame(buffer);
+        buffer.position(0);
+        buffer.limit(buffer.capacity());
+    }
+    
+    private void appendToResult(int probeSidetIx, int buildSidetIx, IFrameWriter writer) throws HyracksDataException{
+    	if(!reverseOutputOrder){
+    		if (!appender.appendConcat(accessorProbe, probeSidetIx, accessorBuild, buildSidetIx)) {
+                flushFrame(outBuffer, writer);
+                appender.reset(outBuffer, true);
+                if (!appender.appendConcat(accessorProbe, probeSidetIx, accessorBuild, buildSidetIx)) {
+                    throw new IllegalStateException();
+                }
+            }
+    	}
+    	else{
+    		if (!appender.appendConcat(accessorBuild, buildSidetIx, accessorProbe, probeSidetIx)) {
+                flushFrame(outBuffer, writer);
+                appender.reset(outBuffer, true);
+                if (!appender.appendConcat(accessorBuild, buildSidetIx, accessorProbe, probeSidetIx)) {
+                    throw new IllegalStateException();
+                }
+            }
+    	}
+    }
+}
\ No newline at end of file
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparator.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparator.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparator.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparator.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparatorFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparatorFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparatorFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/JoinComparatorFactory.java
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
new file mode 100644
index 0000000..6870e71
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.std.join;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileReader;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileWriter;
+
+public class NestedLoopJoin {
+    private final FrameTupleAccessor accessorInner;
+    private final FrameTupleAccessor accessorOuter;
+    private final FrameTupleAppender appender;
+    private final ITuplePairComparator tpComparator;
+    private final ByteBuffer outBuffer;
+    private final ByteBuffer innerBuffer;
+    private final List<ByteBuffer> outBuffers;
+    private final int memSize;
+    private final IHyracksTaskContext ctx;
+    private RunFileReader runFileReader;
+    private int currentMemSize = 0;
+    private final RunFileWriter runFileWriter;
+    private final boolean isLeftOuter;
+    private final ArrayTupleBuilder nullTupleBuilder;
+
+    public NestedLoopJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessor0, FrameTupleAccessor accessor1,
+            ITuplePairComparator comparators, int memSize, boolean isLeftOuter, INullWriter[] nullWriters1)
+            throws HyracksDataException {
+        this.accessorInner = accessor1;
+        this.accessorOuter = accessor0;
+        this.appender = new FrameTupleAppender(ctx.getFrameSize());
+        this.tpComparator = comparators;
+        this.outBuffer = ctx.allocateFrame();
+        this.innerBuffer = ctx.allocateFrame();
+        this.appender.reset(outBuffer, true);
+        this.outBuffers = new ArrayList<ByteBuffer>();
+        this.memSize = memSize;
+        this.ctx = ctx;
+
+        this.isLeftOuter = isLeftOuter;
+        if (isLeftOuter) {
+            int innerFieldCount = accessorInner.getFieldCount();
+            nullTupleBuilder = new ArrayTupleBuilder(innerFieldCount);
+            DataOutput out = nullTupleBuilder.getDataOutput();
+            for (int i = 0; i < innerFieldCount; i++) {
+                nullWriters1[i].writeNull(out);
+                nullTupleBuilder.addFieldEndOffset();
+            }
+        } else {
+            nullTupleBuilder = null;
+        }
+
+        FileReference file = ctx.getJobletContext().createManagedWorkspaceFile(
+                this.getClass().getSimpleName() + this.toString());
+        runFileWriter = new RunFileWriter(file, ctx.getIOManager());
+        runFileWriter.open();
+    }
+
+    public void cache(ByteBuffer buffer) throws HyracksDataException {
+        runFileWriter.nextFrame(buffer);
+    }
+
+    public void join(ByteBuffer outerBuffer, IFrameWriter writer) throws HyracksDataException {
+        if (outBuffers.size() < memSize - 3) {
+            createAndCopyFrame(outerBuffer);
+            return;
+        }
+        if (currentMemSize < memSize - 3) {
+            reloadFrame(outerBuffer);
+            return;
+        }
+        for (ByteBuffer outBuffer : outBuffers) {
+            runFileReader = runFileWriter.createReader();
+            runFileReader.open();
+            while (runFileReader.nextFrame(innerBuffer)) {
+                blockJoin(outBuffer, innerBuffer, writer);
+            }
+            runFileReader.close();
+        }
+        currentMemSize = 0;
+        reloadFrame(outerBuffer);
+    }
+
+    private void createAndCopyFrame(ByteBuffer outerBuffer) {
+        ByteBuffer outerBufferCopy = ctx.allocateFrame();
+        FrameUtils.copy(outerBuffer, outerBufferCopy);
+        outBuffers.add(outerBufferCopy);
+        currentMemSize++;
+    }
+
+    private void reloadFrame(ByteBuffer outerBuffer) {
+        outBuffers.get(currentMemSize).clear();
+        FrameUtils.copy(outerBuffer, outBuffers.get(currentMemSize));
+        currentMemSize++;
+    }
+
+    private void blockJoin(ByteBuffer outerBuffer, ByteBuffer innerBuffer, IFrameWriter writer)
+            throws HyracksDataException {
+        accessorOuter.reset(outerBuffer);
+        accessorInner.reset(innerBuffer);
+        int tupleCount0 = accessorOuter.getTupleCount();
+        int tupleCount1 = accessorInner.getTupleCount();
+
+        for (int i = 0; i < tupleCount0; ++i) {
+            boolean matchFound = false;
+            for (int j = 0; j < tupleCount1; ++j) {
+                int c = compare(accessorOuter, i, accessorInner, j);
+                if (c == 0) {
+                    matchFound = true;
+                    if (!appender.appendConcat(accessorOuter, i, accessorInner, j)) {
+                        flushFrame(outBuffer, writer);
+                        appender.reset(outBuffer, true);
+                        if (!appender.appendConcat(accessorOuter, i, accessorInner, j)) {
+                            throw new IllegalStateException();
+                        }
+                    }
+                }
+            }
+
+            if (!matchFound && isLeftOuter) {
+                if (!appender.appendConcat(accessorOuter, i, nullTupleBuilder.getFieldEndOffsets(),
+                        nullTupleBuilder.getByteArray(), 0, nullTupleBuilder.getSize())) {
+                    flushFrame(outBuffer, writer);
+                    appender.reset(outBuffer, true);
+                    if (!appender.appendConcat(accessorOuter, i, nullTupleBuilder.getFieldEndOffsets(),
+                            nullTupleBuilder.getByteArray(), 0, nullTupleBuilder.getSize())) {
+                        throw new IllegalStateException();
+                    }
+                }
+            }
+        }
+    }
+
+    public void closeCache() throws HyracksDataException {
+        if (runFileWriter != null) {
+            runFileWriter.close();
+        }
+    }
+
+    public void closeJoin(IFrameWriter writer) throws HyracksDataException {
+        for (int i = 0; i < currentMemSize; i++) {
+            ByteBuffer outBuffer = outBuffers.get(i);
+            runFileReader = runFileWriter.createReader();
+            runFileReader.open();
+            while (runFileReader.nextFrame(innerBuffer)) {
+                blockJoin(outBuffer, innerBuffer, writer);
+            }
+            runFileReader.close();
+        }
+        outBuffers.clear();
+        currentMemSize = 0;
+
+        if (appender.getTupleCount() > 0) {
+            flushFrame(outBuffer, writer);
+        }
+    }
+
+    private void flushFrame(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
+        buffer.position(0);
+        buffer.limit(buffer.capacity());
+        writer.nextFrame(buffer);
+        buffer.position(0);
+        buffer.limit(buffer.capacity());
+    }
+
+    private int compare(FrameTupleAccessor accessor0, int tIndex0, FrameTupleAccessor accessor1, int tIndex1)
+            throws HyracksDataException {
+        int c = tpComparator.compare(accessor0, tIndex0, accessor1, tIndex1);
+        if (c != 0) {
+            return c;
+        }
+        return 0;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
new file mode 100644
index 0000000..0be01c1
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.std.join;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.IActivityGraphBuilder;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.TaskId;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractActivityNode;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractStateObject;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+
+/**
+ * Two-activity nested-loop join operator. Activity 0 (JoinCacheActivityNode)
+ * consumes input 1 (the inner relation) and caches it through a NestedLoopJoin;
+ * activity 1 (NestedLoopJoinActivityNode) then streams input 0 (the outer
+ * relation) against the cached inner side and emits joined tuples. A blocking
+ * edge guarantees caching completes before joining starts.
+ */
+public class NestedLoopJoinOperatorDescriptor extends AbstractOperatorDescriptor {
+    private static final int JOIN_CACHE_ACTIVITY_ID = 0;
+    private static final int NL_JOIN_ACTIVITY_ID = 1;
+
+    private static final long serialVersionUID = 1L;
+    // Builds the tuple-pair comparator that evaluates the join predicate.
+    private final ITuplePairComparatorFactory comparatorFactory;
+    // Number of frames the joiner may hold in memory.
+    private final int memSize;
+    // When true, unmatched outer tuples are emitted padded with nulls.
+    private final boolean isLeftOuter;
+    // Factories for the null padding used when isLeftOuter is set.
+    private final INullWriterFactory[] nullWriterFactories1;
+
+    /**
+     * @param spec operator registry this descriptor is added to
+     * @param comparatorFactory factory for the join-predicate comparator
+     * @param recordDescriptor descriptor of the joined output records
+     * @param memSize number of in-memory frames available to the join
+     * @param isLeftOuter whether to perform a left-outer join
+     * @param nullWriterFactories1 null-padding writers (used only when isLeftOuter)
+     */
+    public NestedLoopJoinOperatorDescriptor(IOperatorDescriptorRegistry spec,
+            ITuplePairComparatorFactory comparatorFactory, RecordDescriptor recordDescriptor, int memSize,
+            boolean isLeftOuter, INullWriterFactory[] nullWriterFactories1) {
+        super(spec, 2, 1);
+        this.comparatorFactory = comparatorFactory;
+        this.recordDescriptors[0] = recordDescriptor;
+        this.memSize = memSize;
+        this.isLeftOuter = isLeftOuter;
+        this.nullWriterFactories1 = nullWriterFactories1;
+    }
+
+    @Override
+    public void contributeActivities(IActivityGraphBuilder builder) {
+        ActivityId jcaId = new ActivityId(getOperatorId(), JOIN_CACHE_ACTIVITY_ID);
+        ActivityId nljAid = new ActivityId(getOperatorId(), NL_JOIN_ACTIVITY_ID);
+        JoinCacheActivityNode jc = new JoinCacheActivityNode(jcaId, nljAid);
+        NestedLoopJoinActivityNode nlj = new NestedLoopJoinActivityNode(nljAid);
+
+        // Input 1 (inner relation) feeds the caching activity.
+        builder.addActivity(this, jc);
+        builder.addSourceEdge(1, jc, 0);
+
+        // Input 0 (outer relation) feeds the join activity, which produces the output.
+        builder.addActivity(this, nlj);
+        builder.addSourceEdge(0, nlj, 0);
+
+        builder.addTargetEdge(0, nlj, 0);
+        // Caching must finish before joining may start.
+        builder.addBlockingEdge(jc, nlj);
+    }
+
+    /** Task-local state carrying the joiner (with the cached inner relation) between the two activities. */
+    public static class JoinCacheTaskState extends AbstractStateObject {
+        private NestedLoopJoin joiner;
+
+        public JoinCacheTaskState() {
+        }
+
+        private JoinCacheTaskState(JobId jobId, TaskId taskId) {
+            super(jobId, taskId);
+        }
+
+        @Override
+        public void toBytes(DataOutput out) throws IOException {
+            // No-op: the state appears to be exchanged only in memory between
+            // activities of the same task and never serialized -- TODO confirm.
+        }
+
+        @Override
+        public void fromBytes(DataInput in) throws IOException {
+            // No-op: see toBytes().
+        }
+    }
+
+    /** Activity 0: consumes the inner relation frame-by-frame and caches it in a NestedLoopJoin. */
+    private class JoinCacheActivityNode extends AbstractActivityNode {
+        private static final long serialVersionUID = 1L;
+
+        private final ActivityId nljAid;
+
+        public JoinCacheActivityNode(ActivityId id, ActivityId nljAid) {
+            super(id);
+            this.nljAid = nljAid;
+        }
+
+        @Override
+        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
+            // rd0 = outer side (the join activity's input), rd1 = inner side (this activity's input).
+            final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(nljAid, 0);
+            final RecordDescriptor rd1 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            final ITuplePairComparator comparator = comparatorFactory.createTuplePairComparator(ctx);
+
+            final INullWriter[] nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
+            if (isLeftOuter) {
+                for (int i = 0; i < nullWriterFactories1.length; i++) {
+                    nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
+                }
+            }
+
+            IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
+                private JoinCacheTaskState state;
+
+                @Override
+                public void open() throws HyracksDataException {
+                    state = new JoinCacheTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(),
+                            partition));
+
+                    state.joiner = new NestedLoopJoin(ctx, new FrameTupleAccessor(ctx.getFrameSize(), rd0),
+                            new FrameTupleAccessor(ctx.getFrameSize(), rd1), comparator, memSize, isLeftOuter,
+                            nullWriters1);
+
+                }
+
+                @Override
+                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                    // Copy the incoming frame before caching: the input buffer is reused upstream.
+                    ByteBuffer copyBuffer = ctx.allocateFrame();
+                    FrameUtils.copy(buffer, copyBuffer);
+                    FrameUtils.makeReadable(copyBuffer);
+                    state.joiner.cache(copyBuffer);
+                }
+
+                @Override
+                public void close() throws HyracksDataException {
+                    // Seal the cache and publish the joiner for the join activity.
+                    state.joiner.closeCache();
+                    ctx.setStateObject(state);
+                }
+
+                @Override
+                public void fail() throws HyracksDataException {
+                    // NOTE(review): no cleanup of cached frames or run files on failure --
+                    // confirm resources are reclaimed elsewhere.
+                }
+            };
+            return op;
+        }
+    }
+
+    /** Activity 1: streams the outer relation through the cached inner relation and emits joined tuples. */
+    private class NestedLoopJoinActivityNode extends AbstractActivityNode {
+        private static final long serialVersionUID = 1L;
+
+        public NestedLoopJoinActivityNode(ActivityId id) {
+            super(id);
+        }
+
+        @Override
+        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
+
+            IOperatorNodePushable op = new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
+                private JoinCacheTaskState state;
+
+                @Override
+                public void open() throws HyracksDataException {
+                    // Retrieve the joiner cached by the partner activity for this partition.
+                    state = (JoinCacheTaskState) ctx.getStateObject(new TaskId(new ActivityId(getOperatorId(),
+                            JOIN_CACHE_ACTIVITY_ID), partition));
+                    writer.open();
+                }
+
+                @Override
+                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                    state.joiner.join(buffer, writer);
+                }
+
+                @Override
+                public void close() throws HyracksDataException {
+                    state.joiner.closeJoin(writer);
+                    writer.close();
+                }
+
+                @Override
+                public void fail() throws HyracksDataException {
+                    writer.fail();
+                }
+            };
+            return op;
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
new file mode 100644
index 0000000..6e2b16a
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
@@ -0,0 +1,624 @@
+package edu.uci.ics.hyracks.dataflow.std.join;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTuplePairComparator;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileReader;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileWriter;
+import edu.uci.ics.hyracks.dataflow.std.structures.ISerializableTable;
+import edu.uci.ics.hyracks.dataflow.std.structures.SerializableHashTable;
+
+/**
+ * Applies one level of Hybrid Hash Join (HHJ) to a pair of relations. It is
+ * always invoked by the operator descriptor.
+ *
+ * @author pouria
+ */
+public class OptimizedHybridHashJoin {
+
+    // Sentinel values used in the buffer-chain bookkeeping. Declared static:
+    // they are per-class constants, not per-instance state.
+    private static final int NO_MORE_FREE_BUFFER = -1;
+    private static final int END_OF_PARTITION = -1;
+    private static final int INVALID_BUFFER = -2;
+    private static final int UNALLOCATED_FRAME = -3;
+    private static final int BUFFER_FOR_RESIDENT_PARTS = -1;
+
+    private IHyracksTaskContext ctx;
+
+    private final String rel0Name; //Run-file name prefix for the build side
+    private final String rel1Name; //Run-file name prefix for the probe side
+
+    private final int[] buildKeys;
+    private final int[] probeKeys;
+
+    private final IBinaryComparator[] comparators;
+
+    private ITuplePartitionComputer buildHpc;
+    private ITuplePartitionComputer probeHpc;
+
+    private final RecordDescriptor buildRd;
+    private final RecordDescriptor probeRd;
+
+    private RunFileWriter[] buildRFWriters; //writing spilled build partitions
+    private RunFileWriter[] probeRFWriters; //writing spilled probe partitions
+
+    private final boolean isLeftOuter;
+    private final INullWriter[] nullWriters1;
+
+    private ByteBuffer[] memBuffs; //Memory buffers for build
+    private int[] curPBuff; //Current (last) Buffer for each partition
+    private int[] nextBuff; //Next buffer in the partition's buffer chain
+    private int[] buildPSizeInTups; //Size of build partitions (in tuples)
+    private int[] probePSizeInTups; //Size of probe partitions (in tuples)
+    private int nextFreeBuffIx; //Index of next available free buffer to allocate/use
+    private BitSet pStatus; //0=resident, 1=spilled
+    private int numOfPartitions;
+    private int memForJoin;
+    private InMemoryHashJoin inMemJoiner; //Used for joining resident partitions
+
+    private final FrameTupleAccessor accessorBuild;
+    private final FrameTupleAccessor accessorProbe;
+    private FrameTupleAppender buildTupAppender;
+    private FrameTupleAppender probeTupAppenderToResident;
+    private FrameTupleAppender probeTupAppenderToSpilled;
+
+    private int numOfSpilledParts;
+    private ByteBuffer[] sPartBuffs; //Buffers for probe spilled partitions (one buffer per spilled partition)
+    private ByteBuffer probeResBuff; //Buffer for probe resident partition tuples
+    private ByteBuffer reloadBuffer; //Buffer for reloading spilled partitions during partition tuning
+
+    private int[] buildPSizeInFrames; //Used for partition tuning
+    private int freeFramesCounter; //Used for partition tuning
+
+    private boolean isTableEmpty; //Added for handling the case, where build side is empty (tableSize is 0)
+
+    /**
+     * Creates an inner-join instance. Delegates to the full constructor with
+     * {@code isLeftOuter=false} and no null writers, removing the previous
+     * verbatim duplication of the initialization code.
+     */
+    public OptimizedHybridHashJoin(IHyracksTaskContext ctx, int memForJoin, int numOfPartitions, String rel0Name,
+            String rel1Name, int[] keys0, int[] keys1, IBinaryComparator[] comparators, RecordDescriptor buildRd,
+            RecordDescriptor probeRd, ITuplePartitionComputer probeHpc, ITuplePartitionComputer buildHpc) {
+        this(ctx, memForJoin, numOfPartitions, rel0Name, rel1Name, keys0, keys1, comparators, buildRd, probeRd,
+                probeHpc, buildHpc, false, null);
+    }
+
+    /**
+     * Full constructor, supporting left-outer join.
+     *
+     * @param ctx task context used for frame allocation and run-file creation
+     * @param memForJoin number of frames available to the join
+     * @param numOfPartitions number of partitions each relation is split into
+     * @param rel0Name run-file name prefix for the build side
+     * @param rel1Name run-file name prefix for the probe side
+     * @param keys0 probe-side key fields
+     * @param keys1 build-side key fields
+     * @param comparators comparators over the key fields
+     * @param buildRd build-side record descriptor
+     * @param probeRd probe-side record descriptor
+     * @param probeHpc partition computer for probe tuples
+     * @param buildHpc partition computer for build tuples
+     * @param isLeftOuter if true, unmatched probe tuples are padded with nulls
+     * @param nullWriterFactories1 writers producing the null padding (only read when isLeftOuter)
+     */
+    public OptimizedHybridHashJoin(IHyracksTaskContext ctx, int memForJoin, int numOfPartitions, String rel0Name,
+            String rel1Name, int[] keys0, int[] keys1, IBinaryComparator[] comparators, RecordDescriptor buildRd,
+            RecordDescriptor probeRd, ITuplePartitionComputer probeHpc, ITuplePartitionComputer buildHpc,
+            boolean isLeftOuter, INullWriterFactory[] nullWriterFactories1) {
+        this.ctx = ctx;
+        this.memForJoin = memForJoin;
+        this.buildRd = buildRd;
+        this.probeRd = probeRd;
+        this.buildHpc = buildHpc;
+        this.probeHpc = probeHpc;
+        this.buildKeys = keys1; //build keys come from keys1, probe keys from keys0
+        this.probeKeys = keys0;
+        this.comparators = comparators;
+        this.rel0Name = rel0Name;
+        this.rel1Name = rel1Name;
+
+        this.numOfPartitions = numOfPartitions;
+        this.buildRFWriters = new RunFileWriter[numOfPartitions];
+        this.probeRFWriters = new RunFileWriter[numOfPartitions];
+
+        this.accessorBuild = new FrameTupleAccessor(ctx.getFrameSize(), buildRd);
+        this.accessorProbe = new FrameTupleAccessor(ctx.getFrameSize(), probeRd);
+
+        this.isLeftOuter = isLeftOuter;
+
+        this.nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
+        if (isLeftOuter) {
+            for (int i = 0; i < nullWriterFactories1.length; i++) {
+                nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
+            }
+        }
+    }
+
+    /**
+     * Sets up the build phase: allocates one dedicated frame per partition,
+     * initializes each partition's buffer chain, and links the remaining frames
+     * into a free list (lazily allocated on first use).
+     */
+    public void initBuild() {
+        memBuffs = new ByteBuffer[memForJoin];
+        curPBuff = new int[numOfPartitions];
+        nextBuff = new int[memForJoin];
+        pStatus = new BitSet(numOfPartitions);
+        buildPSizeInTups = new int[numOfPartitions];
+
+        buildPSizeInFrames = new int[numOfPartitions];
+        freeFramesCounter = memForJoin - numOfPartitions;
+
+        for (int i = 0; i < numOfPartitions; i++) { //Allocating one buffer per partition and setting as the head of the chain of buffers for that partition
+            memBuffs[i] = ctx.allocateFrame();
+            curPBuff[i] = i;
+            nextBuff[i] = -1;
+            buildPSizeInFrames[i] = 1; //The dedicated initial buffer
+        }
+
+        nextFreeBuffIx = ((numOfPartitions < memForJoin) ? numOfPartitions : NO_MORE_FREE_BUFFER); //Setting the chain of unallocated frames
+        for (int i = numOfPartitions; i < memBuffs.length; i++) {
+            nextBuff[i] = UNALLOCATED_FRAME;
+        }
+
+        buildTupAppender = new FrameTupleAppender(ctx.getFrameSize());
+
+    }
+
+    /**
+     * Consumes one frame of the build relation: hashes each tuple to a partition
+     * and appends it to that partition's in-memory buffer (spilling as needed via
+     * processTuple). Dead debug-print scaffolding was removed.
+     *
+     * @param buffer frame containing build-side tuples
+     */
+    public void build(ByteBuffer buffer) throws HyracksDataException {
+        accessorBuild.reset(buffer);
+        int tupleCount = accessorBuild.getTupleCount();
+
+        for (int i = 0; i < tupleCount; ++i) {
+            int pid = buildHpc.partition(accessorBuild, i, numOfPartitions);
+            processTuple(i, pid);
+            buildPSizeInTups[pid]++;
+        }
+    }
+
+    /**
+     * Appends build tuple {@code tid} (of accessorBuild's current frame) to
+     * partition {@code pid}. Resident partitions grow their in-memory chain,
+     * allocating free frames and spilling the largest resident partition when
+     * memory runs out; spilled partitions append to their single dedicated
+     * frame, flushing it to the run file when full.
+     */
+    private void processTuple(int tid, int pid) throws HyracksDataException {
+        ByteBuffer partition = memBuffs[curPBuff[pid]]; //Getting current buffer for the target partition
+
+        if (!pStatus.get(pid)) { //resident partition
+            buildTupAppender.reset(partition, false);
+            while (true) {
+                if (buildTupAppender.append(accessorBuild, tid)) { //Tuple added to resident partition successfully
+                    break;
+                }
+                //partition does not have enough room
+                int newBuffIx = allocateFreeBuffer(pid);
+                if (newBuffIx == NO_MORE_FREE_BUFFER) { //Spill one partition
+                    int pidToSpill = selectPartitionToSpill();
+                    if (pidToSpill == -1) { //No more partition to spill
+                        throw new HyracksDataException("not enough memory for Hash Join (Allocation exceeds the limit)");
+                    }
+                    spillPartition(pidToSpill);
+                    buildTupAppender.reset(memBuffs[pidToSpill], true);
+                    // Retry: pid may itself be the spilled partition, so re-dispatch.
+                    processTuple(tid, pid);
+                    break;
+                }  //New Buffer allocated successfully
+                partition = memBuffs[curPBuff[pid]]; //Current Buffer for the partition is now updated by allocateFreeBuffer() call above
+                buildTupAppender.reset(partition, true);
+                if (!buildTupAppender.append(accessorBuild, tid)) {
+                    throw new HyracksDataException("Invalid State (Can not append to newly allocated buffer)");
+                }
+                buildPSizeInFrames[pid]++;
+                break;
+            }
+        } else { //spilled partition
+            boolean needClear = false;
+            while (true) {
+                buildTupAppender.reset(partition, needClear);
+                if (buildTupAppender.append(accessorBuild, tid)) {
+                    break;
+                }
+                //Dedicated in-memory buffer for the partition is full, needed to be flushed first 
+                buildWrite(pid, partition);
+                partition.clear();
+                needClear = true;
+                buildPSizeInFrames[pid]++;
+            }
+        }
+    }
+
+    /**
+     * Takes the head frame off the free list and makes it the current (head)
+     * buffer of partition {@code pid}'s chain, allocating the frame lazily on
+     * first use.
+     *
+     * @param pid partition needing another in-memory buffer
+     * @return index of the newly assigned buffer, or NO_MORE_FREE_BUFFER when
+     *         the free list is exhausted (caller must spill a partition)
+     */
+    private int allocateFreeBuffer(int pid) {
+        if (nextFreeBuffIx != NO_MORE_FREE_BUFFER) {
+            if (memBuffs[nextFreeBuffIx] == null) {
+                memBuffs[nextFreeBuffIx] = ctx.allocateFrame();
+            }
+            int curPartBuffIx = curPBuff[pid];
+            curPBuff[pid] = nextFreeBuffIx;
+            int oldNext = nextBuff[nextFreeBuffIx];
+            nextBuff[nextFreeBuffIx] = curPartBuffIx;
+            if (oldNext == UNALLOCATED_FRAME) {
+                // Free list continues into the never-touched tail of memBuffs.
+                nextFreeBuffIx++;
+                if (nextFreeBuffIx == memForJoin) { //No more free buffer
+                    nextFreeBuffIx = NO_MORE_FREE_BUFFER;
+                }
+            } else {
+                nextFreeBuffIx = oldNext;
+            }
+            (memBuffs[curPBuff[pid]]).clear();
+
+            freeFramesCounter--;
+            return (curPBuff[pid]);
+        } else {
+            return NO_MORE_FREE_BUFFER; //A partitions needs to be spilled (if feasible)
+        }
+    }
+
+    /** Picks the resident partition with the most build tuples as the spill victim; -1 if none is resident. */
+    private int selectPartitionToSpill() {
+        int victim = -1;
+        int victimSize = -1;
+        for (int pid = 0; pid < buildPSizeInTups.length; pid++) {
+            boolean resident = !pStatus.get(pid);
+            if (resident && buildPSizeInTups[pid] > victimSize) {
+                victimSize = buildPSizeInTups[pid];
+                victim = pid;
+            }
+        }
+        return victim;
+    }
+
+    /**
+     * Spills partition {@code pid}: flushes every frame in its in-memory chain
+     * to the build run file and returns all frames except the partition's
+     * dedicated one (index == pid) to the free list, then marks it spilled.
+     */
+    private void spillPartition(int pid) throws HyracksDataException {
+        int curBuffIx = curPBuff[pid];
+        ByteBuffer buff = null;
+        while (curBuffIx != END_OF_PARTITION) {
+            buff = memBuffs[curBuffIx];
+            buildWrite(pid, buff);
+            buff.clear();
+
+            int freedBuffIx = curBuffIx;
+            curBuffIx = nextBuff[curBuffIx];
+
+            if (freedBuffIx != pid) {
+                // Push the freed frame onto the free list; the dedicated frame stays with pid.
+                nextBuff[freedBuffIx] = nextFreeBuffIx;
+                nextFreeBuffIx = freedBuffIx;
+                freeFramesCounter++;
+            }
+        }
+        curPBuff[pid] = pid;
+        pStatus.set(pid);
+    }
+
+    /** Appends a frame to pid's build run file, lazily creating and opening the writer on first use. */
+    private void buildWrite(int pid, ByteBuffer buff) throws HyracksDataException {
+        if (buildRFWriters[pid] == null) {
+            FileReference file = ctx.getJobletContext().createManagedWorkspaceFile(rel0Name);
+            RunFileWriter newWriter = new RunFileWriter(file, ctx.getIOManager());
+            newWriter.open();
+            buildRFWriters[pid] = newWriter;
+        }
+        buildRFWriters[pid].nextFrame(buff);
+    }
+
+    /**
+     * Finishes the build phase: reclaims frames of empty partitions, flushes and
+     * releases the dedicated frames of spilled partitions (closing their run
+     * files), attempts partition tuning to bring spilled partitions back into
+     * memory, and finally builds the in-memory hash table over all resident
+     * build tuples.
+     */
+    public void closeBuild() throws HyracksDataException {
+        for (int i = 0; i < numOfPartitions; i++) { //Remove Empty Partitions' allocated frame
+            if (buildPSizeInTups[i] == 0) {
+                buildPSizeInFrames[i]--;
+                nextBuff[curPBuff[i]] = nextFreeBuffIx;
+                nextFreeBuffIx = curPBuff[i];
+                curPBuff[i] = INVALID_BUFFER;
+                freeFramesCounter++;
+            }
+        }
+
+        ByteBuffer buff = null;
+        for (int i = pStatus.nextSetBit(0); i >= 0; i = pStatus.nextSetBit(i + 1)) { //flushing and DeAllocating the dedicated buffers for the spilled partitions
+            buff = memBuffs[i];
+            accessorBuild.reset(buff);
+            if (accessorBuild.getTupleCount() > 0) {
+                buildWrite(i, buff);
+                buildPSizeInFrames[i]++;
+            }
+            nextBuff[i] = nextFreeBuffIx;
+            nextFreeBuffIx = i;
+            freeFramesCounter++;
+            curPBuff[i] = INVALID_BUFFER;
+
+            if (buildRFWriters[i] != null) {
+                buildRFWriters[i].close();
+            }
+        }
+
+        partitionTune(); //Trying to bring back as many spilled partitions as possible, making them resident
+
+        int inMemTupCount = 0;
+        numOfSpilledParts = 0;
+
+        for (int i = 0; i < numOfPartitions; i++) {
+            if (!pStatus.get(i)) {
+                inMemTupCount += buildPSizeInTups[i];
+            } else {
+                numOfSpilledParts++;
+            }
+        }
+
+        createInMemoryJoiner(inMemTupCount);
+        cacheInMemJoin();
+        // Records whether anything at all is resident (used by callers to short-circuit).
+        this.isTableEmpty = (inMemTupCount == 0);
+    }
+
+    /**
+     * Partition tuning: brings spilled partitions back into memory when enough
+     * free frames exist. For each selected partition, the needed frames are
+     * taken off the free list, chained together, and filled from the
+     * partition's build run file.
+     */
+    private void partitionTune() throws HyracksDataException {
+        reloadBuffer = ctx.allocateFrame();
+        ArrayList<Integer> reloadSet = selectPartitionsToReload();
+        for (int i = 0; i < reloadSet.size(); i++) {
+            int pid = reloadSet.get(i);
+            int[] buffsToLoad = new int[buildPSizeInFrames[pid]];
+            for (int j = 0; j < buffsToLoad.length; j++) {
+                // Pop frames off the free list (same walk as allocateFreeBuffer()).
+                buffsToLoad[j] = nextFreeBuffIx;
+                int oldNext = nextBuff[nextFreeBuffIx];
+                if (oldNext == UNALLOCATED_FRAME) {
+                    nextFreeBuffIx++;
+                    if (nextFreeBuffIx == memForJoin) { //No more free buffer
+                        nextFreeBuffIx = NO_MORE_FREE_BUFFER;
+                    }
+                } else {
+                    nextFreeBuffIx = oldNext;
+                }
+
+            }
+            curPBuff[pid] = buffsToLoad[0];
+            for (int k = 1; k < buffsToLoad.length; k++) {
+                nextBuff[buffsToLoad[k - 1]] = buffsToLoad[k];
+            }
+            loadPartitionInMem(pid, buildRFWriters[pid], buffsToLoad);
+        }
+        reloadSet.clear();
+        reloadSet = null;
+    }
+
+    /**
+     * Reads pid's spilled frames back into the given in-memory frame slots
+     * (allocating frames lazily), terminates the partition's chain, restores the
+     * free-list head, and marks the partition resident again.
+     *
+     * @param pid partition being reloaded
+     * @param wr run-file writer whose data is read back
+     * @param buffs frame indices reserved for this partition, in chain order
+     */
+    private void loadPartitionInMem(int pid, RunFileWriter wr, int[] buffs) throws HyracksDataException {
+        RunFileReader r = wr.createReader();
+        r.open();
+        int counter = 0;
+        ByteBuffer mBuff = null;
+        reloadBuffer.clear();
+        while (r.nextFrame(reloadBuffer)) {
+            mBuff = memBuffs[buffs[counter]];
+            if (mBuff == null) {
+                mBuff = ctx.allocateFrame();
+                memBuffs[buffs[counter]] = mBuff;
+            }
+            FrameUtils.copy(reloadBuffer, mBuff);
+            counter++;
+            reloadBuffer.clear();
+        }
+
+        // Cut the chain at the last loaded frame and return the unused remainder to the free list.
+        int curNext = nextBuff[buffs[buffs.length - 1]];
+        nextBuff[buffs[buffs.length - 1]] = END_OF_PARTITION;
+        nextFreeBuffIx = curNext;
+
+        r.close();
+        pStatus.set(pid, false);
+        buildRFWriters[pid] = null;
+    }
+
+    /**
+     * Chooses spilled partitions that fit within the currently free frames,
+     * greedily in partition order, reserving frames as it goes. Stops as soon as
+     * fewer than one free frame remains.
+     */
+    private ArrayList<Integer> selectPartitionsToReload() {
+        ArrayList<Integer> reloadable = new ArrayList<Integer>();
+        for (int pid = pStatus.nextSetBit(0); pid >= 0; pid = pStatus.nextSetBit(pid + 1)) {
+            int framesNeeded = buildPSizeInFrames[pid];
+            if (framesNeeded > 0 && freeFramesCounter >= framesNeeded) {
+                reloadable.add(pid);
+                freeFramesCounter -= framesNeeded;
+            }
+            if (freeFramesCounter < 1) { //No more free buffer available
+                break;
+            }
+        }
+        return reloadable;
+    }
+
+    /**
+     * Creates the in-memory hash joiner, sized for the resident build tuples.
+     *
+     * @param inMemTupCount number of build tuples kept in memory
+     */
+    private void createInMemoryJoiner(int inMemTupCount) throws HyracksDataException {
+        ISerializableTable table = new SerializableHashTable(inMemTupCount, ctx);
+        this.inMemJoiner = new InMemoryHashJoin(ctx, inMemTupCount,
+                new FrameTupleAccessor(ctx.getFrameSize(), probeRd), probeHpc, new FrameTupleAccessor(
+                        ctx.getFrameSize(), buildRd), buildHpc, new FrameTuplePairComparator(probeKeys, buildKeys,
+                        comparators), isLeftOuter, nullWriters1, table);
+    }
+
+    /** Feeds every resident partition's in-memory frame chain into the in-memory joiner's hash table. */
+    private void cacheInMemJoin() throws HyracksDataException {
+        for (int pid = 0; pid < numOfPartitions; pid++) {
+            if (pStatus.get(pid)) {
+                continue; // spilled partition: joined later from its run files
+            }
+            // Walk the linked chain of frames; negative indices mark INVALID_BUFFER/END_OF_PARTITION.
+            for (int buffIx = curPBuff[pid]; buffIx > -1; buffIx = nextBuff[buffIx]) {
+                inMemJoiner.build(memBuffs[buffIx]);
+            }
+        }
+    }
+
+    /**
+     * Sets up the probe phase: one buffer per spilled partition, a shared batch
+     * buffer for tuples of resident partitions, and fresh appenders/counters.
+     */
+    public void initProbe() {
+
+        sPartBuffs = new ByteBuffer[numOfSpilledParts];
+        for (int i = 0; i < numOfSpilledParts; i++) {
+            sPartBuffs[i] = ctx.allocateFrame();
+        }
+        curPBuff = new int[numOfPartitions];
+        int nextBuffIxToAlloc = 0;
+        /* We only need to allocate one frame per spilled partition. 
+         * Resident partitions do not need frames in probe, as their tuples join 
+         * immediately with the resident build tuples using the inMemoryHashJoin */
+        for (int i = 0; i < numOfPartitions; i++) { 
+            curPBuff[i] = (pStatus.get(i)) ? nextBuffIxToAlloc++ : BUFFER_FOR_RESIDENT_PARTS;
+        }
+        probePSizeInTups = new int[numOfPartitions];
+        probeRFWriters = new RunFileWriter[numOfPartitions];
+
+        probeResBuff = ctx.allocateFrame();
+
+        probeTupAppenderToResident = new FrameTupleAppender(ctx.getFrameSize());
+        probeTupAppenderToResident.reset(probeResBuff, true);
+
+        probeTupAppenderToSpilled = new FrameTupleAppender(ctx.getFrameSize());
+
+    }
+
+    /**
+     * Consumes one frame of the probe relation. If nothing spilled during build,
+     * the whole frame is joined in memory directly. Otherwise each tuple is
+     * routed by partition: tuples of spilled partitions are buffered and written
+     * to probe run files; tuples of resident partitions are batched and joined
+     * in memory. Dead debug-print scaffolding was removed.
+     *
+     * NOTE(review): probe tuples whose build partition is empty are dropped even
+     * when isLeftOuter is true -- confirm unmatched-outer padding is handled here
+     * or by the caller.
+     *
+     * @param buffer frame of probe-side tuples
+     * @param writer sink for joined output frames
+     */
+    public void probe(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
+        accessorProbe.reset(buffer);
+        int tupleCount = accessorProbe.getTupleCount();
+
+        if (numOfSpilledParts == 0) {
+            inMemJoiner.join(buffer, writer);
+            return;
+        }
+
+        for (int i = 0; i < tupleCount; ++i) {
+            int pid = probeHpc.partition(accessorProbe, i, numOfPartitions);
+
+            if (buildPSizeInTups[pid] > 0) { //Tuple has potential match from previous phase
+                if (pStatus.get(pid)) { //pid is Spilled
+                    boolean needToClear = false;
+                    ByteBuffer buff = sPartBuffs[curPBuff[pid]];
+                    while (true) {
+                        probeTupAppenderToSpilled.reset(buff, needToClear);
+                        if (probeTupAppenderToSpilled.append(accessorProbe, i)) {
+                            break;
+                        }
+                        // Dedicated frame for this spilled partition is full: flush to its run file.
+                        probeWrite(pid, buff);
+                        buff.clear();
+                        needToClear = true;
+                    }
+                } else { //pid is Resident
+                    while (true) {
+                        if (probeTupAppenderToResident.append(accessorProbe, i)) {
+                            break;
+                        }
+                        // Resident batch frame is full: join it now and start a fresh batch.
+                        inMemJoiner.join(probeResBuff, writer);
+                        probeTupAppenderToResident.reset(probeResBuff, true);
+                    }
+                }
+                probePSizeInTups[pid]++;
+            }
+        }
+    }
+
+    /**
+     * Finishes the probe phase: joins the last resident batch, closes the
+     * in-memory joiner, and flushes/closes the run files of spilled probe
+     * partitions.
+     */
+    public void closeProbe(IFrameWriter writer) throws HyracksDataException { //We do NOT join the spilled partitions here, that decision is made at the descriptor level (which join technique to use)
+        inMemJoiner.join(probeResBuff, writer);
+        inMemJoiner.closeJoin(writer);
+
+        for (int pid = pStatus.nextSetBit(0); pid >= 0; pid = pStatus.nextSetBit(pid + 1)) {
+            ByteBuffer buff = sPartBuffs[curPBuff[pid]];
+            accessorProbe.reset(buff);
+            if (accessorProbe.getTupleCount() > 0) {
+                probeWrite(pid, buff);
+            }
+            closeProbeWriter(pid);
+        }
+    }
+
+    /** Appends a probe frame to pid's probe run file, creating and opening the writer on first use. */
+    private void probeWrite(int pid, ByteBuffer buff) throws HyracksDataException {
+        if (probeRFWriters[pid] == null) {
+            FileReference file = ctx.createManagedWorkspaceFile(rel1Name);
+            RunFileWriter newWriter = new RunFileWriter(file, ctx.getIOManager());
+            newWriter.open();
+            probeRFWriters[pid] = newWriter;
+        }
+        probeRFWriters[pid].nextFrame(buff);
+    }
+
+    private void closeProbeWriter(int pid) throws HyracksDataException {
+        RunFileWriter writer = probeRFWriters[pid];
+        if (writer != null) {
+            writer.close();
+        }
+    }
+
+    public RunFileReader getBuildRFReader(int pid) throws HyracksDataException {
+        return ((buildRFWriters[pid] == null) ? null : (buildRFWriters[pid]).createReader());
+    }
+
+    public long getBuildPartitionSize(int pid) {
+        return ((buildRFWriters[pid] == null) ? 0 : buildRFWriters[pid].getFileSize());
+    }
+
+    public int getBuildPartitionSizeInTup(int pid) {
+        return (buildPSizeInTups[pid]);
+    }
+
+    public RunFileReader getProbeRFReader(int pid) throws HyracksDataException {
+        return ((probeRFWriters[pid] == null) ? null : (probeRFWriters[pid]).createReader());
+    }
+
+    public long getProbePartitionSize(int pid) {
+        return ((probeRFWriters[pid] == null) ? 0 : probeRFWriters[pid].getFileSize());
+    }
+
+    public int getProbePartitionSizeInTup(int pid) {
+        return (probePSizeInTups[pid]);
+    }
+
+    /** @return the largest build-partition size, in tuples. */
+    public int getMaxBuildPartitionSize() {
+        return maxOf(buildPSizeInTups);
+    }
+
+    /** @return the largest probe-partition size, in tuples. */
+    public int getMaxProbePartitionSize() {
+        return maxOf(probePSizeInTups);
+    }
+
+    /** @return the maximum element of a non-empty int array. */
+    private static int maxOf(int[] counts) {
+        int max = counts[0];
+        for (int i = 1; i < counts.length; i++) {
+            max = Math.max(max, counts[i]);
+        }
+        return max;
+    }
+
+    /**
+     * Exposes the spill-status bitset (bit set = partition spilled, clear = resident).
+     * NOTE(review): the method name has a typo ("Partitin") but is kept as-is
+     * because external callers may already depend on it.
+     */
+    public BitSet getPartitinStatus() {
+        return pStatus;
+    }
+
+    /**
+     * Builds a human-readable summary of partition/spill statistics for
+     * debugging. Guards against division by zero: when no partition spilled,
+     * the averages are reported as 0 instead of NaN.
+     *
+     * @return multi-line stats string
+     */
+    public String debugGetStats() {
+        int numOfResidentPartitions = 0;
+        int numOfSpilledPartitions = 0;
+        double sumOfBuildSpilledSizes = 0;
+        double sumOfProbeSpilledSizes = 0;
+        int numOfInMemTups = 0;
+        for (int i = 0; i < numOfPartitions; i++) {
+            if (pStatus.get(i)) { //Spilled
+                numOfSpilledPartitions++;
+                sumOfBuildSpilledSizes += buildPSizeInTups[i];
+                sumOfProbeSpilledSizes += probePSizeInTups[i];
+            } else { //Resident
+                numOfResidentPartitions++;
+                numOfInMemTups += buildPSizeInTups[i];
+            }
+        }
+
+        double avgBuildSpSz = (numOfSpilledPartitions == 0) ? 0 : sumOfBuildSpilledSizes / numOfSpilledPartitions;
+        double avgProbeSpSz = (numOfSpilledPartitions == 0) ? 0 : sumOfProbeSpilledSizes / numOfSpilledPartitions;
+        return "Resident Partitions:\t" + numOfResidentPartitions + "\nSpilled Partitions:\t"
+                + numOfSpilledPartitions + "\nAvg Build Spilled Size:\t" + avgBuildSpSz + "\nAvg Probe Spilled Size:\t"
+                + avgProbeSpSz + "\nIn-Memory Tups:\t" + numOfInMemTups + "\nNum of Free Buffers:\t"
+                + freeFramesCounter;
+    }
+    
+    /** @return true when the build side left no resident (in-memory) tuples. */
+    public boolean isTableEmpty() {
+        return isTableEmpty;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
new file mode 100644
index 0000000..cf39416
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -0,0 +1,648 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.std.join;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.BitSet;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.IActivityGraphBuilder;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.TaskId;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFamily;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTuplePairComparator;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFamily;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.RepartitionComputerGeneratorFactory;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileReader;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractActivityNode;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractStateObject;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.structures.ISerializableTable;
+import edu.uci.ics.hyracks.dataflow.std.structures.SerializableHashTable;
+
+/**
+ * @author pouria
+ *         This class guides the joining process, and switches between different
+ *         joining techniques, w.r.t the implemented optimizations and skew in size of the
+ *         partitions.
+ *         - Operator overview:
+ *         Assume we are trying to do (R Join S), with M buffers available, while we have an estimate on the size
+ *         of R (in terms of buffers). HHJ (Hybrid Hash Join) has two main phases: Build and Probe, where in our implementation Probe phase
 *         can apply HHJ recursively, based on the value of M and size of R and S. HHJ phases proceed as follows:
+ *         BUILD:
+ *         Calculate number of partitions (Based on the size of R, fudge factor and M) [See Shapiro's paper for the detailed discussion].
+ *         Initialize the build phase (one frame per partition, all partitions considered resident at first)
+ *         Read tuples of R, frame by frame, and hash each tuple (based on a given hash function) to find
+ *         its target partition and try to append it to that partition:
+ *         If target partition's buffer is full, try to allocate a new buffer for it.
+ *         if no free buffer is available, find the largest resident partition and spill it. Using its freed
+ *         buffers after spilling, allocate a new buffer for the target partition.
+ *         Being done with R, close the build phase. (During closing we write the very last buffer of each
+ *         spilled partition to the disk, and we do partition tuning, where we try to bring back as many buffers, belonging to
+ *         spilled partitions as possible into memory, based on the free buffers - We will stop at the point where remaining free buffers is not enough
+ *         for reloading an entire partition back into memory)
+ *         Create the hash table for the resident partitions (basically we create an in-memory hash join here)
+ *         PROBE:
+ *         Initialize the probe phase on S (mainly allocate one buffer per spilled partition, and one buffer
+ *         for the whole resident partitions)
+ *         Read tuples of S, frame by frame and hash each tuple T to its target partition P
 *         if P is a resident partition, pass T to the in-memory hash join and generate the output records
 *         for any matching build-side records found
+ *         if P is spilled, write T to the dedicated buffer for P (on the probe side)
+ *         Once scanning of S is done, we try to join partition pairs (Ri, Si) of the spilled partitions:
+ *         if any of Ri or Si is smaller than M, then we simply use an in-memory hash join to join them
+ *         otherwise we apply HHJ recursively:
+ *         if after applying HHJ recursively, we do not gain enough size reduction (max size of the
+ *         resulting partitions were more than 80% of the initial Ri,Si size) then we switch to
+ *         nested loop join for joining.
+ *         (At each step of partition-pair joining, we consider role reversal, which means if size of Si were
+ *         greater than Ri, then we make sure that we switch the roles of build/probe between them)
+ */
+
+public class OptimizedHybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor {
    // Activity ids within this operator: phase 0 partitions and builds, phase 1 probes and joins.
    private static final int BUILD_AND_PARTITION_ACTIVITY_ID = 0;
    private static final int PARTITION_AND_JOIN_ACTIVITY_ID = 1;

    private static final long serialVersionUID = 1L;
    // If a recursion level shrinks the largest partition to less than 80% of its
    // previous size we keep recursing; otherwise we switch to nested-loop join
    // (see joinPartitionPair in the probe activity).
    private static final double NLJ_SWITCH_THRESHOLD = 0.8;

    // Relation-name prefixes used for the partitions' run files.
    // NOTE(review): the class comment describes the build side as R, yet BUILD_REL is
    // "RelS" — harmless as a file-name prefix, but confirm the labels are as intended.
    private static final String PROBE_REL = "RelR";
    private static final String BUILD_REL = "RelS";

    private final int memsize; // total frames available to the join (includes the 2 dedicated in/out buffers)
    private final int inputsize0; // estimated size of the build relation, in frames
    private final double fudgeFactor; // multiplier applied to the build-size estimate (Shapiro's fudge factor)
    private final int[] probeKeys; // join key fields on the probe side
    private final int[] buildKeys; // join key fields on the build side
    private final IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories; // yields a distinct hash fn per recursion level
    private final IBinaryComparatorFactory[] comparatorFactories; //For in-mem HJ
    private final ITuplePairComparatorFactory tuplePairComparatorFactory0; //For NLJ in probe; presumably probe-as-outer orientation — verify against call sites
    private final ITuplePairComparatorFactory tuplePairComparatorFactory1; //For NLJ in probe; presumably build-as-outer orientation — verify against call sites

    private final boolean isLeftOuter; // left outer join: unmatched probe tuples are padded via nullWriterFactories1
    private final INullWriterFactory[] nullWriterFactories1; // null unless isLeftOuter
+
+    public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memsize, int inputsize0,
+            double factor, int[] keys0, int[] keys1, IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories,
+            IBinaryComparatorFactory[] comparatorFactories, RecordDescriptor recordDescriptor,
+            ITuplePairComparatorFactory tupPaircomparatorFactory0,
+            ITuplePairComparatorFactory tupPaircomparatorFactory1, boolean isLeftOuter,
+            INullWriterFactory[] nullWriterFactories1) throws HyracksDataException {
+
+        super(spec, 2, 1);
+        this.memsize = memsize;
+        this.inputsize0 = inputsize0;
+        this.fudgeFactor = factor;
+        this.probeKeys = keys0;
+        this.buildKeys = keys1;
+        this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
+        this.comparatorFactories = comparatorFactories;
+        this.tuplePairComparatorFactory0 = tupPaircomparatorFactory0;
+        this.tuplePairComparatorFactory1 = tupPaircomparatorFactory1;
+        recordDescriptors[0] = recordDescriptor;
+        this.isLeftOuter = isLeftOuter;
+        this.nullWriterFactories1 = nullWriterFactories1;
+        
+
+    }
+
+    public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memsize, int inputsize0,
+            double factor, int[] keys0, int[] keys1, IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories,
+            IBinaryComparatorFactory[] comparatorFactories, RecordDescriptor recordDescriptor,
+            ITuplePairComparatorFactory tupPaircomparatorFactory0, ITuplePairComparatorFactory tupPaircomparatorFactory1)
+            throws HyracksDataException {
+
+        super(spec, 2, 1);
+        this.memsize = memsize;
+        this.inputsize0 = inputsize0;
+        this.fudgeFactor = factor;
+        this.probeKeys = keys0;
+        this.buildKeys = keys1;
+        this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
+        this.comparatorFactories = comparatorFactories;
+        this.tuplePairComparatorFactory0 = tupPaircomparatorFactory0;
+        this.tuplePairComparatorFactory1 = tupPaircomparatorFactory1;
+        recordDescriptors[0] = recordDescriptor;
+        this.isLeftOuter = false;
+        this.nullWriterFactories1 = null;
+    }
+
+    @Override
+    public void contributeActivities(IActivityGraphBuilder builder) {
+        ActivityId buildAid = new ActivityId(odId, BUILD_AND_PARTITION_ACTIVITY_ID);
+        ActivityId probeAid = new ActivityId(odId, PARTITION_AND_JOIN_ACTIVITY_ID);
+        PartitionAndBuildActivityNode phase1 = new PartitionAndBuildActivityNode(buildAid, probeAid);
+        ProbeAndJoinActivityNode phase2 = new ProbeAndJoinActivityNode(probeAid, buildAid);
+
+        builder.addActivity(this, phase1);
+        builder.addSourceEdge(1, phase1, 0);
+
+        builder.addActivity(this, phase2);
+        builder.addSourceEdge(0, phase2, 0);
+
+        builder.addBlockingEdge(phase1, phase2);
+
+        builder.addTargetEdge(0, phase2, 0);
+
+    }
+
+    //memorySize is the memory for join (we have already excluded the 2 buffers for in/out)
+    private int getNumberOfPartitions(int memorySize, int buildSize, double factor, int nPartitions)
+            throws HyracksDataException {
+        int numberOfPartitions = 0;
+        if (memorySize <= 1) {
+            throw new HyracksDataException("not enough memory is available for Hybrid Hash Join");
+        }
+        if (memorySize > buildSize) {
+            return 1; //We will switch to in-Mem HJ eventually
+        }
+        numberOfPartitions = (int) (Math.ceil((double) (buildSize * factor / nPartitions - memorySize)
+                / (double) (memorySize - 1)));
+        if (numberOfPartitions <= 0) {
+            numberOfPartitions = 1; //becomes in-memory hash join
+        }
+        if (numberOfPartitions > memorySize) {
+            numberOfPartitions = (int) Math.ceil(Math.sqrt(buildSize * factor / nPartitions));
+            return (numberOfPartitions < memorySize ? numberOfPartitions : memorySize);
+        }
+        return numberOfPartitions;
+    }
+
    /**
     * State handed from the build activity to the probe activity of one task
     * partition: the memory budget, the chosen partition count, and the
     * hybrid-hash-join instance holding the partitioned build relation.
     */
    public static class BuildAndPartitionTaskState extends AbstractStateObject {

        private int memForJoin; // frames usable by the join (memsize minus the 2 dedicated in/out buffers)
        private int numOfPartitions; // partition count chosen by Shapiro's formula
        private OptimizedHybridHashJoin hybridHJ; // carries resident partitions and spilled run files

        public BuildAndPartitionTaskState() {
        }

        private BuildAndPartitionTaskState(JobId jobId, TaskId taskId) {
            super(jobId, taskId);
        }

        // Serialization is intentionally a no-op: in this implementation the state is
        // exchanged via the task context (ctx.setStateObject/getStateObject), not over the wire.
        @Override
        public void toBytes(DataOutput out) throws IOException {

        }

        @Override
        public void fromBytes(DataInput in) throws IOException {

        }

    }
+
+    /*
+     * Build phase of Hybrid Hash Join:
+     * Creating an instance of Hybrid Hash Join, using Shapiro's formula
+     * to get the optimal number of partitions, build relation is read and
+     * partitioned, and hybrid hash join instance gets ready for the probing.
+     * (See OptimizedHybridHashJoin for the details on different steps)
+     */
+    private class PartitionAndBuildActivityNode extends AbstractActivityNode {
+        private static final long serialVersionUID = 1L;
+
+        private final ActivityId probeAid;
+
+        public PartitionAndBuildActivityNode(ActivityId id, ActivityId probeAid) {
+            super(id);
+            this.probeAid = probeAid;
+        }
+
+        @Override
+        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+                IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) {
+
+            final RecordDescriptor probeRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            final RecordDescriptor buildRd = recordDescProvider.getInputRecordDescriptor(probeAid, 0);
+
+            final IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
+            for (int i = 0; i < comparatorFactories.length; i++) {
+                comparators[i] = comparatorFactories[i].createBinaryComparator();
+            }
+            
+            
+            IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
+                private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(ctx.getJobletContext()
+                        .getJobId(), new TaskId(getActivityId(), partition));
+
+                ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
+                        hashFunctionGeneratorFactories).createPartitioner(0);
+                ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
+                        hashFunctionGeneratorFactories).createPartitioner(0);
+
+                @Override
+                public void open() throws HyracksDataException {
+                    if (memsize <= 2) { //Dedicated buffers: One buffer to read and one buffer for output
+                        throw new HyracksDataException("not enough memory for Hybrid Hash Join");
+                    }
+                    state.memForJoin = memsize - 2;
+                    state.numOfPartitions = getNumberOfPartitions(state.memForJoin, inputsize0, fudgeFactor,
+                            nPartitions);
+                    if(!isLeftOuter){
+                    	state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
+                                PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
+                                buildHpc);
+                    }
+                    else{
+                    	state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
+                                PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
+                                buildHpc, isLeftOuter, nullWriterFactories1);
+                    }
+                    
+                    state.hybridHJ.initBuild();
+                }
+
+                @Override
+                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                    state.hybridHJ.build(buffer);
+                }
+
+                @Override
+                public void close() throws HyracksDataException {
+                    state.hybridHJ.closeBuild();
+                    ctx.setStateObject(state);
+                }
+
+                @Override
+                public void fail() throws HyracksDataException {
+                }
+
+            };
+            return op;
+        }
+    }
+
+    /*
+     * Probe phase of Hybrid Hash Join:
+     * Reading the probe side and partitioning it, resident tuples get
+     * joined with the build side residents (through formerly created HybridHashJoin in the build phase)
+     * and spilled partitions get written to run files. During the close() call, pairs of spilled partition
+     * (build side spilled partition and its corresponding probe side spilled partition) join, by applying
+     * Hybrid Hash Join recursively on them.
+     */
+    private class ProbeAndJoinActivityNode extends AbstractActivityNode {
+
        private static final long serialVersionUID = 1L;

        // Id of the build activity; used to fetch the build-side record descriptor.
        private final ActivityId buildAid;

        public ProbeAndJoinActivityNode(ActivityId id, ActivityId buildAid) {
            super(id);
            this.buildAid = buildAid;
        }
+
+        @Override
+        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+                IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) {
+
+            final RecordDescriptor probeRd = recordDescProvider.getInputRecordDescriptor(buildAid, 0);
+            final RecordDescriptor buildRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            final IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
+            final ITuplePairComparator nljComparator0 = tuplePairComparatorFactory0.createTuplePairComparator(ctx);
+            final ITuplePairComparator nljComparator1 = tuplePairComparatorFactory1.createTuplePairComparator(ctx);
+
+            for (int i = 0; i < comparatorFactories.length; i++) {
+                comparators[i] = comparatorFactories[i].createBinaryComparator();
+            }
+
+            final INullWriter[] nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
+            if (isLeftOuter) {
+                for (int i = 0; i < nullWriterFactories1.length; i++) {
+                    nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
+                }
+            }
+
+            IOperatorNodePushable op = new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
                private BuildAndPartitionTaskState state; // build-phase state, fetched in open()
                private ByteBuffer rPartbuff = ctx.allocateFrame(); // scratch frame reused for reading run files

                // Hash-function families from which per-level repartitioners are derived.
                private ITuplePartitionComputerFamily hpcf0 = new FieldHashPartitionComputerFamily(probeKeys,
                        hashFunctionGeneratorFactories);
                private ITuplePartitionComputerFamily hpcf1 = new FieldHashPartitionComputerFamily(buildKeys,
                        hashFunctionGeneratorFactories);

                // Repartition computers; created in close() once numOfPartitions is known.
                private ITuplePartitionComputer hpcRep0;
                private ITuplePartitionComputer hpcRep1;
+
                @Override
                public void open() throws HyracksDataException {
                    // Fetch the state (memory budget, partition count, join instance)
                    // registered by the build activity for this task partition.
                    state = (BuildAndPartitionTaskState) ctx.getStateObject(new TaskId(new ActivityId(getOperatorId(),
                            BUILD_AND_PARTITION_ACTIVITY_ID), partition));

                    writer.open();
                    state.hybridHJ.initProbe();

                }
+
+                @Override
+                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                	if(!state.hybridHJ.isTableEmpty()){
+                		state.hybridHJ.probe(buffer, writer);
+                	}
+                }
+
                @Override
                public void fail() throws HyracksDataException {
                    // Propagate the failure downstream; no run-file cleanup is done here.
                    writer.fail();
                }
+
+                @Override
+                public void close() throws HyracksDataException {
+
+                    state.hybridHJ.closeProbe(writer);
+
+                    BitSet partitionStatus = state.hybridHJ.getPartitinStatus();
+                    hpcRep0 = new RepartitionComputerGeneratorFactory(state.numOfPartitions, hpcf0)
+                            .createPartitioner(0);
+                    hpcRep1 = new RepartitionComputerGeneratorFactory(state.numOfPartitions, hpcf1)
+                            .createPartitioner(0);
+
+                    rPartbuff.clear();
+                    for (int pid = partitionStatus.nextSetBit(0); pid >= 0; pid = partitionStatus.nextSetBit(pid + 1)) {
+
+                        RunFileReader bReader = state.hybridHJ.getBuildRFReader(pid);
+                        RunFileReader pReader = state.hybridHJ.getProbeRFReader(pid);
+
+                        if (bReader == null || pReader == null) { //either of sides (or both) does not have any tuple, thus no need for joining (no potential match)
+                            continue;
+                        }
+                        int bSize = state.hybridHJ.getBuildPartitionSizeInTup(pid);
+                        int pSize = state.hybridHJ.getProbePartitionSizeInTup(pid);
+                        int beforeMax = (bSize > pSize) ? bSize : pSize;
+                        joinPartitionPair(state.hybridHJ, bReader, pReader, pid, beforeMax, 1);
+
+                    }
+                    writer.close();
+                }
+
+                private void joinPartitionPair(OptimizedHybridHashJoin ohhj, RunFileReader buildSideReader,
+                        RunFileReader probeSideReader, int pid, int beforeMax, int level) throws HyracksDataException {
+                    ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
+                            hashFunctionGeneratorFactories).createPartitioner(level);
+                    ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
+                            hashFunctionGeneratorFactories).createPartitioner(level);
+
+                    long buildPartSize = ohhj.getBuildPartitionSize(pid) / ctx.getFrameSize();
+                    long probePartSize = ohhj.getProbePartitionSize(pid) / ctx.getFrameSize();
+
+                    //Apply in-Mem HJ if possible
+                    if ((buildPartSize < state.memForJoin) || (probePartSize < state.memForJoin)) {
+                        int tabSize = -1;
+                        
+                        if (isLeftOuter || buildPartSize < probePartSize) {
+                            tabSize = ohhj.getBuildPartitionSizeInTup(pid);
+                           
+                            if (tabSize == 0) {
+                                throw new HyracksDataException(
+                                        "Trying to join an empty partition. Invalid table size for inMemoryHashJoin.");
+                            }
+                          //Build Side is smaller
+                            applyInMemHashJoin(buildKeys, probeKeys, tabSize, probeRd, buildRd, hpcRep0, hpcRep1,
+                                    buildSideReader, probeSideReader, false, pid);
+
+                        } 
+                        
+                        else { //Role Reversal
+                            tabSize = ohhj.getProbePartitionSizeInTup(pid);
+                            if (tabSize == 0) {
+                                throw new HyracksDataException(
+                                        "Trying to join an empty partition. Invalid table size for inMemoryHashJoin.");
+                            }
+                            //Probe Side is smaller
+                            
+                            applyInMemHashJoin(probeKeys, buildKeys, tabSize, buildRd, probeRd, hpcRep1, hpcRep0,
+                                    probeSideReader, buildSideReader, true, pid);
+                        }
+                    }
+                    //Apply (Recursive) HHJ
+                    else {
+                        OptimizedHybridHashJoin rHHj;
+                        if (isLeftOuter || buildPartSize < probePartSize) { //Build Side is smaller
+
+                            int n = getNumberOfPartitions(state.memForJoin, (int) buildPartSize, fudgeFactor,
+                                    nPartitions);
+                           
+                            rHHj = new OptimizedHybridHashJoin(ctx, state.memForJoin, n, PROBE_REL, BUILD_REL,
+                                    probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc, buildHpc);
+
+                            buildSideReader.open();
+                            rHHj.initBuild();
+                            rPartbuff.clear();
+                            while (buildSideReader.nextFrame(rPartbuff)) {
+                                rHHj.build(rPartbuff);
+                            }
+
+                            rHHj.closeBuild();
+
+                            probeSideReader.open();
+                            rHHj.initProbe();
+                            rPartbuff.clear();
+                            while (probeSideReader.nextFrame(rPartbuff)) {
+                                rHHj.probe(rPartbuff, writer);
+                            }
+                            rHHj.closeProbe(writer);
+
+                            int maxAfterBuildSize = rHHj.getMaxBuildPartitionSize();
+                            int maxAfterProbeSize = rHHj.getMaxProbePartitionSize();
+                            int afterMax = (maxAfterBuildSize > maxAfterProbeSize) ? maxAfterBuildSize
+                                    : maxAfterProbeSize;
+
+                            BitSet rPStatus = rHHj.getPartitinStatus();
+                            if (afterMax < NLJ_SWITCH_THRESHOLD * beforeMax) {
+                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
+                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
+                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
+
+                                    if (rbrfw == null || rprfw == null) {
+                                        continue;
+                                    }
+
+                                    joinPartitionPair(rHHj, rbrfw, rprfw, rPid, afterMax, (level + 1));
+                                }
+
+                            } else { //Switch to NLJ (Further recursion seems not to be useful)
+                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
+                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
+                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
+                                    
+                                    if (rbrfw == null || rprfw == null) {
+                                        continue;
+                                    }
+
+                                    int buildSideInTups = rHHj.getBuildPartitionSizeInTup(rPid);
+                                    int probeSideInTups = rHHj.getProbePartitionSizeInTup(rPid);
+                                    if (isLeftOuter || buildSideInTups < probeSideInTups) {
+                                        applyNestedLoopJoin(probeRd, buildRd, state.memForJoin, rbrfw, rprfw,
+                                                nljComparator0);
+                                    } else {
+                                        applyNestedLoopJoin(buildRd, probeRd, state.memForJoin, rprfw, rbrfw,
+                                                nljComparator1);
+                                    }
+                                }
+                            }
+                        } else { //Role Reversal (Probe Side is smaller)
+                            int n = getNumberOfPartitions(state.memForJoin, (int) probePartSize, fudgeFactor,
+                                    nPartitions);
+                            
+                            rHHj = new OptimizedHybridHashJoin(ctx, state.memForJoin, n, BUILD_REL, PROBE_REL,
+                                    buildKeys, probeKeys, comparators, buildRd, probeRd, buildHpc, probeHpc);
+
+                            probeSideReader.open();
+                            rHHj.initBuild();
+                            rPartbuff.clear();
+                            while (probeSideReader.nextFrame(rPartbuff)) {
+                                rHHj.build(rPartbuff);
+                            }
+                            rHHj.closeBuild();
+                            rHHj.initProbe();
+                            buildSideReader.open();
+                            rPartbuff.clear();
+                            while (buildSideReader.nextFrame(rPartbuff)) {
+                                rHHj.probe(rPartbuff, writer);
+                            }
+                            rHHj.closeProbe(writer);
+                            int maxAfterBuildSize = rHHj.getMaxBuildPartitionSize();
+                            int maxAfterProbeSize = rHHj.getMaxProbePartitionSize();
+                            int afterMax = (maxAfterBuildSize > maxAfterProbeSize) ? maxAfterBuildSize
+                                    : maxAfterProbeSize;
+                            BitSet rPStatus = rHHj.getPartitinStatus();
+
+                            if (afterMax < NLJ_SWITCH_THRESHOLD * beforeMax) {
+                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
+                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
+                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
+
+                                    if (rbrfw == null || rprfw == null) {
+                                        continue;
+                                    }
+
+                                    joinPartitionPair(rHHj, rprfw, rbrfw, rPid, afterMax, (level + 1));
+                                }
+                            } else { //Switch to NLJ (Further recursion seems not to be effective)
+                                for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
+                                    RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
+                                    RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
+                                    
+                                    if (rbrfw == null || rprfw == null) {
+                                        continue;
+                                    }
+
+                                    long buildSideSize = rbrfw.getFileSize();
+                                    long probeSideSize = rprfw.getFileSize();
+                                    if (buildSideSize > probeSideSize) {
+                                        applyNestedLoopJoin(buildRd, probeRd, state.memForJoin, rbrfw, rprfw,
+                                                nljComparator1);
+                                    } else {
+                                        applyNestedLoopJoin(probeRd, buildRd, state.memForJoin, rprfw, rbrfw,
+                                                nljComparator0);
+                                    }
+                                }
+                            }
+                        }
+                        buildSideReader.close();
+                        probeSideReader.close();
+                    }
+                }
+
                /**
                 * Joins one spilled partition pair in memory: the run file behind
                 * {@code bReader} is loaded into a hash table of {@code tabSize} entries,
                 * then {@code pReader} is streamed against it frame by frame.
                 *
                 * NOTE(review): the parameter names and the call sites disagree on which
                 * descriptor is "build" vs "probe" (the non-reversed call passes probeRd as
                 * {@code buildRDesc}); the wiring below is kept exactly as written — verify
                 * against InMemoryHashJoin's accessor order before renaming anything.
                 *
                 * @param reverse true when roles are reversed (the probe side is being built on)
                 */
                private void applyInMemHashJoin(int[] bKeys, int[] pKeys, int tabSize, RecordDescriptor buildRDesc,
                        RecordDescriptor probeRDesc, ITuplePartitionComputer hpcRepLarger,
                        ITuplePartitionComputer hpcRepSmaller, RunFileReader bReader, RunFileReader pReader, boolean reverse, int pid)
                        throws HyracksDataException {

                    ISerializableTable table = new SerializableHashTable(tabSize, ctx);
                    InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, tabSize, new FrameTupleAccessor(
                            ctx.getFrameSize(), probeRDesc), hpcRepLarger, new FrameTupleAccessor(ctx.getFrameSize(),
                            buildRDesc), hpcRepSmaller, new FrameTuplePairComparator(pKeys, bKeys, comparators),
                            isLeftOuter, nullWriters1, table, reverse);

                    // Build: load every frame of the bReader run file into the hash table.
                    bReader.open();
                    rPartbuff.clear();
                    while (bReader.nextFrame(rPartbuff)) {
                        ByteBuffer copyBuffer = ctx.allocateFrame(); //We need to allocate a copyBuffer, because this buffer gets added to the buffers list in the InMemoryHashJoin
                        FrameUtils.copy(rPartbuff, copyBuffer);
                        FrameUtils.makeReadable(copyBuffer);
                        joiner.build(copyBuffer);
                        rPartbuff.clear();
                    }
                    bReader.close();
                    rPartbuff.clear();
                    // probe
                    pReader.open();
                    while (pReader.nextFrame(rPartbuff)) {
                        joiner.join(rPartbuff, writer);
                        rPartbuff.clear();
                    }
                    pReader.close();
                    joiner.closeJoin(writer);
                }
+
+                /**
+                 * Joins one spilled partition pair with a nested-loop join: first caches
+                 * all inner-side frames inside the NestedLoopJoin, then scans the outer
+                 * side frame by frame, emitting matches to the enclosing {@code writer}.
+                 * Used as the fallback when further hash-join recursion is ineffective.
+                 *
+                 * @param outerRd       record descriptor of the outer (scanning) side
+                 * @param innerRd       record descriptor of the inner (cached) side
+                 * @param memorySize    memory budget (in frames) handed to NestedLoopJoin
+                 * @param outerReader   run file holding the outer-side tuples
+                 * @param innerReader   run file holding the inner-side tuples
+                 * @param nljComparator tuple-pair comparator deciding whether two tuples match
+                 * @throws HyracksDataException if reading either run file fails
+                 */
+                private void applyNestedLoopJoin(RecordDescriptor outerRd, RecordDescriptor innerRd, int memorySize,
+                        RunFileReader outerReader, RunFileReader innerReader, ITuplePairComparator nljComparator)
+                        throws HyracksDataException {
+
+                    // NOTE(review): the trailing (false, null) arguments appear to disable
+                    // outer-join/null-writer handling for this fallback path — confirm
+                    // against the NestedLoopJoin constructor.
+                    NestedLoopJoin nlj = new NestedLoopJoin(ctx, new FrameTupleAccessor(ctx.getFrameSize(), outerRd),
+                            new FrameTupleAccessor(ctx.getFrameSize(), innerRd), nljComparator, memorySize, false, null);
+
+                    // Cache phase: feed every inner-side frame to the joiner.
+                    ByteBuffer cacheBuff = ctx.allocateFrame();
+                    innerReader.open();
+                    while (innerReader.nextFrame(cacheBuff)) {
+                        FrameUtils.makeReadable(cacheBuff);
+                        nlj.cache(cacheBuff);
+                        cacheBuff.clear();
+                    }
+                    nlj.closeCache();
+
+                    // Join phase: stream the outer side against the cached inner side.
+                    ByteBuffer joinBuff = ctx.allocateFrame();
+                    outerReader.open();
+
+                    while (outerReader.nextFrame(joinBuff)) {
+                        FrameUtils.makeReadable(joinBuff);
+                        nlj.join(joinBuff, writer);
+                        joinBuff.clear();
+                    }
+
+                    nlj.closeJoin(writer);
+                    outerReader.close();
+                    innerReader.close();
+                }
+            };
+            return op;
+        }
+    }
+}
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/DeserializedMapperOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/DeserializedMapperOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/DeserializedMapperOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/DeserializedMapperOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapper.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapper.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapper.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapper.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapperFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapperFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapperFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/IDeserializedMapperFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/ReflectionBasedDeserializedMapperFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/ReflectionBasedDeserializedMapperFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/ReflectionBasedDeserializedMapperFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/ReflectionBasedDeserializedMapperFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/SamplerDeserializedMapperFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/SamplerDeserializedMapperFactory.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/SamplerDeserializedMapperFactory.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/map/SamplerDeserializedMapperFactory.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/NullSinkOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/NullSinkOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/NullSinkOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/NullSinkOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/PrinterOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/PrinterOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/PrinterOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/PrinterOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTMemMgr.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTMemMgr.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTMemMgr.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTMemMgr.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTNodeUtil.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTNodeUtil.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTNodeUtil.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/BSTNodeUtil.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IMemoryManager.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IMemoryManager.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IMemoryManager.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IMemoryManager.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IRunGenerator.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IRunGenerator.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IRunGenerator.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/IRunGenerator.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ISelectionTree.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ISelectionTree.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ISelectionTree.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ISelectionTree.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGenerator.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGenerator.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGenerator.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGenerator.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGeneratorWithLimit.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGeneratorWithLimit.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGeneratorWithLimit.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/OptimizedExternalSortRunGeneratorWithLimit.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/RunMergingFrameReader.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/RunMergingFrameReader.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/RunMergingFrameReader.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/RunMergingFrameReader.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/Slot.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/Slot.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/Slot.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/Slot.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinHeap.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinHeap.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinHeap.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinHeap.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinMaxHeap.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinMaxHeap.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinMaxHeap.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/SortMinMaxHeap.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/ISerializableTable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/ISerializableTable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/ISerializableTable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/ISerializableTable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/SerializableHashTable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/SerializableHashTable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/SerializableHashTable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/SerializableHashTable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/TuplePointer.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/TuplePointer.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/TuplePointer.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/structures/TuplePointer.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/DeserializedOperatorNodePushable.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/DeserializedOperatorNodePushable.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/DeserializedOperatorNodePushable.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/DeserializedOperatorNodePushable.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferenceEntry.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferenceEntry.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferenceEntry.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferenceEntry.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferencedPriorityQueue.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/SelectionTree.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/SelectionTree.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/SelectionTree.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/SelectionTree.java
diff --git a/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/StringSerializationUtils.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/StringSerializationUtils.java
similarity index 100%
rename from hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/StringSerializationUtils.java
rename to hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/util/StringSerializationUtils.java
diff --git a/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/test/util/SelectionTreeTest.java b/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/test/util/SelectionTreeTest.java
similarity index 100%
rename from hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/test/util/SelectionTreeTest.java
rename to hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/test/util/SelectionTreeTest.java
diff --git a/hyracks/hyracks-dist/pom.xml b/hyracks/hyracks-dist/pom.xml
new file mode 100755
index 0000000..58a4b1c
--- /dev/null
+++ b/hyracks/hyracks-dist/pom.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<artifactId>hyracks</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>hyracks-dist</artifactId>
+	<name>hyracks-dist</name>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-resources-plugin</artifactId>
+				<version>2.5</version>
+				<executions>
+					<execution>
+						<id>copy-scripts</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/</outputDirectory>
+							<resources>
+								<resource>
+									<directory>src/main/resources</directory>
+								</resource>
+							</resources>
+							<directoryMode>0755</directoryMode>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-antrun-plugin</artifactId>
+				<version>1.6</version>
+				<executions>
+					<execution>
+						<id>process-test-classes</id>
+						<phase>package</phase>
+						<configuration>
+							<target>
+								<chmod file="target/appassembler/bin/*" perm="755" />
+							</target>
+						</configuration>
+						<goals>
+							<goal>run</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/getip.sh b/hyracks/hyracks-dist/src/main/resources/bin/getip.sh
new file mode 100755
index 0000000..e0cdf73
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/getip.sh
@@ -0,0 +1,21 @@
+# getip.sh - print this host's IPv4 address on stdout.
+# Linux: read the address of eth0, falling back to loopback (lo) if empty.
+# Any other OS (assumes BSD/Mac-style interface names): read en1, falling
+# back to lo0.
+#get the OS
+OS_NAME=`uname -a|awk '{print $1}'`
+LINUX_OS='Linux'
+
+if [ $OS_NAME = $LINUX_OS ];
+then
+        #Get IP Address
+        IPADDR=`/sbin/ifconfig eth0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+	if [ "$IPADDR" = "" ]
+        then
+		IPADDR=`/sbin/ifconfig lo | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi 
+else
+        IPADDR=`/sbin/ifconfig en1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+	if [ "$IPADDR" = "" ]
+        then
+                IPADDR=`/sbin/ifconfig lo0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi
+
+fi
+echo $IPADDR
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startAllNCs.sh b/hyracks/hyracks-dist/src/main/resources/bin/startAllNCs.sh
new file mode 100755
index 0000000..629bd90
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startAllNCs.sh
@@ -0,0 +1,6 @@
+# startAllNCs.sh - start a node controller on every host listed in
+# conf/slaves via ssh, assuming each slave shares this directory layout.
+# NOTE(review): the variable name PREGELIX_PATH looks copy-pasted from
+# Pregelix; it simply holds the current working directory.
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; bin/startnc.sh"
+done
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startCluster.sh b/hyracks/hyracks-dist/src/main/resources/bin/startCluster.sh
new file mode 100755
index 0000000..a0c2063
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startCluster.sh
@@ -0,0 +1,3 @@
+# startCluster.sh - bring up the whole cluster: start the cluster controller
+# first, give it 5 seconds to initialize, then start all node controllers.
+bin/startcc.sh
+sleep 5
+bin/startAllNCs.sh
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh b/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh
new file mode 100755
index 0000000..fe6cf27
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh
@@ -0,0 +1,50 @@
+# startDebugNc.sh - launch a SECOND node controller on this host for
+# debugging. Uses the *2-suffixed settings from conf/debugnc.properties
+# (temp dir, log dir, I/O dirs, JAVA_OPTS) and appends "2" to the node id
+# so it does not collide with the regular NC started by startnc.sh.
+hostname
+
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster properties
+. conf/cluster.properties
+. conf/debugnc.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR2
+mkdir $NCTMP_DIR2
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR2
+mkdir $NCLOGS_DIR2
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS2 | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Set JAVA_HOME
+export JAVA_HOME=$JAVA_HOME
+
+#Get OS
+IPADDR=`bin/getip.sh`
+
+#Get node ID
+NODEID=`hostname | cut -d '.' -f 1`
+NODEID=${NODEID}2
+
+#Set JAVA_OPTS
+export JAVA_OPTS=$NCJAVA_OPTS2
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR2
+
+#Launch hyracks nc
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh b/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
new file mode 100755
index 0000000..fe2551d
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+# startcc.sh - start the Hyracks cluster controller on this host, wiping its
+# temp and log directories first. Runs in the background; output goes to
+# $CCLOGS_DIR/cc.log.
+hostname
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Get the IP address of the cc
+# NOTE(review): CCHOST_NAME is computed but never used below — verify intent.
+CCHOST_NAME=`cat conf/master`
+CCHOST=`bin/getip.sh`
+
+#Remove the temp dir
+rm -rf $CCTMP_DIR
+mkdir $CCTMP_DIR
+
+#Remove the logs dir
+rm -rf $CCLOGS_DIR
+mkdir $CCLOGS_DIR
+
+#Export JAVA_HOME and JAVA_OPTS
+export JAVA_HOME=$JAVA_HOME
+export JAVA_OPTS=$CCJAVA_OPTS
+
+#Launch hyracks cc script
+chmod -R 755 $HYRACKS_HOME
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 3 &> $CCLOGS_DIR/cc.log &
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh b/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh
new file mode 100755
index 0000000..6e0f90e
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh
@@ -0,0 +1,49 @@
+# startnc.sh - start a Hyracks node controller on this host: resolve the CC's
+# IP by ssh-ing to the master and running getip.sh there, wipe this NC's temp,
+# log, and I/O working directories, then launch hyracksnc in the background
+# (output goes to $NCLOGS_DIR/<nodeid>.log).
+hostname
+
+MY_NAME=`hostname`
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR
+mkdir $NCTMP_DIR
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR
+mkdir $NCLOGS_DIR
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Set JAVA_HOME
+export JAVA_HOME=$JAVA_HOME
+
+IPADDR=`bin/getip.sh`
+#echo $IPADDR
+
+#Get node ID (short hostname, first dotted component)
+NODEID=`hostname | cut -d '.' -f 1`
+
+#Set JAVA_OPTS
+export JAVA_OPTS=$NCJAVA_OPTS
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR
+
+#Launch hyracks nc
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopAllNCs.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopAllNCs.sh
new file mode 100755
index 0000000..12367c1
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopAllNCs.sh
@@ -0,0 +1,6 @@
+# stopAllNCs.sh - stop the node controller on every host listed in
+# conf/slaves via ssh.
+# NOTE(review): the variable name PREGELIX_PATH looks copy-pasted from
+# Pregelix; it simply holds the current working directory.
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; bin/stopnc.sh"
+done
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopCluster.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopCluster.sh
new file mode 100755
index 0000000..4889934
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopCluster.sh
@@ -0,0 +1,3 @@
+# stopCluster.sh - shut down the whole cluster: stop all node controllers
+# first, wait briefly, then stop the cluster controller.
+bin/stopAllNCs.sh
+sleep 2
+bin/stopcc.sh
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopcc.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopcc.sh
new file mode 100755
index 0000000..c2f525a
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopcc.sh
@@ -0,0 +1,10 @@
+# stopcc.sh - kill the cluster controller process on this host and clear its
+# temp directory.
+hostname
+. conf/cluster.properties
+
+#Kill process
+# NOTE(review): this grep matches ANY java process of $USER whose command line
+# contains "hyracks", so an NC running on the same host would be killed too —
+# confirm that the CC always runs on a dedicated host.
+PID=`ps -ef|grep ${USER}|grep java|grep hyracks|awk '{print $2}'`
+echo $PID
+kill -9 $PID
+
+#Clean up CC temp dir
+rm -rf $CCTMP_DIR/*
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopnc.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopnc.sh
new file mode 100755
index 0000000..03ce4e7
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopnc.sh
@@ -0,0 +1,23 @@
+# stopnc.sh - kill the node controller process on this host (matched by its
+# -Dapp.name=hyracksnc launcher property) and clear its I/O and temp dirs.
+hostname
+. conf/cluster.properties
+
+#Kill process
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+
+# Fallback: some ps variants truncate/replace the user name column, so retry
+# matching by numeric user id extracted from `id`.
+if [ "$PID" == "" ]; then
+  USERID=`id | sed 's/^uid=//;s/(.*$//'`
+  PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+fi
+
+echo $PID
+kill -9 $PID
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir/*
+done
+
+#Clean up NC temp dir
+rm -rf $NCTMP_DIR/*
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/cluster.properties b/hyracks/hyracks-dist/src/main/resources/conf/cluster.properties
new file mode 100755
index 0000000..3b382f7
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/cluster.properties
@@ -0,0 +1,37 @@
+#The CC port for Hyracks clients
+CC_CLIENTPORT=3099
+
+#The CC port for Hyracks cluster management
+CC_CLUSTERPORT=1099
+
+#The directory of hyracks binaries
+HYRACKS_HOME=../../../
+
+#The tmp directory for cc to install jars
+CCTMP_DIR=/tmp/t1
+
+#The tmp directory for nc to install jars
+NCTMP_DIR=/tmp/t2
+
+#The directory to put cc logs
+CCLOGS_DIR=$CCTMP_DIR/logs
+
+#The directory to put nc logs
+NCLOGS_DIR=$NCTMP_DIR/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS="/tmp/t3,/tmp/t4"
+
+#The JAVA_HOME
+JAVA_HOME=$JAVA_HOME
+
+#The frame size of the internal dataflow engine
+FRAME_SIZE=65536
+
+#CC JAVA_OPTS
+CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/debugnc.properties b/hyracks/hyracks-dist/src/main/resources/conf/debugnc.properties
new file mode 100755
index 0000000..27afa26
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/debugnc.properties
@@ -0,0 +1,12 @@
+#The tmp directory for nc to install jars
+NCTMP_DIR2=/tmp/t-1
+
+#The directory to put nc logs
+NCLOGS_DIR2=$NCTMP_DIR2/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS2="/tmp/t-2,/tmp/t-3"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS2="-Xdebug -Xrunjdwp:transport=dt_socket,address=7003,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/master b/hyracks/hyracks-dist/src/main/resources/conf/master
new file mode 100755
index 0000000..2fbb50c
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/master
@@ -0,0 +1 @@
+localhost
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/slaves b/hyracks/hyracks-dist/src/main/resources/conf/slaves
new file mode 100755
index 0000000..2fbb50c
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/slaves
@@ -0,0 +1 @@
+localhost
diff --git a/hyracks/hyracks-documentation/pom.xml b/hyracks/hyracks-documentation/pom.xml
new file mode 100644
index 0000000..ed24adb
--- /dev/null
+++ b/hyracks/hyracks-documentation/pom.xml
@@ -0,0 +1,49 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-documentation</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-documentation</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+    	<plugin>
+    		<groupId>org.apache.maven.doxia</groupId>
+    		<artifactId>doxia-maven-plugin</artifactId>
+    		<version>1.1.3</version>
+    		<executions>
+    		  <execution>
+    		    <phase>package</phase>
+    		    <goals>
+    		      <goal>render-books</goal>
+    		    </goals>
+    		  </execution>
+    		</executions>
+    		<configuration>
+    		  <books>
+    		    <book>
+    		      <directory>src/books/user-guide</directory>
+    		      <descriptor>src/books/user-guide/doxia-descriptor.xml</descriptor>
+    		      <formats>
+    		        <format>
+    		          <id>pdf</id>
+    		        </format>
+    		        <format>
+    		          <id>xhtml</id>
+    		        </format>
+    		      </formats>
+    		    </book>
+    		  </books>
+    		</configuration>
+    	</plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  </dependencies>
+</project>
diff --git a/hyracks-documentation/src/books/user-guide/doxia-descriptor.xml b/hyracks/hyracks-documentation/src/books/user-guide/doxia-descriptor.xml
similarity index 100%
rename from hyracks-documentation/src/books/user-guide/doxia-descriptor.xml
rename to hyracks/hyracks-documentation/src/books/user-guide/doxia-descriptor.xml
diff --git a/hyracks-documentation/src/books/user-guide/sec-concepts-applications.apt b/hyracks/hyracks-documentation/src/books/user-guide/sec-concepts-applications.apt
similarity index 100%
rename from hyracks-documentation/src/books/user-guide/sec-concepts-applications.apt
rename to hyracks/hyracks-documentation/src/books/user-guide/sec-concepts-applications.apt
diff --git a/hyracks-documentation/src/books/user-guide/sec-concepts-terminology.apt b/hyracks/hyracks-documentation/src/books/user-guide/sec-concepts-terminology.apt
similarity index 100%
rename from hyracks-documentation/src/books/user-guide/sec-concepts-terminology.apt
rename to hyracks/hyracks-documentation/src/books/user-guide/sec-concepts-terminology.apt
diff --git a/hyracks-documentation/src/books/user-guide/sec-hyrackscli-commands.apt b/hyracks/hyracks-documentation/src/books/user-guide/sec-hyrackscli-commands.apt
similarity index 100%
rename from hyracks-documentation/src/books/user-guide/sec-hyrackscli-commands.apt
rename to hyracks/hyracks-documentation/src/books/user-guide/sec-hyrackscli-commands.apt
diff --git a/hyracks-documentation/src/books/user-guide/sec-hyrackscli-running.apt b/hyracks/hyracks-documentation/src/books/user-guide/sec-hyrackscli-running.apt
similarity index 100%
rename from hyracks-documentation/src/books/user-guide/sec-hyrackscli-running.apt
rename to hyracks/hyracks-documentation/src/books/user-guide/sec-hyrackscli-running.apt
diff --git a/hyracks-documentation/src/books/user-guide/sec-introduction-overview.apt b/hyracks/hyracks-documentation/src/books/user-guide/sec-introduction-overview.apt
similarity index 100%
rename from hyracks-documentation/src/books/user-guide/sec-introduction-overview.apt
rename to hyracks/hyracks-documentation/src/books/user-guide/sec-introduction-overview.apt
diff --git a/hyracks-documentation/src/books/user-guide/sec-introduction-whatis.apt b/hyracks/hyracks-documentation/src/books/user-guide/sec-introduction-whatis.apt
similarity index 100%
rename from hyracks-documentation/src/books/user-guide/sec-introduction-whatis.apt
rename to hyracks/hyracks-documentation/src/books/user-guide/sec-introduction-whatis.apt
diff --git a/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml b/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
new file mode 100644
index 0000000..6350054
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
@@ -0,0 +1,88 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+  <artifactId>btreeapp</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>btreeapp</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>btree-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+	</pluginManagement>
+  
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/application/lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+  		<artifactId>btreehelper</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml
similarity index 100%
rename from hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml
rename to hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
new file mode 100644
index 0000000..dce275f
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
@@ -0,0 +1,86 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+  <artifactId>btreeclient</artifactId>
+  <name>btreeclient</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>btree-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-btree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+  		<artifactId>btreehelper</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.examples.btree.client.BTreeBulkLoadExample</mainClass>
+                  <name>btreebulkload</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-examples/btree-example/btreeclient/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-examples/btree-example/btreeclient/src/main/assembly/binary-assembly.xml
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
new file mode 100644
index 0000000..b6e8c72
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+// This example will insert tuples into the primary and secondary index using an insert pipeline
+
+public class InsertPipelineExample {
+    private static class Options {
+        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+        public String host;
+
+        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
+        public int port = 1098;
+
+        @Option(name = "-app", usage = "Hyracks Application name", required = true)
+        public String app;
+
+        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+        public String ncs;
+
+        @Option(name = "-num-tuples", usage = "Total number of tuples to be generated for insertion", required = true)
+        public int numTuples;
+
+        @Option(name = "-primary-btreename", usage = "B-Tree file name of primary index", required = true)
+        public String primaryBTreeName;
+
+        @Option(name = "-secondary-btreename", usage = "B-Tree file name of secondary index", required = true)
+        public String secondaryBTreeName;
+    }
+
+    public static void main(String[] args) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
+
+        JobSpecification job = createJob(options);
+
+        long start = System.currentTimeMillis();
+        JobId jobId = hcc.startJob(options.app, job);
+        hcc.waitForCompletion(jobId);
+        long end = System.currentTimeMillis();
+        System.err.println(start + " " + end + " " + (end - start));
+    }
+
+    private static JobSpecification createJob(Options options) {
+
+        JobSpecification spec = new JobSpecification();
+
+        String[] splitNCs = options.ncs.split(",");
+
+        // schema of tuples to be generated: 4 fields with int, string, string,
+        // string
+        // we will use field 2 as primary key to fill a clustered index
+        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, // this field will
+                                                           // not go into B-Tree
+                UTF8StringSerializerDeserializer.INSTANCE, // we will use this
+                                                           // as payload
+                IntegerSerializerDeserializer.INSTANCE, // we will use this
+                                                        // field as key
+                IntegerSerializerDeserializer.INSTANCE, // we will use this as
+                                                        // payload
+                UTF8StringSerializerDeserializer.INSTANCE // we will use this as
+                                                          // payload
+                });
+
+        // generate numRecords records with field 2 being unique, integer values
+        // in [0, 100000], and strings with max length of 10 characters, and
+        // random seed 100
+        DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
+                100000, 10, 100);
+        // run data generator on first nodecontroller given
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
+
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
+
+        // prepare insertion into primary index
+        // tuples to be put into B-Tree shall have 4 fields
+        int primaryFieldCount = 4;
+        ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+
+        // comparator factories for primary index
+        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // the B-Tree expects its keyfields to be at the front of its input
+        // tuple
+        int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input
+                                                        // tuple to field 0 of
+                                                        // B-Tree tuple, etc.        
+        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
+
+        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+        // create operator descriptor
+        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, recDesc, storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, primaryFieldPermutation, IndexOp.INSERT, dataflowHelperFactory, null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
+
+        // prepare insertion into secondary index
+        // tuples to be put into B-Tree shall have 2 fields
+        int secondaryFieldCount = 2;
+        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        secondaryTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // comparator factories for secondary index
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
+        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // the B-Tree expects its keyfields to be at the front of its input
+        // tuple
+        int[] secondaryFieldPermutation = { 1, 2 };
+        IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
+                options.secondaryBTreeName);
+        // create operator descriptor
+        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, recDesc, storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryFieldPermutation, IndexOp.INSERT, dataflowHelperFactory, null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
+
+        // end the insert pipeline at this sink operator
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+        JobHelper.createPartitionConstraint(spec, nullSink, splitNCs);
+
+        // distribute the records from the datagen via hashing to the bulk load
+        // ops
+        IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
+        hashFactories[0] = PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY);
+        IConnectorDescriptor hashConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
+
+        // connect the ops
+
+        spec.connect(hashConn, dataGen, 0, primaryInsert, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryInsert, 0, secondaryInsert, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsert, 0, nullSink, 0);
+
+        spec.addRoot(nullSink);
+
+        return spec;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java
similarity index 100%
rename from hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java
rename to hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
new file mode 100644
index 0000000..a6c7ea6
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+// This example will load a primary index from randomly generated data
+
+public class PrimaryIndexBulkLoadExample {
+    private static class Options {
+        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+        public String host;
+
+        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
+        public int port = 1098;
+
+        @Option(name = "-app", usage = "Hyracks Application name", required = true)
+        public String app;
+
+        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+        public String ncs;
+
+        @Option(name = "-num-tuples", usage = "Total number of tuples to be generated for loading", required = true)
+        public int numTuples;
+
+        @Option(name = "-btreename", usage = "B-Tree file name", required = true)
+        public String btreeName;
+
+        @Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
+        public int sbSize = 32768;
+    }
+
+    public static void main(String[] args) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
+
+        JobSpecification job = createJob(options);
+
+        long start = System.currentTimeMillis();
+        JobId jobId = hcc.startJob(options.app, job);
+        hcc.waitForCompletion(jobId);
+        long end = System.currentTimeMillis();
+        System.err.println(start + " " + end + " " + (end - start));
+    }
+
+    private static JobSpecification createJob(Options options) {
+
+        JobSpecification spec = new JobSpecification();
+
+        String[] splitNCs = options.ncs.split(",");
+
+        // schema of tuples to be generated: 5 fields with string, string, int,
+        // int, string
+        // we will use field-index 2 as primary key to fill a clustered index
+        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, // this field will
+                                                           // not go into B-Tree
+                UTF8StringSerializerDeserializer.INSTANCE, // we will use this
+                                                           // as payload
+                IntegerSerializerDeserializer.INSTANCE, // we will use this
+                                                        // field as key
+                IntegerSerializerDeserializer.INSTANCE, // we will use this as
+                                                        // payload
+                UTF8StringSerializerDeserializer.INSTANCE // we will use this as
+                                                          // payload
+                });
+
+        // generate numRecords records with field 2 being unique, integer values
+        // in [0, 100000], and strings with max length of 10 characters, and
+        // random seed 50
+        DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
+                100000, 10, 50);
+        // run data generator on first nodecontroller given
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
+
+        // sort the tuples as preparation for bulk load
+        // fields to sort on
+        int[] sortFields = { 2 };
+        // comparators for sort fields
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
+                comparatorFactories, recDesc);
+        JobHelper.createPartitionConstraint(spec, sorter, splitNCs);
+
+        // tuples to be put into B-Tree shall have 4 fields
+        int fieldCount = 4;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+
+        // create providers for B-Tree
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
+
+        // the B-Tree expects its keyfields to be at the front of its input
+        // tuple
+        int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple
+                                                 // to field 0 of B-Tree tuple,
+                                                 // etc.
+        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
+        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, btreeSplitProvider, typeTraits, comparatorFactories,
+                fieldPermutation, 0.7f, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
+
+        // distribute the records from the datagen via hashing to the bulk load
+        // ops
+        IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
+        hashFactories[0] = PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY);
+        IConnectorDescriptor hashConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
+
+        spec.connect(hashConn, dataGen, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
+
+        spec.addRoot(btreeBulkLoad);
+
+        return spec;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
new file mode 100644
index 0000000..d24ba33
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import java.io.DataOutput;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+// This example will perform an ordered scan on the primary index
+// i.e. a range-search for [-infinity, +infinity]
+
+public class PrimaryIndexSearchExample {
+    private static class Options {
+        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+        public String host;
+
+        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
+        public int port = 1098;
+
+        @Option(name = "-app", usage = "Hyracks Application name", required = true)
+        public String app;
+
+        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+        public String ncs;
+
+        @Option(name = "-btreename", usage = "B-Tree file name to search", required = true)
+        public String btreeName;
+    }
+
+    public static void main(String[] args) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
+
+        JobSpecification job = createJob(options);
+
+        long start = System.currentTimeMillis();
+        JobId jobId = hcc.startJob(options.app, job);
+        hcc.waitForCompletion(jobId);
+        long end = System.currentTimeMillis();
+        System.err.println(start + " " + end + " " + (end - start));
+    }
+
+    private static JobSpecification createJob(Options options) throws HyracksDataException {
+
+        JobSpecification spec = new JobSpecification();
+
+        String[] splitNCs = options.ncs.split(",");
+
+        int fieldCount = 4;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+
+        // comparators for btree
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // create providers for B-Tree
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
+
+        // schema of tuples coming out of primary index
+        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, });
+
+        // build tuple containing low and high search keys
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length * 2); // high
+                                                                                      // key
+                                                                                      // and
+                                                                                      // low
+                                                                                      // key
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(100, dos); // low key
+        tb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(200, dos); // build
+                                                                    // high key
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        JobHelper.createPartitionConstraint(spec, keyProviderOp, splitNCs);
+
+        int[] lowKeyFields = { 0 }; // low key is in field 0 of tuples going
+                                    // into search op
+        int[] highKeyFields = { 1 }; // high key is in field 1 of tuples going
+                                     // into search op
+
+        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
+        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+        BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
+                indexRegistryProvider, btreeSplitProvider, typeTraits, comparatorFactories, lowKeyFields,
+                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, btreeSearchOp, splitNCs);
+
+        // have each node print the results of its respective B-Tree
+        PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
+        JobHelper.createPartitionConstraint(spec, printer, splitNCs);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, btreeSearchOp, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), btreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+
+        return spec;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
new file mode 100644
index 0000000..5aa338a
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDiskOrderScanOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+// This example will load a secondary index with <key, primary-index key> pairs
+// We require an existing primary index built with PrimaryIndexBulkLoadExample
+
+public class SecondaryIndexBulkLoadExample {
+    private static class Options {
+        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+        public String host;
+
+        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
+        public int port = 1098;
+
+        @Option(name = "-app", usage = "Hyracks Application name", required = true)
+        public String app;
+
+        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+        public String ncs;
+
+        @Option(name = "-primary-btreename", usage = "Name of primary-index B-Tree to load from", required = true)
+        public String primaryBTreeName;
+
+        @Option(name = "-secondary-btreename", usage = "B-Tree file name for secondary index to be built", required = true)
+        public String secondaryBTreeName;
+
+        @Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
+        public int sbSize = 32768;
+    }
+
+    public static void main(String[] args) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
+
+        JobSpecification job = createJob(options);
+
+        long start = System.currentTimeMillis();
+        JobId jobId = hcc.startJob(options.app, job);
+        hcc.waitForCompletion(jobId);
+        long end = System.currentTimeMillis();
+        System.err.println(start + " " + end + " " + (end - start));
+    }
+
+    private static JobSpecification createJob(Options options) {
+
+        JobSpecification spec = new JobSpecification();
+
+        String[] splitNCs = options.ncs.split(",");
+
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
+
+        // schema of tuples that we are retrieving from the primary index
+        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                IntegerSerializerDeserializer.INSTANCE, // we will use this as
+                                                        // payload in secondary
+                                                        // index
+                UTF8StringSerializerDeserializer.INSTANCE, // we will use this
+                                                           // as key in
+                                                           // secondary index
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        int primaryFieldCount = 4;
+        ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+
+        // comparators for sort fields and BTree fields
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
+        comparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+        comparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // use a disk-order scan to read primary index
+        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
+        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+        TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
+                recDesc, storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
+
+        // sort the tuples as preparation for bulk load into secondary index
+        // fields to sort on
+        int[] sortFields = { 1, 0 };
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
+                comparatorFactories, recDesc);
+        JobHelper.createPartitionConstraint(spec, sorter, splitNCs);
+
+        // tuples to be put into B-Tree shall have 2 fields
+        int secondaryFieldCount = 2;
+        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        secondaryTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // the B-Tree expects its keyfields to be at the front of its input
+        // tuple
+        int[] fieldPermutation = { 1, 0 };
+        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
+        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, btreeSplitProvider, secondaryTypeTraits, comparatorFactories,
+                fieldPermutation, 0.7f, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
+
+        // connect the ops
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), btreeScanOp, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
+
+        spec.addRoot(btreeBulkLoad);
+
+        return spec;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
new file mode 100644
index 0000000..277668b
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import java.io.DataOutput;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+// This example will perform range search on the secondary index
+// and then retrieve the corresponding source records from the primary index
+
+public class SecondaryIndexSearchExample {
+    private static class Options {
+        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+        public String host;
+
+        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
+        public int port = 1098;
+
+        @Option(name = "-app", usage = "Hyracks Application name", required = true)
+        public String app;
+
+        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+        public String ncs;
+
+        @Option(name = "-primary-btreename", usage = "Primary B-Tree file name", required = true)
+        public String primaryBTreeName;
+
+        @Option(name = "-secondary-btreename", usage = "Secondary B-Tree file name to search", required = true)
+        public String secondaryBTreeName;
+    }
+
+    public static void main(String[] args) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
+
+        JobSpecification job = createJob(options);
+
+        long start = System.currentTimeMillis();
+        JobId jobId = hcc.startJob(options.app, job);
+        hcc.waitForCompletion(jobId);
+        long end = System.currentTimeMillis();
+        System.err.println(start + " " + end + " " + (end - start));
+    }
+
+    private static JobSpecification createJob(Options options) throws HyracksDataException {
+
+        JobSpecification spec = new JobSpecification();
+
+        String[] splitNCs = options.ncs.split(",");
+
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
+
+        // schema of tuples coming out of secondary index
+        RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+
+        int secondaryFieldCount = 2;
+        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        secondaryTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // comparators for sort fields and BTree fields
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
+        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // comparator for the primary index key (single integer key at slot 0)
+        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // schema of tuples coming out of primary index
+        RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, });
+
+        int primaryFieldCount = 4;
+        ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+
+        // comparators for btree, note that we only need a comparator for the
+        // non-unique key
+        // i.e. we will have a range condition on the first field only (implying
+        // [-infinity, +infinity] for the second field)
+        IBinaryComparatorFactory[] searchComparatorFactories = new IBinaryComparatorFactory[1];
+        searchComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        // build tuple containing low and high search keys
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(searchComparatorFactories.length * 2); // low
+        // and
+        // high
+        // key
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos); // low
+                                                                       // key
+        tb.addFieldEndOffset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("f", dos); // high
+                                                                       // key
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        JobHelper.createPartitionConstraint(spec, keyProviderOp, splitNCs);
+
+        int[] secondaryLowKeyFields = { 0 }; // low key is in field 0 of tuples
+                                             // going into secondary index
+                                             // search op
+        int[] secondaryHighKeyFields = { 1 }; // high key is in field 1 of
+                                              // tuples going into secondary
+                                              // index search op
+
+        IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
+                options.secondaryBTreeName);
+        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+        BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
+                storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
+                searchComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, secondarySearchOp, splitNCs);
+
+        // secondary index will output tuples with [UTF8String, Integer]
+        // the Integer field refers to the key in the primary index of the
+        // source data records
+        int[] primaryLowKeyFields = { 1 }; // low key is in field 1 of tuples
+                                           // going into primary index search op
+        int[] primaryHighKeyFields = { 1 }; // high key is in field 1 of tuples
+                                            // going into primary index search
+                                            // op
+
+        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
+        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, primaryLowKeyFields, primaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        JobHelper.createPartitionConstraint(spec, primarySearchOp, splitNCs);
+
+        // have each node print the results of its respective B-Tree
+        PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
+        JobHelper.createPartitionConstraint(spec, printer, splitNCs);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+
+        return spec;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
new file mode 100644
index 0000000..d94feb7
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
@@ -0,0 +1,50 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+  <artifactId>btreehelper</artifactId>
+  <name>btreehelper</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>btree-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-btree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-data-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
similarity index 100%
rename from hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
rename to hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexRegistryProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexRegistryProvider.java
new file mode 100644
index 0000000..d1f40d8
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexRegistryProvider.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+
+public class IndexRegistryProvider implements IIndexRegistryProvider<IIndex> {
+    private static final long serialVersionUID = 1L;
+
+    public static final IndexRegistryProvider INSTANCE = new IndexRegistryProvider();
+
+    private IndexRegistryProvider() {
+    }
+
+    @Override
+    public IndexRegistry<IIndex> getRegistry(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getIndexRegistry();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
similarity index 100%
rename from hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
rename to hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
new file mode 100644
index 0000000..56d517f
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.smi.TransientFileMapManager;
+
+public class RuntimeContext {
+    private IndexRegistry<IIndex> indexRegistry;
+    private IBufferCache bufferCache;
+    private IFileMapManager fileMapManager;
+
+    public RuntimeContext(INCApplicationContext appCtx) {
+        fileMapManager = new TransientFileMapManager();
+        ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+        IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
+        bufferCache = new BufferCache(appCtx.getRootContext().getIOManager(), allocator, prs,
+                new DelayPageCleanerPolicy(1000), fileMapManager, 32768, 50, 100);
+        indexRegistry = new IndexRegistry<IIndex>();
+    }
+
+    public void close() {
+        bufferCache.close();
+    }
+
+    public IBufferCache getBufferCache() {
+        return bufferCache;
+    }
+
+    public IFileMapProvider getFileMapManager() {
+        return fileMapManager;
+    }
+
+    public IndexRegistry<IIndex> getIndexRegistry() {
+        return indexRegistry;
+    }
+
+    public static RuntimeContext get(IHyracksTaskContext ctx) {
+        return (RuntimeContext) ctx.getJobletContext().getApplicationContext().getApplicationObject();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java
new file mode 100644
index 0000000..4c6363b
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public class StorageManagerInterface implements IStorageManagerInterface {
+    private static final long serialVersionUID = 1L;
+
+    public static final StorageManagerInterface INSTANCE = new StorageManagerInterface();
+
+    private StorageManagerInterface() {
+    }
+
+    @Override
+    public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getBufferCache();
+    }
+
+    @Override
+    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getFileMapManager();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties
similarity index 100%
rename from hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties
rename to hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties
diff --git a/hyracks/hyracks-examples/btree-example/pom.xml b/hyracks/hyracks-examples/btree-example/pom.xml
new file mode 100644
index 0000000..0ff4fc3
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/pom.xml
@@ -0,0 +1,20 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples</groupId>
+  <artifactId>btree-example</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>btree-example</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-examples</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>btreehelper</module>
+    <module>btreeclient</module>
+    <module>btreeapp</module>
+  </modules>
+</project>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/conf/local_cluster.conf b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/conf/local_cluster.conf
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatapp/conf/local_cluster.conf
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/conf/local_cluster.conf
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file1.txt b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file1.txt
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file1.txt
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file1.txt
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file2.txt b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file2.txt
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file2.txt
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file2.txt
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/job/wordcount/wordcount.job b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/job/wordcount/wordcount.job
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatapp/job/wordcount/wordcount.job
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/job/wordcount/wordcount.job
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
new file mode 100644
index 0000000..95cfaff
--- /dev/null
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
@@ -0,0 +1,192 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
+  <artifactId>hadoopcompatapp</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hadoopcompatapp</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>hadoop-compat-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+	</pluginManagement>
+  
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/application/lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+      	<groupId>edu.uci.ics.hyracks</groupId>
+      	<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
+      	<version>0.2.3-SNAPSHOT</version>
+        <configuration>
+          <hyracksServerHome>${basedir}/../../../hyracks-server/target/hyracks-server-${project.version}-binary-assembly</hyracksServerHome>
+          <hyracksCLIHome>${basedir}/../../../hyracks-cli/target/hyracks-cli-${project.version}-binary-assembly</hyracksCLIHome>
+          <jvmOptions>${jvm.extraargs}</jvmOptions>
+        </configuration>
+        <executions>
+          <execution>
+            <id>hyracks-cc-start</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>start-cc</goal>
+            </goals>
+            <configuration>
+	      <workingDir>${project.build.directory}</workingDir>
+            </configuration>
+          </execution>
+          <execution>
+            <id>hyracks-nc1-start</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>start-nc</goal>
+            </goals>
+            <configuration>
+              <nodeId>NC1</nodeId>
+              <dataIpAddress>127.0.0.1</dataIpAddress>
+              <ccHost>localhost</ccHost>
+	      <workingDir>${project.build.directory}</workingDir>
+            </configuration>
+          </execution>
+          <execution>
+            <id>hyracks-nc2-start</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>start-nc</goal>
+            </goals>
+            <configuration>
+              <nodeId>NC2</nodeId>
+              <dataIpAddress>127.0.0.1</dataIpAddress>
+              <ccHost>localhost</ccHost>
+	      <workingDir>${project.build.directory}</workingDir>
+            </configuration>
+          </execution>
+          <execution>
+            <id>deploy-app</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>deploy-app</goal>
+            </goals>
+            <configuration>
+              <ccHost>localhost</ccHost>
+              <appName>compat</appName>
+              <harFile>${project.build.directory}/hadoopcompatapp-${project.version}-app-assembly.zip</harFile>
+            </configuration>
+          </execution>
+	     <execution>
+	       <id>stop-services</id>
+	       <phase>post-integration-test</phase>
+	       <goals>
+	         <goal>stop-services</goal>
+	       </goals>
+             </execution>
+          </executions>
+      </plugin>
+      <plugin>
+      	<groupId>org.apache.maven.plugins</groupId>
+      	<artifactId>maven-compiler-plugin</artifactId>
+      	<version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+      	<groupId>org.apache.maven.plugins</groupId>
+      	<artifactId>maven-failsafe-plugin</artifactId>
+      	<version>2.8.1</version>
+      	<executions>
+      	  <execution>
+      	    <id>it</id>
+      	    <phase>integration-test</phase>
+      	    <goals>
+      	      <goal>integration-test</goal>
+      	    </goals>
+      	  </execution>
+      	</executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+     <dependency>
+        <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
+        <artifactId>hadoopcompathelper</artifactId>
+        <version>0.2.3-SNAPSHOT</version>
+        <scope>compile</scope>
+     </dependency>
+     <dependency>
+        <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
+  	    <artifactId>hadoopcompatclient</artifactId>
+  	    <version>0.2.3-SNAPSHOT</version>
+  	    <type>jar</type>
+  	    <scope>test</scope>
+     </dependency>
+     <dependency>
+  	    <groupId>junit</groupId>
+  	    <artifactId>junit</artifactId>
+  	    <version>4.8.2</version>
+  	    <type>jar</type>
+  	    <scope>test</scope>
+     </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/main/assembly/app-assembly.xml
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/main/assembly/app-assembly.xml
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/main/assembly/app-assembly.xml
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
new file mode 100644
index 0000000..d260601
--- /dev/null
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
@@ -0,0 +1,82 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
+  <artifactId>hadoopcompatclient</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hadoopcompatclient</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>hadoop-compat-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks.examples.compat</groupId>
+  		<artifactId>hadoopcompathelper</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.examples.compat.client.WordCountCompatibility</mainClass>
+                  <name>hadoopcompatclient</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/assembly/binary-assembly.xml
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/java/edu/uci/ics/hyracks/examples/compat/client/WordCountCompatibility.java b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/java/edu/uci/ics/hyracks/examples/compat/client/WordCountCompatibility.java
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/java/edu/uci/ics/hyracks/examples/compat/client/WordCountCompatibility.java
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/src/main/java/edu/uci/ics/hyracks/examples/compat/client/WordCountCompatibility.java
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
new file mode 100644
index 0000000..f61b9e8
--- /dev/null
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
@@ -0,0 +1,41 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
+  <artifactId>hadoopcompathelper</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hadoopcompathelper</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>hadoop-compat-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/src/main/java/edu/uci/ics/hyracks/examples/wordcount/WordCount.java b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/src/main/java/edu/uci/ics/hyracks/examples/wordcount/WordCount.java
similarity index 100%
rename from hyracks-examples/hadoop-compat-example/hadoopcompathelper/src/main/java/edu/uci/ics/hyracks/examples/wordcount/WordCount.java
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/src/main/java/edu/uci/ics/hyracks/examples/wordcount/WordCount.java
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
new file mode 100644
index 0000000..a2cf5ae
--- /dev/null
+++ b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
@@ -0,0 +1,38 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples</groupId>
+  <artifactId>hadoop-compat-example</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>hadoop-compat-example</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-examples</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hadoopcompathelper</module>
+    <module>hadoopcompatclient</module>
+    <module>hadoopcompatapp</module>
+  </modules>
+
+  <dependencies>
+      <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-core</artifactId>
+         <version>0.20.2</version>
+         <type>jar</type>
+         <scope>compile</scope>
+      </dependency>
+
+      <dependency>
+         <groupId>edu.uci.ics.hyracks</groupId>
+         <artifactId>hyracks-hadoop-compat</artifactId>
+         <version>0.2.3-SNAPSHOT</version>
+         <type>jar</type>
+         <scope>compile</scope>
+      </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-examples/hyracks-integration-tests/data/cleanednumbereddblptitles.txt b/hyracks/hyracks-examples/hyracks-integration-tests/data/cleanednumbereddblptitles.txt
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/cleanednumbereddblptitles.txt
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/cleanednumbereddblptitles.txt
diff --git a/hyracks-examples/hyracks-integration-tests/data/dblp.txt b/hyracks/hyracks-examples/hyracks-integration-tests/data/dblp.txt
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/dblp.txt
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/dblp.txt
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt b/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt
new file mode 100644
index 0000000..f936a6e
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt
@@ -0,0 +1,750 @@
+1|37|O|131251.81|1996-01-02|5-LOW|Clerk#000000951|0|nstructions sleep furiously among |42.3631|-71.065|42.3631|-71.065|
+2|79|O|40183.29|1996-12-01|1-URGENT|Clerk#000000880|0| foxes. pending accounts at the pending, silent asymptot|42.1091|-70.696|42.1091|-70.696|
+3|124|F|160882.76|1993-10-14|5-LOW|Clerk#000000955|0|sly final accounts boost. carefully regular ideas cajole carefully. depos|40.8151|-73.0452|40.8151|-73.0452|
+4|137|O|31084.79|1995-10-11|5-LOW|Clerk#000000124|0|sits. slyly regular warthogs cajole. regular, regular theodolites acro|40.8151|-73.0452|40.8151|-73.0452|
+5|46|F|86615.25|1994-07-30|5-LOW|Clerk#000000925|0|quickly. bold deposits sleep slyly. packages use slyly|42.2481|-71.174|42.2481|-71.174|
+6|56|F|36468.55|1992-02-21|4-NOT SPECIFIED|Clerk#000000058|0|ggle. special, final requests are against the furiously specia|61.1201|-149.89|61.1201|-149.89|
+7|40|O|171488.73|1996-01-10|2-HIGH|Clerk#000000470|0|ly special requests |61.1501|-149.926|61.1501|-149.926|
+32|131|O|116923.00|1995-07-16|2-HIGH|Clerk#000000616|0|ise blithely bold, regular requests. quickly unusual dep|61.181|-149.814|61.181|-149.814|
+33|67|F|99798.76|1993-10-27|3-MEDIUM|Clerk#000000409|0|uriously. furiously final request|61.1517|-149.86|61.1517|-149.86|
+34|62|O|41670.02|1998-07-21|3-MEDIUM|Clerk#000000223|0|ly final packages. fluffily final deposits wake blithely ideas. spe|61.1806|-149.814|61.1806|-149.814|
+35|128|O|148789.52|1995-10-23|4-NOT SPECIFIED|Clerk#000000259|0|zzle. carefully enticing deposits nag furio|61.1806|-149.775|61.1806|-149.775|
+36|116|O|38988.98|1995-11-03|1-URGENT|Clerk#000000358|0| quick packages are blithely. slyly silent accounts wake qu|61.1806|-149.775|61.1806|-149.775|
+37|88|F|113701.89|1992-06-03|3-MEDIUM|Clerk#000000456|0|kly regular pinto beans. carefully unusual waters cajole never|61.1806|-149.775|61.1806|-149.775|
+38|125|O|46366.56|1996-08-21|4-NOT SPECIFIED|Clerk#000000604|0|haggle blithely. furiously express ideas haggle blithely furiously regular re|61.2113|-149.824|61.2113|-149.824|
+39|82|O|219707.84|1996-09-20|3-MEDIUM|Clerk#000000659|0|ole express, ironic requests: ir|61.1967|-149.877|61.1967|-149.877|
+64|34|F|20065.73|1994-07-16|3-MEDIUM|Clerk#000000661|0|wake fluffily. sometimes ironic pinto beans about the dolphin|61.2164|-149.892|61.2164|-149.892|
+65|17|P|65883.92|1995-03-18|1-URGENT|Clerk#000000632|0|ular requests are blithely pending orbits-- even requests against the deposit|61.1571|-149.883|61.1571|-149.883|
+66|130|F|79258.24|1994-01-20|5-LOW|Clerk#000000743|0|y pending requests integrate|61.2048|-149.834|61.2048|-149.834|
+67|58|O|116227.05|1996-12-19|4-NOT SPECIFIED|Clerk#000000547|0|symptotes haggle slyly around the furiously iron|61.0956|-149.843|61.0956|-149.843|
+68|29|O|215135.72|1998-04-18|3-MEDIUM|Clerk#000000440|0| pinto beans sleep carefully. blithely ironic deposits haggle furiously acro|61.1491|-149.809|61.1491|-149.809|
+69|85|F|162176.23|1994-06-04|4-NOT SPECIFIED|Clerk#000000330|0| depths atop the slyly thin deposits detect among the furiously silent accou|61.1981|-149.871|61.1981|-149.871|
+70|65|F|84651.80|1993-12-18|5-LOW|Clerk#000000322|0| carefully ironic request|61.1982|-149.876|61.1982|-149.876|
+71|4|O|178821.73|1998-01-24|4-NOT SPECIFIED|Clerk#000000271|0| express deposits along the blithely regul|61.1924|-149.909|61.1924|-149.909|
+96|109|F|55090.67|1994-04-17|2-HIGH|Clerk#000000395|0|oost furiously. pinto|61.2204|-149.728|61.2204|-149.728|
+97|22|F|68908.31|1993-01-29|3-MEDIUM|Clerk#000000547|0|hang blithely along the regular accounts. furiously even ideas after the|61.196|-149.864|61.196|-149.864|
+98|106|F|51004.44|1994-09-25|1-URGENT|Clerk#000000448|0|c asymptotes. quickly regular packages should have to nag re|61.1987|-149.889|61.1987|-149.889|
+99|89|F|92326.79|1994-03-13|4-NOT SPECIFIED|Clerk#000000973|0|e carefully ironic packages. pending|61.1984|-149.897|61.1984|-149.897|
+100|148|O|141311.01|1998-02-28|4-NOT SPECIFIED|Clerk#000000577|0|heodolites detect slyly alongside of the ent|61.1897|-149.898|61.1897|-149.898|
+101|28|O|95591.40|1996-03-17|3-MEDIUM|Clerk#000000419|0|ding accounts above the slyly final asymptote|61.1228|-149.81|61.1228|-149.81|
+102|1|O|113954.89|1997-05-09|2-HIGH|Clerk#000000596|0| slyly according to the asymptotes. carefully final packages integrate furious|61.1649|-149.881|61.1649|-149.881|
+103|31|O|95563.95|1996-06-20|4-NOT SPECIFIED|Clerk#000000090|0|ges. carefully unusual instructions haggle quickly regular f|61.1934|-149.887|61.1934|-149.887|
+128|74|F|36333.34|1992-06-15|1-URGENT|Clerk#000000385|0|ns integrate fluffily. ironic asymptotes after the regular excuses nag around |61.2164|-149.892|61.2164|-149.892|
+129|73|F|188124.55|1992-11-19|5-LOW|Clerk#000000859|0|ing tithes. carefully pending deposits boost about the silently express |61.1932|-149.886|61.1932|-149.886|
+130|37|F|115717.37|1992-05-08|2-HIGH|Clerk#000000036|0|le slyly unusual, regular packages? express deposits det|61.2072|-149.888|61.2072|-149.888|
+131|94|F|96596.81|1994-06-08|3-MEDIUM|Clerk#000000625|0|after the fluffily special foxes integrate s|61.2125|-149.904|61.2125|-149.904|
+132|28|F|118802.62|1993-06-11|3-MEDIUM|Clerk#000000488|0|sits are daringly accounts. carefully regular foxes sleep slyly about the|61.2142|-149.806|61.2142|-149.806|
+133|44|O|80437.72|1997-11-29|1-URGENT|Clerk#000000738|0|usly final asymptotes |61.1866|-149.923|61.1866|-149.923|
+134|7|F|154260.84|1992-05-01|4-NOT SPECIFIED|Clerk#000000711|0|lar theodolites boos|61.1089|-149.857|61.1089|-149.857|
+135|61|O|174569.88|1995-10-21|4-NOT SPECIFIED|Clerk#000000804|0|l platelets use according t|61.1024|-149.853|61.1024|-149.853|
+160|83|O|86076.86|1996-12-19|4-NOT SPECIFIED|Clerk#000000342|0|thely special sauternes wake slyly of t|61.1891|-149.906|61.1891|-149.906|
+161|17|F|19056.99|1994-08-31|2-HIGH|Clerk#000000322|0|carefully! special instructions sin|61.1891|-149.906|61.1891|-149.906|
+162|16|O|2158.13|1995-05-08|3-MEDIUM|Clerk#000000378|0|nts hinder fluffily ironic instructions. express, express excuses |61.1891|-149.906|61.1891|-149.906|
+163|88|O|125170.86|1997-09-05|3-MEDIUM|Clerk#000000379|0|y final packages. final foxes since the quickly even|61.1891|-149.906|61.1891|-149.906|
+164|1|F|202660.52|1992-10-21|5-LOW|Clerk#000000209|0|cajole ironic courts. slyly final ideas are slyly. blithely final Tiresias sub|61.1891|-149.906|61.1891|-149.906|
+165|28|F|141824.23|1993-01-30|4-NOT SPECIFIED|Clerk#000000292|0|across the blithely regular accounts. bold|61.1891|-149.906|61.1891|-149.906|
+166|109|O|93335.60|1995-09-12|2-HIGH|Clerk#000000440|0|lets. ironic, bold asymptotes kindle|61.1891|-149.906|61.1891|-149.906|
+167|121|F|52982.23|1993-01-04|4-NOT SPECIFIED|Clerk#000000731|0|s nag furiously bold excuses. fluffily iron|61.1891|-149.906|61.1891|-149.906|
+192|83|O|133002.55|1997-11-25|5-LOW|Clerk#000000483|0|y unusual platelets among the final instructions integrate rut|61.1891|-149.906|61.1891|-149.906|
+193|80|F|48053.18|1993-08-08|1-URGENT|Clerk#000000025|0|the furiously final pin|61.1891|-149.906|61.1891|-149.906|
+194|62|F|114097.63|1992-04-05|3-MEDIUM|Clerk#000000352|0|egular requests haggle slyly regular, regular pinto beans. asymptote|61.1891|-149.906|61.1891|-149.906|
+195|136|F|120053.52|1993-12-28|3-MEDIUM|Clerk#000000216|0|old forges are furiously sheaves. slyly fi|61.1891|-149.906|61.1891|-149.906|
+196|65|F|33248.04|1993-03-17|2-HIGH|Clerk#000000988|0|beans boost at the foxes. silent foxes|61.1891|-149.906|61.1891|-149.906|
+197|34|P|100290.07|1995-04-07|2-HIGH|Clerk#000000969|0|solve quickly about the even braids. carefully express deposits affix care|61.1891|-149.906|61.1891|-149.906|
+198|112|O|125792.83|1998-01-02|4-NOT SPECIFIED|Clerk#000000331|0|its. carefully ironic requests sleep. furiously express fox|61.1891|-149.906|61.1891|-149.906|
+199|53|O|80592.44|1996-03-07|2-HIGH|Clerk#000000489|0|g theodolites. special packag|61.1891|-149.906|61.1891|-149.906|
+224|4|F|155680.60|1994-06-18|4-NOT SPECIFIED|Clerk#000000642|0|r the quickly thin courts. carefully|61.1891|-149.906|61.1891|-149.906|
+225|34|P|165890.47|1995-05-25|1-URGENT|Clerk#000000177|0|s. blithely ironic accounts wake quickly fluffily special acc|61.1891|-149.906|61.1891|-149.906|
+226|128|F|180119.22|1993-03-10|2-HIGH|Clerk#000000756|0|s are carefully at the blithely ironic acc|61.1891|-149.906|61.1891|-149.906|
+227|10|O|46076.46|1995-11-10|5-LOW|Clerk#000000919|0| express instructions. slyly regul|61.1891|-149.906|61.1891|-149.906|
+228|46|F|2638.98|1993-02-25|1-URGENT|Clerk#000000562|0|es was slyly among the regular foxes. blithely regular dependenci|61.1891|-149.906|61.1891|-149.906|
+229|112|F|142290.77|1993-12-29|1-URGENT|Clerk#000000628|0|he fluffily even instructions. furiously i|61.1891|-149.906|61.1891|-149.906|
+230|103|F|107231.60|1993-10-27|1-URGENT|Clerk#000000520|0|odolites. carefully quick requ|61.1891|-149.906|61.1891|-149.906|
+231|91|F|141554.06|1994-09-29|2-HIGH|Clerk#000000446|0| packages haggle slyly after the carefully ironic instruct|61.1891|-149.906|61.1891|-149.906|
+256|125|F|106315.25|1993-10-19|4-NOT SPECIFIED|Clerk#000000834|0|he fluffily final ideas might are final accounts. carefully f|61.1891|-149.906|61.1891|-149.906|
+257|124|O|7102.74|1998-03-28|3-MEDIUM|Clerk#000000680|0|ts against the sly warhorses cajole slyly accounts|61.1891|-149.906|61.1891|-149.906|
+258|43|F|186669.10|1993-12-29|1-URGENT|Clerk#000000167|0|dencies. blithely quick packages cajole. ruthlessly final accounts|61.1891|-149.906|61.1891|-149.906|
+259|44|F|75661.70|1993-09-29|4-NOT SPECIFIED|Clerk#000000601|0|ages doubt blithely against the final foxes. carefully express deposits dazzle|61.1891|-149.906|61.1891|-149.906|
+260|106|O|179292.14|1996-12-10|3-MEDIUM|Clerk#000000960|0|lently regular pinto beans sleep after the slyly e|61.1891|-149.906|61.1891|-149.906|
+261|47|F|201003.12|1993-06-29|3-MEDIUM|Clerk#000000310|0|ully fluffily brave instructions. furiousl|61.1891|-149.906|61.1891|-149.906|
+262|31|O|108443.84|1995-11-25|4-NOT SPECIFIED|Clerk#000000551|0|l packages. blithely final pinto beans use carefu|61.1891|-149.906|61.1891|-149.906|
+263|118|F|79782.56|1994-05-17|2-HIGH|Clerk#000000088|0| pending instructions. blithely un|61.1891|-149.906|61.1891|-149.906|
+288|8|O|163794.53|1997-02-21|1-URGENT|Clerk#000000109|0|uriously final requests. even, final ideas det|61.1891|-149.906|61.1891|-149.906|
+289|104|O|131092.67|1997-02-10|3-MEDIUM|Clerk#000000103|0|sily. slyly special excuse|61.1891|-149.906|61.1891|-149.906|
+290|118|F|62814.89|1994-01-01|4-NOT SPECIFIED|Clerk#000000735|0|efully dogged deposits. furiou|61.1891|-149.906|61.1891|-149.906|
+291|142|F|66817.05|1994-03-13|1-URGENT|Clerk#000000923|0|dolites. carefully regular pinto beans cajol|64.8541|-147.813|64.8541|-147.813|
+292|23|F|30783.05|1992-01-13|2-HIGH|Clerk#000000193|0|g pinto beans will have to sleep f|64.8414|-147.606|64.8414|-147.606|
+293|31|F|37248.78|1992-10-02|2-HIGH|Clerk#000000629|0|re bold, ironic deposits. platelets c|64.8371|-147.746|64.8371|-147.746|
+294|52|F|30059.47|1993-07-16|3-MEDIUM|Clerk#000000499|0|kly according to the frays. final dolphins affix quickly |64.8151|-147.707|64.8151|-147.707|
+295|19|F|89345.99|1994-09-29|2-HIGH|Clerk#000000155|0| unusual pinto beans play. regular ideas haggle|64.8371|-147.746|64.8371|-147.746|
+320|1|O|39835.54|1997-11-21|2-HIGH|Clerk#000000573|0|ar foxes nag blithely|64.849|-147.813|64.849|-147.813|
+321|124|F|62251.15|1993-03-21|3-MEDIUM|Clerk#000000289|0|equests run. blithely final dependencies after the deposits wake caref|64.8425|-147.724|64.8425|-147.724|
+322|134|F|127068.89|1992-03-19|1-URGENT|Clerk#000000158|0|fully across the slyly bold packages. packages against the quickly regular i|64.8425|-147.724|64.8425|-147.724|
+323|40|F|79683.42|1994-03-26|1-URGENT|Clerk#000000959|0|arefully pending foxes sleep blithely. slyly express accoun|64.849|-147.826|64.849|-147.826|
+324|106|F|26868.85|1992-03-20|1-URGENT|Clerk#000000352|0| about the ironic, regular deposits run blithely against the excuses|64.815|-147.882|64.815|-147.882|
+325|41|F|71543.41|1993-10-17|5-LOW|Clerk#000000844|0|ly sometimes pending pa|64.8906|-147.628|64.8906|-147.628|
+326|76|O|229165.17|1995-06-04|2-HIGH|Clerk#000000466|0| requests. furiously ironic asymptotes mold carefully alongside of the blit|64.8276|-147.639|64.8276|-147.639|
+327|145|P|24468.16|1995-04-17|5-LOW|Clerk#000000992|0|ng the slyly final courts. slyly even escapades eat |64.8461|-147.813|64.8461|-147.813|
+352|107|F|16003.86|1994-03-08|2-HIGH|Clerk#000000932|0|ke slyly bold pinto beans. blithely regular accounts against the spe|64.8281|-147.812|64.8281|-147.812|
+353|2|F|179984.42|1993-12-31|5-LOW|Clerk#000000449|0| quiet ideas sleep. even instructions cajole slyly. silently spe|64.8377|-147.718|64.8377|-147.718|
+354|139|O|157062.70|1996-03-14|2-HIGH|Clerk#000000511|0|ly regular ideas wake across the slyly silent ideas. final deposits eat b|64.8417|-147.718|64.8417|-147.718|
+355|71|F|69447.25|1994-06-14|5-LOW|Clerk#000000532|0|s. sometimes regular requests cajole. regular, pending accounts a|64.8145|-147.772|64.8145|-147.772|
+356|148|F|162786.67|1994-06-30|4-NOT SPECIFIED|Clerk#000000944|0|as wake along the bold accounts. even, |64.8541|-147.813|64.8541|-147.813|
+357|61|O|98723.11|1996-10-09|2-HIGH|Clerk#000000301|0|e blithely about the express, final accounts. quickl|64.8169|-147.779|64.8169|-147.779|
+358|4|F|226806.66|1993-09-20|2-HIGH|Clerk#000000392|0|l, silent instructions are slyly. silently even de|64.8378|-147.71|64.8378|-147.71|
+359|79|F|142891.22|1994-12-19|3-MEDIUM|Clerk#000000934|0|n dolphins. special courts above the carefully ironic requests use|64.8436|-147.722|64.8436|-147.722|
+384|115|F|122785.82|1992-03-03|5-LOW|Clerk#000000206|0|, even accounts use furiously packages. slyly ironic pla|64.9401|-147.402|64.9401|-147.402|
+385|34|O|50724.06|1996-03-22|5-LOW|Clerk#000000600|0|hless accounts unwind bold pain|64.8426|-147.719|64.8426|-147.719|
+386|61|F|90380.40|1995-01-25|2-HIGH|Clerk#000000648|0| haggle quickly. stealthily bold asymptotes haggle among the furiously even re|64.8534|-147.811|64.8534|-147.811|
+387|4|O|130647.18|1997-01-26|4-NOT SPECIFIED|Clerk#000000768|0| are carefully among the quickly even deposits. furiously silent req|64.9341|-147.928|64.9341|-147.928|
+388|46|F|120533.46|1992-12-16|4-NOT SPECIFIED|Clerk#000000356|0|ar foxes above the furiously ironic deposits nag slyly final reque|64.8393|-147.72|64.8393|-147.72|
+389|127|F|1984.14|1994-02-17|2-HIGH|Clerk#000000062|0|ing to the regular asymptotes. final, pending foxes about the blithely sil|64.8406|-147.731|64.8406|-147.731|
+390|103|O|168562.27|1998-04-07|5-LOW|Clerk#000000404|0|xpress asymptotes use among the regular, final pinto b|64.9281|-147.865|64.9281|-147.865|
+391|112|F|13282.23|1994-11-17|2-HIGH|Clerk#000000256|0|orges thrash fluffil|64.8371|-147.716|64.8371|-147.716|
+416|41|F|71362.50|1993-09-27|5-LOW|Clerk#000000294|0| the accounts. fluffily bold depo|64.9414|-147.841|64.9414|-147.841|
+417|55|F|91982.29|1994-02-06|3-MEDIUM|Clerk#000000468|0|ironic, even packages. thinly unusual accounts sleep along the slyly unusual |64.8363|-147.79|64.8363|-147.79|
+418|95|P|33124.96|1995-04-13|4-NOT SPECIFIED|Clerk#000000643|0|. furiously ironic instruc|64.8371|-147.716|64.8371|-147.716|
+419|118|O|111597.96|1996-10-01|3-MEDIUM|Clerk#000000376|0|osits. blithely pending theodolites boost carefully|64.8591|-147.917|64.8591|-147.917|
+420|91|O|198039.23|1995-10-31|4-NOT SPECIFIED|Clerk#000000756|0|leep carefully final excuses. fluffily pending requests unwind carefully above|64.8363|-147.79|64.8363|-147.79|
+421|40|F|1084.38|1992-02-22|5-LOW|Clerk#000000405|0|egular, even packages according to the final, un|55.3801|-131.682|55.3801|-131.682|
+422|74|O|106045.89|1997-05-31|4-NOT SPECIFIED|Clerk#000000049|0|aggle carefully across the accounts. regular accounts eat fluffi|55.3073|-131.528|55.3073|-131.528|
+423|104|O|26981.31|1996-06-01|1-URGENT|Clerk#000000674|0|quests. deposits cajole quickly. furiously bold accounts haggle q|55.3801|-131.682|55.3801|-131.682|
+448|149|O|114978.03|1995-08-21|3-MEDIUM|Clerk#000000597|0| regular, express foxes use blithely. quic|55.3601|-131.681|55.3601|-131.681|
+449|97|O|41605.63|1995-07-20|2-HIGH|Clerk#000000841|0|. furiously regular theodolites affix blithely |55.3279|-131.613|55.3279|-131.613|
+450|49|P|153386.61|1995-03-05|4-NOT SPECIFIED|Clerk#000000293|0|d theodolites. boldly bold foxes since the pack|55.3129|-131.588|55.3129|-131.588|
+451|100|O|104664.40|1998-05-25|5-LOW|Clerk#000000048|0|nic pinto beans. theodolites poach carefully; |55.3801|-131.682|55.3801|-131.682|
+452|61|O|2007.48|1997-10-14|1-URGENT|Clerk#000000498|0|t, unusual instructions above the blithely bold pint|55.3801|-131.682|55.3801|-131.682|
+453|46|O|216826.73|1997-05-26|5-LOW|Clerk#000000504|0|ss foxes. furiously regular ideas sleep according to t|55.4299|-131.789|55.4299|-131.789|
+454|49|O|23198.24|1995-12-27|5-LOW|Clerk#000000890|0|dolites sleep carefully blithely regular deposits. quickly regul|55.3801|-131.682|55.3801|-131.682|
+455|13|O|138010.76|1996-12-04|1-URGENT|Clerk#000000796|0| about the final platelets. dependen|55.3507|-131.671|55.3507|-131.671|
+480|73|F|20530.97|1993-05-08|5-LOW|Clerk#000000004|0|ealthy pinto beans. fluffily regular requests along the special sheaves wake |55.3801|-131.682|55.3801|-131.682|
+481|31|F|117827.18|1992-10-08|2-HIGH|Clerk#000000230|0|ly final ideas. packages haggle fluffily|55.3394|-131.636|55.3394|-131.636|
+482|127|O|136634.34|1996-03-26|1-URGENT|Clerk#000000295|0|ts. deposits wake: final acco|55.3801|-131.682|55.3801|-131.682|
+483|35|O|39793.05|1995-07-11|2-HIGH|Clerk#000000025|0|cross the carefully final e|55.3103|-131.582|55.3103|-131.582|
+484|55|O|219920.62|1997-01-03|3-MEDIUM|Clerk#000000545|0|grouches use. furiously bold accounts maintain. bold, regular deposits|55.3801|-131.682|55.3801|-131.682|
+485|101|O|110432.76|1997-03-26|2-HIGH|Clerk#000000105|0| regular ideas nag thinly furiously s|55.3801|-131.682|55.3801|-131.682|
+486|52|O|185968.15|1996-03-11|4-NOT SPECIFIED|Clerk#000000803|0|riously dolphins. fluffily ironic requ|55.3801|-131.682|55.3801|-131.682|
+487|109|F|48502.79|1992-08-18|1-URGENT|Clerk#000000086|0|ithely unusual courts eat accordi|55.3801|-131.682|55.3801|-131.682|
+512|64|P|124661.48|1995-05-20|5-LOW|Clerk#000000814|0|ding requests. carefully express theodolites was quickly. furious|55.3801|-131.682|55.3801|-131.682|
+513|61|O|63703.92|1995-05-01|2-HIGH|Clerk#000000522|0|regular packages. pinto beans cajole carefully against the even|55.3424|-131.634|55.3424|-131.634|
+514|76|O|104585.77|1996-04-04|2-HIGH|Clerk#000000094|0| cajole furiously. slyly final excuses cajole. slyly special instructions |55.4097|-131.729|55.4097|-131.729|
+515|142|F|153720.22|1993-08-29|4-NOT SPECIFIED|Clerk#000000700|0|eposits are furiously furiously silent pinto beans. pending pack|55.3801|-131.682|55.3801|-131.682|
+516|44|O|10677.86|1998-04-21|2-HIGH|Clerk#000000305|0|lar, unusual platelets are carefully. even courts sleep bold, final pinto bea|55.3801|-131.682|55.3801|-131.682|
+517|10|O|82197.79|1997-04-07|5-LOW|Clerk#000000359|0|slyly pending deposits cajole quickly packages. furiou|55.3462|-131.658|55.3462|-131.658|
+518|145|O|223537.09|1998-02-08|2-HIGH|Clerk#000000768|0| the carefully bold accounts. quickly regular excuses are|55.3801|-131.682|55.3801|-131.682|
+519|64|O|95731.50|1997-10-31|1-URGENT|Clerk#000000985|0|ains doze furiously against the f|55.3801|-131.682|55.3801|-131.682|
+544|94|F|47627.89|1993-02-17|2-HIGH|Clerk#000000145|0|the special, final accounts. dogged dolphins|55.3801|-131.682|55.3801|-131.682|
+545|64|O|23476.12|1995-11-07|2-HIGH|Clerk#000000537|0|as. blithely final hockey players about th|55.3801|-131.682|55.3801|-131.682|
+546|145|O|14790.37|1996-11-01|2-HIGH|Clerk#000000041|0|osits sleep. slyly special dolphins about the q|55.3801|-131.682|55.3801|-131.682|
+547|100|O|96855.29|1996-06-22|3-MEDIUM|Clerk#000000976|0|ing accounts eat. carefully regular packa|55.3801|-131.682|55.3801|-131.682|
+548|124|F|99088.75|1994-09-21|1-URGENT|Clerk#000000435|0|arefully express instru|55.3801|-131.682|55.3801|-131.682|
+549|110|F|141679.41|1992-07-13|1-URGENT|Clerk#000000196|0|ideas alongside of |55.3801|-131.682|55.3801|-131.682|
+550|25|O|33123.28|1995-08-02|1-URGENT|Clerk#000000204|0|t requests. blithely |61.5856|-149.316|61.5856|-149.316|
+551|91|O|46355.83|1995-05-30|1-URGENT|Clerk#000000179|0|xpress accounts boost quic|61.5781|-149.429|61.5781|-149.429|
+576|31|O|18307.45|1997-05-13|3-MEDIUM|Clerk#000000955|0|l requests affix regular requests. final account|61.6141|-149.457|61.6141|-149.457|
+577|56|F|34768.68|1994-12-19|5-LOW|Clerk#000000154|0| deposits engage stealthil|61.5801|-149.461|61.5801|-149.461|
+578|94|O|70392.02|1997-01-10|5-LOW|Clerk#000000281|0|e blithely even packages. slyly pending platelets bes|61.9071|-150.067|61.9071|-150.067|
+579|68|O|120828.12|1998-03-11|2-HIGH|Clerk#000000862|0| regular instructions. blithely even p|61.5928|-149.392|61.5928|-149.392|
+580|61|O|88219.12|1997-07-05|2-HIGH|Clerk#000000314|0|tegrate fluffily regular accou|61.6141|-149.457|61.6141|-149.457|
+581|70|O|126066.00|1997-02-23|4-NOT SPECIFIED|Clerk#000000239|0| requests. even requests use slyly. blithely ironic |61.5792|-149.36|61.5792|-149.36|
+582|50|O|129004.81|1997-10-21|1-URGENT|Clerk#000000378|0|n pinto beans print a|61.6049|-149.463|61.6049|-149.463|
+583|49|O|127817.38|1997-03-19|3-MEDIUM|Clerk#000000792|0|efully express requests. a|61.6099|-149.328|61.6099|-149.328|
+608|26|O|62567.99|1996-02-28|3-MEDIUM|Clerk#000000995|0|nic waters wake slyly slyly expre|61.5531|-149.651|61.5531|-149.651|
+609|127|F|21088.59|1994-06-01|3-MEDIUM|Clerk#000000348|0|- ironic gifts believe furiously ca|61.6141|-149.457|61.6141|-149.457|
+610|52|O|175142.28|1995-08-02|1-URGENT|Clerk#000000610|0|totes. ironic, unusual packag|61.6141|-149.457|61.6141|-149.457|
+611|106|F|73907.63|1993-01-27|1-URGENT|Clerk#000000401|0|ounts detect furiously ac|61.5531|-149.651|61.5531|-149.651|
+612|82|F|145695.42|1992-10-21|3-MEDIUM|Clerk#000000759|0|boost quickly quickly final excuses. final foxes use bravely afte|61.6141|-149.457|61.6141|-149.457|
+613|139|O|33396.35|1995-06-18|2-HIGH|Clerk#000000172|0|ts hinder among the deposits. fluffily ironic depos|61.7321|-150.12|61.7321|-150.12|
+614|134|F|218116.21|1992-12-01|2-HIGH|Clerk#000000388|0| deposits! even, daring theodol|61.6141|-149.457|61.6141|-149.457|
+615|67|F|32890.89|1992-05-09|5-LOW|Clerk#000000388|0|t to promise asymptotes. packages haggle alongside of the fluffil|61.582|-149.441|61.582|-149.441|
+640|97|F|145495.62|1993-01-23|2-HIGH|Clerk#000000433|0|r, unusual accounts boost carefully final ideas. slyly silent theod|61.5818|-149.44|61.5818|-149.44|
+641|133|F|120626.49|1993-08-30|5-LOW|Clerk#000000175|0|ents cajole furiously about the quickly silent pac|61.6141|-149.457|61.6141|-149.457|
+642|40|F|22994.51|1993-12-16|3-MEDIUM|Clerk#000000357|0| among the requests wake slyly alongside of th|61.7321|-150.12|61.7321|-150.12|
+643|58|P|180396.95|1995-03-25|2-HIGH|Clerk#000000354|0|g dependencies. regular accounts |61.6308|-149.415|61.6308|-149.415|
+644|8|F|201268.06|1992-05-01|1-URGENT|Clerk#000000550|0| blithely unusual platelets haggle ironic, special excuses. excuses unwi|61.5801|-149.461|61.5801|-149.461|
+645|115|F|234763.73|1994-12-03|2-HIGH|Clerk#000000090|0|quickly daring theodolites across the regu|61.5811|-149.444|61.5811|-149.444|
+646|52|F|142070.65|1994-11-22|2-HIGH|Clerk#000000203|0|carefully even foxes. fina|61.6521|-149.92|61.6521|-149.92|
+647|143|O|56449.23|1997-08-07|1-URGENT|Clerk#000000270|0|egular pearls. carefully express asymptotes are. even account|61.6141|-149.307|61.6141|-149.307|
+672|109|F|89877.09|1994-04-14|5-LOW|Clerk#000000106|0|egular requests are furiously according to |61.6168|-149.328|61.6168|-149.328|
+673|80|F|21137.08|1994-03-10|1-URGENT|Clerk#000000448|0| special pinto beans use quickly furiously even depende|61.5714|-149.381|61.5714|-149.381|
+674|34|F|27204.60|1992-08-29|5-LOW|Clerk#000000448|0|ully special deposits. furiously final warhorses affix carefully. fluffily f|61.6521|-149.92|61.6521|-149.92|
+675|13|O|125188.72|1997-07-31|2-HIGH|Clerk#000000168|0|ffily between the careful|61.5858|-149.376|61.5858|-149.376|
+676|38|O|163966.67|1996-12-13|2-HIGH|Clerk#000000248|0|the final deposits. special, pending|61.5822|-149.463|61.5822|-149.463|
+677|124|F|147915.68|1993-11-24|3-MEDIUM|Clerk#000000824|0|uriously special pinto beans cajole carefully. fi|61.5861|-149.303|61.5861|-149.303|
+678|131|F|135761.05|1993-02-27|5-LOW|Clerk#000000530|0|. blithely final somas about the|61.5821|-149.438|61.5821|-149.438|
+679|49|O|8945.03|1995-12-15|2-HIGH|Clerk#000000853|0|tealthy, final pinto beans haggle slyly. pending platelets about the special, |61.6281|-149.338|61.6281|-149.338|
+704|85|O|56210.26|1996-11-21|3-MEDIUM|Clerk#000000682|0|blithely pending platelets wake alongside of the final, iron|61.5771|-149.335|61.5771|-149.335|
+705|43|O|83773.49|1997-02-13|4-NOT SPECIFIED|Clerk#000000294|0|ithely regular dependencies. express, even packages sleep slyly pending t|61.5917|-149.464|61.5917|-149.464|
+706|148|O|23973.60|1995-09-09|1-URGENT|Clerk#000000448|0|g the packages. deposits caj|61.1927|-149.86|61.1927|-149.86|
+707|118|F|58218.35|1994-11-20|3-MEDIUM|Clerk#000000199|0| ideas about the silent, bold deposits nag dolphins|61.1879|-149.85|61.1879|-149.85|
+708|32|O|100445.59|1998-07-03|3-MEDIUM|Clerk#000000101|0|lphins cajole about t|61.1814|-149.849|61.1814|-149.849|
+709|37|O|72055.87|1998-04-21|1-URGENT|Clerk#000000461|0|ons alongside of the carefully bold pinto bea|61.2104|-149.892|61.2104|-149.892|
+710|133|F|208974.42|1993-01-02|5-LOW|Clerk#000000026|0| regular, regular requests boost. fluffily re|61.2093|-149.903|61.2093|-149.903|
+711|64|F|92484.70|1993-09-23|4-NOT SPECIFIED|Clerk#000000856|0|its. fluffily regular gifts are furi|61.1481|-149.829|61.1481|-149.829|
+736|47|O|130204.17|1998-06-21|5-LOW|Clerk#000000881|0|refully of the final pi|61.2161|-149.876|61.2161|-149.876|
+737|121|F|12984.85|1992-04-26|5-LOW|Clerk#000000233|0|ake blithely express, ironic theodolites. blithely special accounts wa|61.1972|-149.75|61.1972|-149.75|
+738|22|F|114145.18|1993-03-02|4-NOT SPECIFIED|Clerk#000000669|0|ly even foxes. furiously regular accounts cajole ca|61.2066|-149.887|61.2066|-149.887|
+739|1|O|159171.69|1998-05-31|5-LOW|Clerk#000000900|0| against the slyly ironic packages nag slyly ironic|61.2161|-149.876|61.2161|-149.876|
+740|44|O|83490.99|1995-07-16|3-MEDIUM|Clerk#000000583|0|courts haggle furiously across the final, regul|61.195|-149.834|61.195|-149.834|
+741|106|O|47985.98|1998-07-07|2-HIGH|Clerk#000000295|0|ic instructions. slyly express instructions solv|61.2038|-149.808|61.2038|-149.808|
+742|103|F|207632.55|1994-12-23|5-LOW|Clerk#000000543|0|equests? slyly ironic dolphins boost carefully above the blithely|61.1228|-149.862|61.1228|-149.862|
+743|79|O|23614.89|1996-10-04|4-NOT SPECIFIED|Clerk#000000933|0|eans. furiously ironic deposits sleep carefully carefully qui|61.2005|-149.785|61.2005|-149.785|
+768|98|O|220636.82|1996-08-20|3-MEDIUM|Clerk#000000411|0|jole slyly ironic packages. slyly even idea|61.181|-149.825|61.181|-149.825|
+769|80|F|43092.76|1993-06-02|3-MEDIUM|Clerk#000000172|0|ggle furiously. ironic packages haggle slyly. bold platelets affix s|61.1867|-149.919|61.1867|-149.919|
+770|32|O|64271.75|1998-05-23|5-LOW|Clerk#000000572|0|heodolites. furiously special pinto beans cajole pac|61.1955|-149.911|61.1955|-149.911|
+771|46|O|105302.05|1995-06-17|1-URGENT|Clerk#000000105|0|s. furiously final instructions across the deposit|61.1089|-149.858|61.1089|-149.858|
+772|97|F|128234.96|1993-04-17|2-HIGH|Clerk#000000430|0|s boost blithely fluffily idle ideas? fluffily even pin|61.1805|-149.889|61.1805|-149.889|
+773|133|F|146862.27|1993-09-26|3-MEDIUM|Clerk#000000307|0|tions are quickly accounts. accounts use bold, even pinto beans. gifts ag|61.1534|-149.985|61.1534|-149.985|
+774|80|O|145857.60|1995-12-04|1-URGENT|Clerk#000000883|0|tealthily even depths|61.1901|-149.911|61.1901|-149.911|
+775|134|F|59455.61|1995-03-18|1-URGENT|Clerk#000000191|0|kly express requests. fluffily silent accounts poach furiously|61.2122|-149.734|61.2122|-149.734|
+800|56|O|87892.38|1998-07-14|2-HIGH|Clerk#000000213|0|y alongside of the pending packages? final platelets nag fluffily carefu|61.1951|-149.906|61.1951|-149.906|
+801|118|F|127717.72|1992-02-18|1-URGENT|Clerk#000000186|0|iously from the furiously enticing reques|61.2043|-149.869|61.2043|-149.869|
+802|137|F|156381.95|1995-01-05|1-URGENT|Clerk#000000516|0|posits. ironic, pending requests cajole. even theodol|61.2036|-149.869|61.2036|-149.869|
+803|16|O|27629.66|1997-04-29|5-LOW|Clerk#000000260|0|ic instructions. even deposits haggle furiously at the deposits-- regular de|61.1883|-149.886|61.1883|-149.886|
+804|50|F|94400.43|1993-03-12|3-MEDIUM|Clerk#000000931|0|s. blithely final foxes are about the packag|61.2141|-149.864|61.2141|-149.864|
+805|127|O|90042.41|1995-07-05|4-NOT SPECIFIED|Clerk#000000856|0|y according to the fluffily |61.1955|-149.782|61.1955|-149.782|
+806|131|O|26839.16|1996-06-20|2-HIGH|Clerk#000000240|0| the ironic packages wake carefully fina|61.2183|-149.894|61.2183|-149.894|
+807|145|F|222392.53|1993-11-24|3-MEDIUM|Clerk#000000012|0|refully special tithes. blithely regular accoun|61.1417|-149.864|61.1417|-149.864|
+832|29|F|68494.08|1992-04-19|5-LOW|Clerk#000000495|0|xes. bravely regular packages sleep up the furiously bold accou|61.1883|-149.883|61.1883|-149.883|
+833|56|F|49033.69|1994-02-13|3-MEDIUM|Clerk#000000437|0|ts haggle quickly across the slyl|61.2161|-149.876|61.2161|-149.876|
+834|43|F|46459.92|1994-05-23|3-MEDIUM|Clerk#000000805|0| sleep. quickly even foxes are boldly. slyly express requests use slyly|61.2193|-149.869|61.2193|-149.869|
+835|65|O|62430.67|1995-10-08|4-NOT SPECIFIED|Clerk#000000416|0|s about the carefully special foxes haggle quickly about the|61.2191|-149.888|61.2191|-149.888|
+836|70|O|72843.48|1996-11-25|4-NOT SPECIFIED|Clerk#000000729|0|ely bold excuses sleep regular ideas. furiously unusual ideas wake furiou|61.2191|-149.888|61.2191|-149.888|
+837|116|F|60918.41|1994-06-15|4-NOT SPECIFIED|Clerk#000000563|0|kages sleep slyly above the ironic, final orbits|61.2191|-149.888|61.2191|-149.888|
+838|17|O|82918.36|1998-01-29|5-LOW|Clerk#000000213|0| slyly around the slyly even|61.2191|-149.888|61.2191|-149.888|
+839|28|O|70182.63|1995-08-08|1-URGENT|Clerk#000000951|0|the carefully even platelets. furiously unusual fo|61.2191|-149.888|61.2191|-149.888|
+864|139|O|74710.74|1997-08-17|1-URGENT|Clerk#000000036|0|ly after the slyly regular deposits. express, regular asymptotes nag ca|61.2191|-149.888|61.2191|-149.888|
+865|4|F|70430.54|1993-05-04|3-MEDIUM|Clerk#000000337|0|. special packages wake after the carefully final accounts. express pinto be|61.2191|-149.888|61.2191|-149.888|
+866|40|F|4766.19|1992-11-28|3-MEDIUM|Clerk#000000718|0|ins after the even, even accounts nod blithel|61.2191|-149.888|61.2191|-149.888|
+867|26|F|7471.75|1993-11-16|3-MEDIUM|Clerk#000000877|0|pades nag quickly final, |61.2191|-149.888|61.2191|-149.888|
+868|104|F|127345.45|1992-06-09|4-NOT SPECIFIED|Clerk#000000782|0|onic theodolites print carefully. blithely dogge|61.2191|-149.888|61.2191|-149.888|
+869|136|O|58932.19|1997-01-12|2-HIGH|Clerk#000000245|0|ar sheaves are slowly. slyly even attainments boost theodolites. furiously|61.2191|-149.888|61.2191|-149.888|
+870|34|F|40492.37|1993-06-20|4-NOT SPECIFIED|Clerk#000000123|0|blithely ironic ideas nod. sly, r|61.2191|-149.888|61.2191|-149.888|
+871|16|O|172861.58|1995-11-15|5-LOW|Clerk#000000882|0|oss the ironic theodolites.|61.1891|-149.906|61.1891|-149.906|
+896|2|F|169847.63|1993-03-09|1-URGENT|Clerk#000000187|0|inal packages eat blithely according to the warhorses. furiously quiet de|61.2191|-149.888|61.2191|-149.888|
+897|49|P|57697.44|1995-03-20|1-URGENT|Clerk#000000316|0| wake quickly against |61.2191|-149.888|61.2191|-149.888|
+898|55|F|101020.75|1993-06-03|2-HIGH|Clerk#000000611|0|. unusual pinto beans haggle quickly across |61.1101|-149.857|61.1101|-149.857|
+899|109|O|125562.09|1998-04-08|5-LOW|Clerk#000000575|0|rts engage carefully final theodolites.|61.1101|-149.857|61.1101|-149.857|
+900|46|F|120073.51|1994-10-01|4-NOT SPECIFIED|Clerk#000000060|0| fluffily express deposits nag furiousl|61.1101|-149.857|61.1101|-149.857|
+901|13|O|81826.12|1998-07-21|4-NOT SPECIFIED|Clerk#000000929|0|lyly even foxes are furious, silent requests. requests about the quickly |61.1101|-149.857|61.1101|-149.857|
+902|10|F|37348.62|1994-07-27|4-NOT SPECIFIED|Clerk#000000811|0|yly final requests over the furiously regula|61.1101|-149.857|61.1101|-149.857|
+903|11|O|109351.87|1995-07-07|4-NOT SPECIFIED|Clerk#000000793|0|e slyly about the final pl|61.1101|-149.857|61.1101|-149.857|
+928|67|F|228136.49|1995-03-02|5-LOW|Clerk#000000450|0|ithely express pinto beans. |61.1101|-149.857|61.1101|-149.857|
+929|83|F|109301.02|1992-10-02|2-HIGH|Clerk#000000160|0|its. furiously even foxes affix carefully finally silent accounts. express req|61.1101|-149.857|61.1101|-149.857|
+930|131|F|199102.23|1994-12-17|1-URGENT|Clerk#000000004|0| accounts nag slyly. ironic, ironic accounts wake blithel|61.1101|-149.857|61.1101|-149.857|
+931|103|F|117909.23|1992-12-07|1-URGENT|Clerk#000000881|0|ss packages haggle furiously express, regular deposits. even, e|61.1101|-149.857|61.1101|-149.857|
+932|41|O|40234.50|1997-05-16|2-HIGH|Clerk#000000218|0|ly express instructions boost furiously reg|61.1101|-149.857|61.1101|-149.857|
+933|97|F|71349.30|1992-08-05|4-NOT SPECIFIED|Clerk#000000752|0|ial courts wake permanently against the furiously regular ideas. unusual |61.1101|-149.857|61.1101|-149.857|
+934|52|O|17213.59|1996-07-03|1-URGENT|Clerk#000000229|0|ts integrate carefully. sly, regular deposits af|61.1101|-149.857|61.1101|-149.857|
+935|50|O|97733.87|1997-09-24|5-LOW|Clerk#000000180|0|iously final deposits cajole. blithely even packages |61.1101|-149.857|61.1101|-149.857|
+960|35|F|63537.13|1994-09-21|3-MEDIUM|Clerk#000000120|0|regular accounts. requests|61.1101|-149.857|61.1101|-149.857|
+961|56|P|158893.16|1995-06-04|4-NOT SPECIFIED|Clerk#000000720|0|ons nag furiously among the quickl|61.1101|-149.857|61.1101|-149.857|
+962|37|F|98258.73|1994-05-06|5-LOW|Clerk#000000463|0|ments nag deposits. fluffily ironic a|61.1101|-149.857|61.1101|-149.857|
+963|26|F|53287.25|1994-05-26|3-MEDIUM|Clerk#000000497|0|uses haggle carefully. slyly even dependencies after the packages ha|61.1101|-149.857|61.1101|-149.857|
+964|76|O|131146.47|1995-05-20|3-MEDIUM|Clerk#000000657|0|print blithely ironic, careful theodolit|61.1101|-149.857|61.1101|-149.857|
+965|70|P|41758.44|1995-05-15|5-LOW|Clerk#000000218|0|iously special packages. slyly pending requests are carefully |64.8591|-147.917|64.8591|-147.917|
+966|14|O|120516.93|1998-04-30|2-HIGH|Clerk#000000239|0|special deposits. furious|64.8273|-147.715|64.8273|-147.715|
+967|110|F|179287.95|1992-06-21|3-MEDIUM|Clerk#000000167|0|excuses engage quickly bold dep|64.8281|-147.715|64.8281|-147.715|
+992|55|O|133665.12|1997-11-11|3-MEDIUM|Clerk#000000875|0|ts. regular pinto beans thrash carefully sl|64.8552|-147.763|64.8552|-147.763|
+993|80|O|198238.65|1995-09-10|3-MEDIUM|Clerk#000000894|0|quickly express accounts among the furiously bol|64.8481|-147.684|64.8481|-147.684|
+994|2|F|41433.48|1994-04-20|5-LOW|Clerk#000000497|0|ole. slyly bold excuses nag caref|64.8522|-147.773|64.8522|-147.773|
+995|116|P|135157.92|1995-05-31|3-MEDIUM|Clerk#000000439|0|deas. blithely final deposits play. express accounts wake blithely caref|64.8467|-147.703|64.8467|-147.703|
+996|71|O|47447.63|1997-12-29|1-URGENT|Clerk#000000497|0|arefully final packages into the slyly final requests affix blit|64.8963|-147.662|64.8963|-147.662|
+997|109|O|27561.82|1997-05-19|2-HIGH|Clerk#000000651|0|ly express depths. furiously final requests haggle furiously. carefu|64.8372|-147.796|64.8372|-147.796|
+998|32|F|65269.38|1994-11-26|4-NOT SPECIFIED|Clerk#000000956|0|ronic dolphins. ironic, bold ideas haggle furiously furious|64.8312|-147.716|64.8312|-147.716|
+999|61|F|145249.13|1993-09-05|5-LOW|Clerk#000000464|0|pitaphs sleep. regular accounts use. f|64.811|-147.71|64.811|-147.71|
+1024|4|O|176084.63|1997-12-23|5-LOW|Clerk#000000903|0| blithely. even, express theodolites cajole slyly across|64.8971|-147.663|64.8971|-147.663|
+1025|103|F|82034.03|1995-05-05|2-HIGH|Clerk#000000376|0|ross the slyly final pa|64.85|-147.699|64.85|-147.699|
+1026|73|O|36464.76|1997-06-04|5-LOW|Clerk#000000223|0|s wake blithely. special acco|64.8389|-147.743|64.8389|-147.743|
+1027|128|F|112770.89|1992-06-03|3-MEDIUM|Clerk#000000241|0|equests cajole. slyly final pinto bean|64.781|-148|64.781|-148|
+1028|70|F|153864.67|1994-01-01|2-HIGH|Clerk#000000131|0|ts are. final, silent deposits are among the fl|64.8377|-147.718|64.8377|-147.718|
+1029|130|F|47440.91|1994-06-21|2-HIGH|Clerk#000000700|0|quests sleep. slyly even foxes wake quickly final theodolites. clo|64.8248|-147.886|64.8248|-147.886|
+1030|134|F|16346.94|1994-06-15|5-LOW|Clerk#000000422|0|ully ironic accounts sleep carefully. requests are carefully alongside of the |64.818|-147.679|64.818|-147.679|
+1031|4|F|128024.71|1994-09-01|3-MEDIUM|Clerk#000000448|0|s; ironic theodolites along the carefully ex|64.8271|-147.79|64.8271|-147.79|
+1056|28|F|38446.39|1995-02-11|1-URGENT|Clerk#000000125|0|t, even deposits hang about the slyly special i|64.8451|-147.812|64.8451|-147.812|
+1057|76|F|108107.42|1992-02-20|1-URGENT|Clerk#000000124|0|cuses dazzle carefully careful, ironic pinto beans. carefully even theod|64.8311|-147.729|64.8311|-147.729|
+1058|53|F|89359.11|1993-04-26|3-MEDIUM|Clerk#000000373|0|kly pending courts haggle. blithely regular sheaves integrate carefully fi|64.8454|-147.855|64.8454|-147.855|
+1059|127|F|198360.22|1994-02-27|1-URGENT|Clerk#000000104|0|en accounts. carefully bold packages cajole daringly special depende|64.8302|-147.744|64.8302|-147.744|
+1060|140|F|121994.04|1993-02-21|3-MEDIUM|Clerk#000000989|0|l platelets sleep quickly slyly special requests. furiously |64.8113|-147.91|64.8113|-147.91|
+1061|103|O|166947.75|1998-05-15|5-LOW|Clerk#000000576|0|uests sleep at the packages. fur|64.8271|-147.789|64.8271|-147.789|
+1062|106|O|39805.04|1997-01-15|1-URGENT|Clerk#000000152|0|eposits use blithely |64.8451|-147.698|64.8451|-147.698|
+1063|37|F|41392.31|1994-04-02|2-HIGH|Clerk#000000024|0|deposits nag quickly regular deposits. quickl|64.8586|-147.69|64.8586|-147.69|
+1088|148|F|47120.41|1992-05-21|5-LOW|Clerk#000000347|0|counts are blithely. platelets print. carefully |64.8507|-147.702|64.8507|-147.702|
+1089|49|O|103192.74|1996-05-04|4-NOT SPECIFIED|Clerk#000000226|0|ns haggle ruthlessly. even requests are quickly abov|64.8371|-147.716|64.8371|-147.716|
+1090|19|O|32929.30|1997-11-15|2-HIGH|Clerk#000000300|0| furiously regular platelets haggle along the slyly unusual foxes! |64.8449|-147.743|64.8449|-147.743|
+1091|83|O|35795.22|1996-08-27|1-URGENT|Clerk#000000549|0| even pinto beans haggle quickly alongside of the eve|64.8475|-147.706|64.8475|-147.706|
+1092|124|P|85552.21|1995-03-04|3-MEDIUM|Clerk#000000006|0|re quickly along the blithe|64.8452|-147.714|64.8452|-147.714|
+1093|101|O|79189.58|1997-07-31|4-NOT SPECIFIED|Clerk#000000159|0| after the carefully ironic requests. carefully ironic packages wake fluffil|64.8125|-147.787|64.8125|-147.787|
+1094|145|O|9006.25|1997-12-24|3-MEDIUM|Clerk#000000570|0|beans affix furiously about the pending, even deposits. finally pendi|55.3801|-131.682|55.3801|-131.682|
+1095|145|O|178491.24|1995-08-22|3-MEDIUM|Clerk#000000709|0|sly bold requests cajole carefully according to|55.3801|-131.682|55.3801|-131.682|
+1120|140|O|107958.62|1997-11-07|3-MEDIUM|Clerk#000000319|0|lly special requests. slyly pending platelets are quickly pending requ|55.3801|-131.682|55.3801|-131.682|
+1121|29|O|241837.88|1997-01-13|3-MEDIUM|Clerk#000000541|0|r escapades. deposits above the fluffily bold requests hag|55.3801|-131.682|55.3801|-131.682|
+1122|121|O|179747.47|1997-01-10|1-URGENT|Clerk#000000083|0|uffily carefully final theodolites. furiously express packages affix|55.3801|-131.682|55.3801|-131.682|
+1123|73|O|93259.93|1996-08-03|3-MEDIUM|Clerk#000000929|0|uriously pending requests. slyly regular instruction|55.3801|-131.682|55.3801|-131.682|
+1124|80|O|141858.97|1998-07-30|5-LOW|Clerk#000000326|0|regular pinto beans along the fluffily silent packages|55.3599|-131.687|55.3599|-131.687|
+1125|25|F|80438.38|1994-10-27|2-HIGH|Clerk#000000510|0|ithely final requests. i|55.4381|-131.803|55.4381|-131.803|
+1126|145|O|59982.31|1998-01-28|4-NOT SPECIFIED|Clerk#000000928|0|d slyly regular ideas: special ideas believe slyly. slyly ironic sheaves w|55.3751|-131.718|55.3751|-131.718|
+1127|58|O|103320.91|1995-09-19|4-NOT SPECIFIED|Clerk#000000397|0|usly silent, regular pinto beans. blithely express requests boos|55.3421|-131.641|55.3421|-131.641|
+1152|49|F|51775.54|1994-08-14|4-NOT SPECIFIED|Clerk#000000496|0|equests. deposits ab|55.3408|-131.64|55.3408|-131.64|
+1153|121|O|220727.97|1996-04-18|5-LOW|Clerk#000000059|0| across the pending deposi|55.2978|-131.534|55.2978|-131.534|
+1154|37|F|192417.85|1992-02-15|1-URGENT|Clerk#000000268|0|old asymptotes are special requests. blithely even deposits sleep furiously|55.3801|-131.682|55.3801|-131.682|
+1155|149|O|126902.81|1997-10-06|2-HIGH|Clerk#000000164|0|c deposits haggle among the ironic, even requests. carefully ironic sheaves n|55.3801|-131.682|55.3801|-131.682|
+1156|133|O|217682.81|1996-10-19|1-URGENT|Clerk#000000200|0| blithely ironic dolphins. furiously pendi|55.3421|-131.622|55.3421|-131.622|
+1157|97|O|85394.06|1998-01-14|4-NOT SPECIFIED|Clerk#000000207|0|out the regular excuses boost carefully against the furio|55.3801|-131.682|55.3801|-131.682|
+1158|142|O|31075.51|1996-06-30|2-HIGH|Clerk#000000549|0|integrate slyly furiously ironic deposit|55.3801|-131.682|55.3801|-131.682|
+1159|70|F|55553.68|1992-09-18|3-MEDIUM|Clerk#000000992|0|ts may sleep. requests according to the|55.3801|-131.682|55.3801|-131.682|
+1184|89|O|39700.29|1997-10-26|5-LOW|Clerk#000000777|0|iously even packages haggle fluffily care|55.3267|-131.523|55.3267|-131.523|
+1185|74|F|47033.21|1992-08-24|5-LOW|Clerk#000000344|0| even escapades are. package|55.3522|-131.685|55.3522|-131.685|
+1186|59|O|82026.18|1996-08-15|4-NOT SPECIFIED|Clerk#000000798|0|ingly regular pinto beans: instructi|55.5351|-133.014|55.5351|-133.014|
+1187|134|F|85948.02|1992-11-20|3-MEDIUM|Clerk#000000047|0|s after the furiously final deposits boost slyly under the|55.5351|-133.014|55.5351|-133.014|
+1188|20|O|54655.07|1996-04-11|2-HIGH|Clerk#000000256|0|ully ironic deposits. slyl|55.5351|-133.014|55.5351|-133.014|
+1189|46|F|71017.99|1994-04-09|1-URGENT|Clerk#000000243|0|f the even accounts. courts print blithely ironic accounts. sile|55.5351|-133.014|55.5351|-133.014|
+1190|13|O|31043.39|1997-03-16|5-LOW|Clerk#000000575|0|ccounts above the foxes integrate carefully after the |55.5351|-133.014|55.5351|-133.014|
+1191|112|O|28623.04|1995-11-07|3-MEDIUM|Clerk#000000011|0|uests nag furiously. carefully even requests|55.4691|-132.855|55.4691|-132.855|
+1216|122|F|68056.57|1992-12-07|5-LOW|Clerk#000000918|0|nal foxes around the e|55.5511|-133.081|55.5511|-133.081|
+1217|7|F|40982.08|1992-04-26|4-NOT SPECIFIED|Clerk#000000538|0| foxes nag quickly. ironic excuses nod. blithely pending|55.5351|-133.014|55.5351|-133.014|
+1218|10|F|99834.47|1994-06-20|4-NOT SPECIFIED|Clerk#000000994|0|s cajole. special, silent deposits about the theo|55.5531|-133.097|55.5531|-133.097|
+1219|28|O|10163.56|1995-10-05|3-MEDIUM|Clerk#000000800|0|od carefully. slyly final dependencies across the even fray|55.5351|-133.014|55.5351|-133.014|
+1220|49|O|122157.14|1996-08-29|1-URGENT|Clerk#000000712|0|inal theodolites wake. fluffily ironic asymptotes cajol|55.4726|-131.793|55.4726|-131.793|
+1221|14|F|117397.16|1992-04-19|4-NOT SPECIFIED|Clerk#000000852|0| detect against the silent, even deposits. carefully ironic|55.3801|-131.682|55.3801|-131.682|
+1222|10|F|47623.94|1993-02-05|3-MEDIUM|Clerk#000000811|0|theodolites use quickly even accounts. carefully final asympto|55.3801|-131.682|55.3801|-131.682|
+1223|10|O|26714.67|1996-05-25|4-NOT SPECIFIED|Clerk#000000238|0|posits was blithely fr|55.3801|-131.682|55.3801|-131.682|
+1248|49|F|210713.88|1992-01-02|1-URGENT|Clerk#000000890|0|t the carefully regular dugouts. s|61.5745|-149.562|61.5745|-149.562|
+1249|149|F|45889.09|1994-01-05|1-URGENT|Clerk#000000095|0|al ideas sleep above the pending pin|61.7321|-150.12|61.7321|-150.12|
+1250|37|F|12907.62|1992-09-29|4-NOT SPECIFIED|Clerk#000000652|0|ts after the fluffily pending instructions use slyly about the s|61.5421|-149.419|61.5421|-149.419|
+1251|38|O|109536.55|1997-10-30|1-URGENT|Clerk#000000276|0|, brave sauternes. deposits boost fluffily.|61.5722|-149.702|61.5722|-149.702|
+1252|149|O|93403.05|1997-08-04|5-LOW|Clerk#000000348|0|ng the slyly regular excuses. special courts nag furiously blithely e|61.5743|-149.405|61.5743|-149.405|
+1253|115|F|92730.74|1993-01-26|1-URGENT|Clerk#000000775|0| requests sleep furiously even foxes. ruthless packag|61.578|-149.441|61.578|-149.441|
+1254|70|O|94649.25|1995-12-22|1-URGENT|Clerk#000000607|0| pinto beans. carefully regular request|61.5826|-149.427|61.5826|-149.427|
+1255|122|F|62518.31|1994-05-30|4-NOT SPECIFIED|Clerk#000000798|0|ct slyly regular accounts. quick|61.5586|-149.351|61.5586|-149.351|
+1280|97|F|91664.85|1993-01-11|5-LOW|Clerk#000000160|0|posits thrash quickly after the theodolites. furiously iro|61.5844|-149.442|61.5844|-149.442|
+1281|62|F|165454.51|1994-12-11|1-URGENT|Clerk#000000430|0|counts. carefully pending accounts eat |61.5817|-149.472|61.5817|-149.472|
+1282|116|F|61297.42|1992-02-29|4-NOT SPECIFIED|Clerk#000000168|0|he quickly special packages. furiously final re|61.6141|-149.457|61.6141|-149.457|
+1283|118|O|202623.92|1996-08-30|4-NOT SPECIFIED|Clerk#000000260|0| pinto beans boost slyly ac|61.5761|-149.602|61.5761|-149.602|
+1284|134|O|106122.38|1996-01-07|2-HIGH|Clerk#000000492|0|s. blithely silent deposits s|61.6141|-149.457|61.6141|-149.457|
+1285|11|F|139124.72|1992-06-01|1-URGENT|Clerk#000000423|0|cial deposits cajole after the ironic requests. p|61.58|-149.434|61.58|-149.434|
+1286|109|F|207291.83|1993-05-14|4-NOT SPECIFIED|Clerk#000000939|0| deposits use carefully from the excuses. slyly bold p|61.6002|-149.429|61.6002|-149.429|
+1287|19|F|131432.42|1994-07-05|2-HIGH|Clerk#000000288|0|ly ironic dolphins integrate furiously among the final packages. st|61.569|-149.347|61.569|-149.347|
+1312|112|F|58111.00|1994-05-19|3-MEDIUM|Clerk#000000538|0|n, express accounts across the ironic|61.5812|-149.448|61.5812|-149.448|
+1313|148|F|46598.65|1994-09-13|1-URGENT|Clerk#000000774|0|ld accounts. regular deposits cajole. ironically pending theodolites use car|61.6141|-149.457|61.6141|-149.457|
+1314|143|F|56207.66|1994-05-13|3-MEDIUM|Clerk#000000485|0|ickly blithe packages nod ideas. furiously bold braids boost around the car|61.6141|-149.457|61.6141|-149.457|
+1315|22|O|121935.23|1998-03-22|5-LOW|Clerk#000000840|0|final theodolites alongside of the carefu|61.6141|-149.457|61.6141|-149.457|
+1316|16|F|163746.47|1993-12-03|1-URGENT|Clerk#000000857|0|ully bold theodolites? pending, bold pin|61.5969|-149.367|61.5969|-149.367|
+1317|100|P|139714.71|1995-05-19|2-HIGH|Clerk#000000373|0|sts. furiously special deposits lose fur|61.58|-149.4|61.58|-149.4|
+1318|128|O|81663.65|1998-06-27|3-MEDIUM|Clerk#000000581|0|s hang bold requests. pending, re|61.5848|-149.445|61.5848|-149.445|
+1319|32|O|31103.83|1996-09-27|2-HIGH|Clerk#000000257|0|y across the ruthlessly ironic accounts. unusu|61.5811|-149.444|61.5811|-149.444|
+1344|17|F|43809.37|1992-04-16|5-LOW|Clerk#000000178|0|omise close, silent requests. pending theodolites boost pending |61.5733|-149.389|61.5733|-149.389|
+1345|95|F|111207.93|1992-10-28|5-LOW|Clerk#000000447|0| regular tithes. quickly fluffy de|61.6141|-149.457|61.6141|-149.457|
+1346|76|F|171975.62|1992-06-18|2-HIGH|Clerk#000000374|0|ges sleep quickly-- even pint|61.5952|-149.436|61.5952|-149.436|
+1347|41|O|173444.60|1997-06-20|5-LOW|Clerk#000000977|0|he furiously even foxes use carefully express req|61.5421|-149.419|61.5421|-149.419|
+1348|19|O|94135.77|1998-04-18|5-LOW|Clerk#000000206|0|tly. quickly even deposi|61.5783|-149.362|61.5783|-149.362|
+1349|64|O|46376.09|1997-10-26|1-URGENT|Clerk#000000543|0|yly! blithely special theodolites cajole. unusual, reg|61.7321|-150.12|61.7321|-150.12|
+1350|52|F|49305.98|1993-08-24|1-URGENT|Clerk#000000635|0|iously about the blithely special a|61.5691|-149.328|61.5691|-149.328|
+1351|106|O|24637.96|1998-04-20|1-URGENT|Clerk#000000012|0| cajole. regular, special re|61.6141|-149.457|61.6141|-149.457|
+1376|47|O|23984.88|1997-05-04|4-NOT SPECIFIED|Clerk#000000730|0|der furiously final, final frets. carefull|61.5819|-149.3|61.5819|-149.3|
+1377|20|O|108334.30|1998-04-24|4-NOT SPECIFIED|Clerk#000000625|0|lly across the blithely express accounts. ironic excuses promise carefully de|61.6431|-149.289|61.6431|-149.289|
+1378|20|O|118495.12|1996-03-09|4-NOT SPECIFIED|Clerk#000000705|0| furiously even tithes cajole slyly among the quick|61.6431|-149.292|61.6431|-149.292|
+1379|65|O|84627.76|1998-05-25|5-LOW|Clerk#000000861|0|y deposits are caref|61.6228|-149.313|61.6228|-149.313|
+1380|137|O|94969.41|1996-07-07|3-MEDIUM|Clerk#000000969|0|inal deposits wake slyly daringly even requests. bold, even foxe|61.2125|-149.894|61.2125|-149.894|
+1381|127|O|58212.22|1998-05-25|3-MEDIUM|Clerk#000000107|0|even requests breach after the bold, ironic instructions. slyly even|61.1879|-149.886|61.1879|-149.886|
+1382|133|F|173522.71|1993-08-17|5-LOW|Clerk#000000241|0|fully final packages sl|61.1594|-149.835|61.1594|-149.835|
+1383|121|F|34797.72|1993-04-27|2-HIGH|Clerk#000000785|0|ts. express requests sleep blithel|61.2123|-149.854|61.2123|-149.854|
+1408|55|O|183965.61|1997-12-26|4-NOT SPECIFIED|Clerk#000000942|0|t the quickly final asymptotes. unusual|61.1951|-149.945|61.1951|-149.945|
+1409|143|F|72440.52|1992-12-31|4-NOT SPECIFIED|Clerk#000000065|0|ructions. furiously unusual excuses are regular, unusual theodolites. fin|61.2138|-149.856|61.2138|-149.856|
+1410|113|O|114879.19|1997-04-12|5-LOW|Clerk#000000123|0|iously along the bravely regular dolphins. pinto beans cajole furiously sp|61.1255|-149.864|61.1255|-149.864|
+1411|95|F|164462.61|1994-12-21|2-HIGH|Clerk#000000566|0|s. furiously special excuses across the pending pinto beans haggle sp|61.2066|-149.808|61.2066|-149.808|
+1412|53|F|78676.54|1993-03-13|4-NOT SPECIFIED|Clerk#000000083|0|uffily daring theodolit|61.2138|-149.896|61.2138|-149.896|
+1413|91|O|75733.58|1997-06-14|3-MEDIUM|Clerk#000000342|0|, ironic instructions. carefully even packages dazzle|61.2161|-149.876|61.2161|-149.876|
+1414|77|O|38057.81|1995-08-16|1-URGENT|Clerk#000000883|0|ccounts. ironic foxes haggle car|61.1594|-149.888|61.1594|-149.888|
+1415|79|F|24654.79|1994-05-29|4-NOT SPECIFIED|Clerk#000000601|0|rays. blithely final ideas affix quickl|61.1806|-149.775|61.1806|-149.775|
+1440|98|O|50201.16|1995-08-10|5-LOW|Clerk#000000956|0| pending requests. closely s|61.1101|-149.857|61.1101|-149.857|
+1441|122|O|156477.94|1997-03-06|4-NOT SPECIFIED|Clerk#000000156|0|ter the excuses. ironic dependencies m|61.1541|-149.958|61.1541|-149.958|
+1442|112|F|7108.12|1994-07-05|4-NOT SPECIFIED|Clerk#000000935|0|nal pinto beans. slyly ironic ideas cajol|61.1268|-149.947|61.1268|-149.947|
+1443|44|O|44672.03|1996-12-16|5-LOW|Clerk#000000185|0|x blithely against the carefully final somas. even asymptotes are. quickly spe|61.0931|-149.785|61.0931|-149.785|
+1444|134|F|207907.60|1994-12-06|3-MEDIUM|Clerk#000000783|0|ove the bold accounts cajole fluffily about|61.1901|-149.892|61.1901|-149.892|
+1445|115|F|154653.32|1995-01-10|3-MEDIUM|Clerk#000000211|0|even packages wake fluffily |61.2183|-149.889|61.2183|-149.889|
+1446|41|O|27663.16|1998-02-16|5-LOW|Clerk#000000274|0|lly regular notornis above the requests sleep final accounts! |61.2164|-149.882|61.2164|-149.882|
+1447|91|F|108171.38|1992-10-15|2-HIGH|Clerk#000000880|0|inly against the blithely pending excuses. regular, pe|61.2161|-149.876|61.2161|-149.876|
+1472|149|O|65331.05|1996-10-06|5-LOW|Clerk#000000303|0|y special dolphins around the final dependencies wake quick|61.219|-149.792|61.219|-149.792|
+1473|94|O|80624.38|1997-03-17|3-MEDIUM|Clerk#000000960|0|furiously close accoun|61.2188|-149.892|61.2188|-149.892|
+1474|70|F|51697.18|1995-01-09|1-URGENT|Clerk#000000438|0|detect quickly above the carefully even |61.2143|-149.837|61.2143|-149.837|
+1475|5|O|185496.66|1997-11-12|2-HIGH|Clerk#000000972|0|cally final packages boost. blithely ironic packa|61.1608|-149.835|61.1608|-149.835|
+1476|145|O|18795.62|1996-06-27|2-HIGH|Clerk#000000673|0|ding accounts hinder alongside of the quickly pending requests. fluf|61.1886|-149.944|61.1886|-149.944|
+1477|76|O|231831.35|1997-08-24|5-LOW|Clerk#000000612|0|ly bold foxes. final ideas would cajo|61.1201|-149.89|61.1201|-149.89|
+1478|50|O|20791.50|1997-08-03|2-HIGH|Clerk#000000827|0|lessly. carefully express|61.1201|-149.89|61.1201|-149.89|
+1479|16|O|31471.04|1995-12-16|4-NOT SPECIFIED|Clerk#000000697|0|he furiously even foxes. thinly bold deposits|61.1585|-149.872|61.1585|-149.872|
+1504|2|F|89399.40|1992-08-28|3-MEDIUM|Clerk#000000381|0|, brave deposits. bold de|61.195|-149.892|61.195|-149.892|
+1505|37|F|55892.35|1992-08-21|2-HIGH|Clerk#000000544|0|s. slyly ironic packages cajole. carefully regular packages haggle |61.0895|-149.694|61.0895|-149.694|
+1506|148|F|195844.84|1992-09-21|3-MEDIUM|Clerk#000000620|0| dependencies. accounts affix blithely slowly unusual deposits. slyly regular |61.2201|-149.831|61.2201|-149.831|
+1507|121|F|96166.92|1993-10-14|3-MEDIUM|Clerk#000000305|0|stealthy, ironic de|61.1663|-149.867|61.1663|-149.867|
+1508|103|O|151282.65|1998-04-10|5-LOW|Clerk#000000117|0| after the furiously regular pinto beans hang slyly quickly ironi|61.2138|-149.906|61.2138|-149.906|
+1509|64|F|180455.98|1993-07-08|5-LOW|Clerk#000000770|0|the regular ideas. regul|61.2193|-149.902|61.2193|-149.902|
+1510|53|O|154590.05|1996-09-17|5-LOW|Clerk#000000128|0|ld carefully. furiously final asymptotes haggle furiously|61.1201|-149.89|61.1201|-149.89|
+1511|79|O|59651.38|1996-12-22|4-NOT SPECIFIED|Clerk#000000386|0|ts above the depend|61.1601|-149.984|61.1601|-149.984|
+1536|94|O|5184.26|1997-01-26|3-MEDIUM|Clerk#000000117|0|ges are! furiously final deposits cajole iron|61.1101|-149.857|61.1101|-149.857|
+1537|109|F|108317.51|1992-02-15|4-NOT SPECIFIED|Clerk#000000862|0|g to the even deposits. ironic, final packages |61.1101|-149.857|61.1101|-149.857|
+1538|29|O|179554.41|1995-06-18|4-NOT SPECIFIED|Clerk#000000258|0| instructions. regular theod|61.1101|-149.857|61.1101|-149.857|
+1539|112|F|39612.63|1995-03-10|5-LOW|Clerk#000000840|0|nstructions boost pa|61.1101|-149.857|61.1101|-149.857|
+1540|16|F|128014.15|1992-08-05|2-HIGH|Clerk#000000927|0|r ideas hinder blithe|61.1101|-149.857|61.1101|-149.857|
+1541|94|P|47286.32|1995-05-18|1-URGENT|Clerk#000000906|0|y. slyly ironic warhorses around the furiously regul|61.1101|-149.857|61.1101|-149.857|
+1542|143|F|132972.24|1993-09-15|3-MEDIUM|Clerk#000000435|0|t the furiously close deposits do was f|61.1101|-149.857|61.1101|-149.857|
+1543|52|O|139047.22|1997-02-20|1-URGENT|Clerk#000000398|0|unts. furiously pend|61.1101|-149.857|61.1101|-149.857|
+1568|17|O|76119.72|1997-01-30|4-NOT SPECIFIED|Clerk#000000554|0|d notornis. carefully |61.1101|-149.857|61.1101|-149.857|
+1569|104|O|87803.55|1998-04-02|5-LOW|Clerk#000000786|0|orbits. fluffily even decoys serve blithely. furiously furious realms nag acro|61.1101|-149.857|61.1101|-149.857|
+1570|124|O|35589.57|1998-03-16|1-URGENT|Clerk#000000745|0|pinto beans haggle furiousl|61.1101|-149.857|61.1101|-149.857|
+1571|103|F|151404.78|1992-12-05|2-HIGH|Clerk#000000565|0|ously furiously bold warthogs. slyly ironic instructions are quickly a|61.1101|-149.857|61.1101|-149.857|
+1572|11|O|47232.79|1996-02-24|2-HIGH|Clerk#000000994|0|fluffily ironic accounts haggle blithely final platelets! slyly regular foxes|61.1101|-149.857|61.1101|-149.857|
+1573|148|F|86918.57|1992-12-28|2-HIGH|Clerk#000000940|0|ess, ironic deposits use along the carefu|61.1101|-149.857|61.1101|-149.857|
+1574|134|O|179923.54|1996-12-12|3-MEDIUM|Clerk#000000809|0| ideas hinder after the carefully unusual |61.1101|-149.857|61.1101|-149.857|
+1575|145|O|197031.52|1995-09-13|3-MEDIUM|Clerk#000000497|0|. furiously regular dep|61.1101|-149.857|61.1101|-149.857|
+1600|94|F|130515.61|1993-03-03|3-MEDIUM|Clerk#000000627|0|tions cajole quietly above the regular, silent requests. slyly fin|61.1101|-149.857|61.1101|-149.857|
+1601|53|F|73962.95|1994-08-27|5-LOW|Clerk#000000469|0|ent deposits are ca|61.1101|-149.857|61.1101|-149.857|
+1602|1|F|4225.26|1993-08-05|5-LOW|Clerk#000000660|0|deposits. busily silent instructions haggle furiously. fin|61.1101|-149.857|61.1101|-149.857|
+1603|2|F|29305.47|1993-07-31|4-NOT SPECIFIED|Clerk#000000869|0|s. slyly silent deposits boo|61.1101|-149.857|61.1101|-149.857|
+1604|113|F|107139.29|1993-07-17|5-LOW|Clerk#000000512|0|lithely silent waters. blithely unusual packages alongside |61.1101|-149.857|61.1101|-149.857|
+1605|58|O|130687.64|1998-04-24|4-NOT SPECIFIED|Clerk#000000616|0|sleep furiously? ruthless, even pinto beans |61.1101|-149.857|61.1101|-149.857|
+1606|53|O|115877.40|1997-04-17|4-NOT SPECIFIED|Clerk#000000550|0|r requests. quickly even platelets breach before the ironically|61.1101|-149.857|61.1101|-149.857|
+1607|149|O|166335.03|1995-12-16|2-HIGH|Clerk#000000498|0| bold, pending foxes haggle. slyly silent |61.1101|-149.857|61.1101|-149.857|
+1632|67|O|183286.33|1997-01-08|3-MEDIUM|Clerk#000000351|0|onic requests are accounts. bold a|61.1101|-149.857|61.1101|-149.857|
+1633|16|O|52359.51|1995-10-14|2-HIGH|Clerk#000000666|0|y silent accounts sl|61.1101|-149.857|61.1101|-149.857|
+1634|70|O|145898.47|1996-09-10|1-URGENT|Clerk#000000360|0|arefully blithely ironic requests. slyly unusual instructions alongside|61.1101|-149.857|61.1101|-149.857|
+1635|4|O|70232.26|1997-02-13|3-MEDIUM|Clerk#000000958|0|s. slyly ironic requests affix slyly |61.1101|-149.857|61.1101|-149.857|
+1636|79|O|172021.87|1997-06-17|3-MEDIUM|Clerk#000000457|0|ding requests. slyly ironic courts wake quickl|61.1101|-149.857|61.1101|-149.857|
+1637|73|F|180912.15|1995-02-08|4-NOT SPECIFIED|Clerk#000000189|0| final accounts. blithely silent ideas cajole bravely. carefully express |61.1101|-149.857|61.1101|-149.857|
+1638|139|O|172436.30|1997-08-13|2-HIGH|Clerk#000000643|0|he fluffily regular asymp|61.1101|-149.857|61.1101|-149.857|
+1639|5|O|104166.56|1995-08-20|4-NOT SPECIFIED|Clerk#000000939|0|haggle furiously. final requests detect furious|61.1101|-149.857|61.1101|-149.857|
+1664|64|O|178060.22|1996-03-03|1-URGENT|Clerk#000000090|0|y quickly even asymptotes. furiously regular packages haggle quickly fin|61.1101|-149.857|61.1101|-149.857|
+1665|76|F|4819.91|1994-05-08|2-HIGH|Clerk#000000920|0|ly regular packages are fluffily even ideas. fluffily final|61.1101|-149.857|61.1101|-149.857|
+1666|95|O|128367.97|1995-10-18|1-URGENT|Clerk#000000849|0|ffily pending dependencies wake fluffily. pending, final accounts |61.1101|-149.857|61.1101|-149.857|
+1667|5|O|125030.37|1997-10-10|2-HIGH|Clerk#000000103|0|e accounts. slyly express accounts must are a|64.8459|-147.759|64.8459|-147.759|
+1668|142|O|137576.19|1997-07-12|4-NOT SPECIFIED|Clerk#000000148|0|eodolites. carefully dogged dolphins haggle q|64.8426|-147.725|64.8426|-147.725|
+1669|2|O|24362.39|1997-06-09|3-MEDIUM|Clerk#000000663|0|er ironic requests detect furiously blithely sp|64.9401|-147.402|64.9401|-147.402|
+1670|25|O|89999.72|1997-05-24|2-HIGH|Clerk#000000320|0|unusual dependencies. furiously special platelets main|64.9401|-147.402|64.9401|-147.402|
+1671|35|O|104391.11|1996-07-27|4-NOT SPECIFIED|Clerk#000000275|0|ly. slyly pending requests was above the |64.8331|-147.647|64.8331|-147.647|
+1696|4|O|102665.03|1998-01-08|4-NOT SPECIFIED|Clerk#000000041|0|bravely bold accounts above the quickly bold|64.8371|-147.716|64.8371|-147.716|
+1697|76|O|122621.31|1996-10-07|1-URGENT|Clerk#000000815|0|o x-ray blithely. pl|64.8574|-147.759|64.8574|-147.759|
+1698|40|O|141118.87|1997-04-23|2-HIGH|Clerk#000000432|0|slyly. carefully express deposit|64.836|-147.727|64.836|-147.727|
+1699|85|F|66408.29|1993-12-30|1-URGENT|Clerk#000000125|0|jole blithely. furiously un|64.8132|-147.76|64.8132|-147.76|
+1700|65|O|89143.36|1996-06-15|3-MEDIUM|Clerk#000000328|0|ely final dolphins wake sometimes above the quietly regular deposits. fur|64.8451|-147.96|64.8451|-147.96|
+1701|130|F|72835.95|1992-05-19|2-HIGH|Clerk#000000395|0|furiously. regular, close theodoli|64.8891|-147.851|64.8891|-147.851|
+1702|67|P|194119.31|1995-05-07|2-HIGH|Clerk#000000300|0|around the carefully final deposits cajole carefully according to the b|64.8151|-147.707|64.8151|-147.707|
+1703|134|F|121220.59|1993-01-28|3-MEDIUM|Clerk#000000463|0| pinto beans poach. bold courts boost. regular, express deposits at|64.8363|-147.803|64.8363|-147.803|
+1728|64|O|131604.34|1996-05-22|2-HIGH|Clerk#000000711|0|beans. slyly regular instructions sleep! slyly final packages|64.8298|-147.611|64.8298|-147.611|
+1729|133|F|12137.76|1992-05-19|2-HIGH|Clerk#000000158|0|pending foxes wake. accounts|64.8989|-147.701|64.8989|-147.701|
+1730|124|O|150886.49|1998-07-24|5-LOW|Clerk#000000794|0| fluffily pending deposits serve. furiously even requests wake furiou|64.8371|-147.716|64.8371|-147.716|
+1731|128|O|190490.78|1996-01-06|1-URGENT|Clerk#000000268|0|lithely regular, final instructions. ironic, express packages are above|64.8147|-147.706|64.8147|-147.706|
+1732|146|F|179854.51|1993-11-29|5-LOW|Clerk#000000903|0|inal requests integrate dolph|64.8451|-147.812|64.8451|-147.812|
+1733|148|O|165489.52|1996-05-12|2-HIGH|Clerk#000000789|0|e carefully according to the accounts. furiously pending instructions sleep|64.8386|-147.788|64.8386|-147.788|
+1734|7|F|44002.53|1994-06-11|2-HIGH|Clerk#000000722|0| final ideas haggle. blithely quick foxes sleep busily bold ideas. i|64.8372|-147.768|64.8372|-147.768|
+1735|22|F|98541.95|1992-12-27|1-URGENT|Clerk#000000458|0|ully idle requests wake qu|64.8151|-147.707|64.8151|-147.707|
+1760|115|O|82151.12|1996-05-17|5-LOW|Clerk#000000917|0| deposits. busily regular deposits wake blithely along the furiously even re|64.843|-147.722|64.843|-147.722|
+1761|106|F|211925.95|1993-12-24|2-HIGH|Clerk#000000817|0|efully slyly bold frets. packages boost b|64.8426|-147.725|64.8426|-147.725|
+1762|77|F|202227.17|1994-08-20|4-NOT SPECIFIED|Clerk#000000653|0|ly ironic packages. furi|64.8615|-147.723|64.8615|-147.723|
+1763|121|O|140685.01|1996-10-29|2-HIGH|Clerk#000000321|0|es. bold dependencies haggle furiously along |64.8694|-147.067|64.8694|-147.067|
+1764|29|F|47384.71|1992-03-25|1-URGENT|Clerk#000000182|0|. slyly final packages integrate carefully acro|64.8404|-147.724|64.8404|-147.724|
+1765|73|O|36551.43|1995-12-03|4-NOT SPECIFIED|Clerk#000000490|0| regular excuses wake slyly|64.9686|-147.577|64.9686|-147.577|
+1766|139|O|41032.81|1996-10-12|2-HIGH|Clerk#000000983|0|unusual deposits affix quickly beyond the carefully s|64.8497|-147.732|64.8497|-147.732|
+1767|25|P|136582.60|1995-03-14|2-HIGH|Clerk#000000327|0|eposits use carefully carefully regular platelets. quickly regular packages al|64.8861|-147.587|64.8861|-147.587|
+1792|49|F|107919.86|1993-11-09|5-LOW|Clerk#000000102|0|ructions haggle along the pending packages. carefully speci|64.8508|-147.703|64.8508|-147.703|
+1793|19|F|82504.56|1992-07-12|4-NOT SPECIFIED|Clerk#000000291|0|regular packages cajole. blithely special packages according to the final d|64.841|-147.72|64.841|-147.72|
+1794|140|O|179462.21|1997-09-28|1-URGENT|Clerk#000000686|0|ally silent pinto beans. regular package|64.8375|-147.721|64.8375|-147.721|
+1795|94|F|146849.33|1994-03-19|2-HIGH|Clerk#000000815|0| quickly final packages! blithely dogged accounts c|64.849|-147.813|64.849|-147.813|
+1796|47|F|33755.47|1992-11-21|2-HIGH|Clerk#000000245|0|eans use furiously around th|55.3801|-131.682|55.3801|-131.682|
+1797|125|O|51494.47|1996-05-07|3-MEDIUM|Clerk#000000508|0|quiet platelets haggle since the quickly ironic instructi|55.3801|-131.682|55.3801|-131.682|
+1798|52|O|46393.97|1997-07-28|1-URGENT|Clerk#000000741|0|al foxes are blithe|55.3603|-131.702|55.3603|-131.702|
+1799|61|F|46815.93|1994-03-07|4-NOT SPECIFIED|Clerk#000000339|0|ns sleep furiously final waters. blithely regular instructions h|55.7511|-132.865|55.7511|-132.865|
+1824|49|F|81351.53|1994-05-05|1-URGENT|Clerk#000000972|0|e blithely fluffily|55.7511|-132.865|55.7511|-132.865|
+1825|148|F|150582.77|1993-12-05|3-MEDIUM|Clerk#000000345|0|ironic, final accou|60.3311|-151.284|60.3311|-151.284|
+1826|82|F|124719.97|1992-04-16|4-NOT SPECIFIED|Clerk#000000718|0|the even asymptotes dazzle fluffily slyly regular asymptotes. final, unu|60.3311|-151.284|60.3311|-151.284|
+1827|106|O|210113.88|1996-06-22|4-NOT SPECIFIED|Clerk#000000369|0|luffily even requests haggle sly|60.3311|-151.284|60.3311|-151.284|
+1828|32|F|137369.50|1994-04-18|3-MEDIUM|Clerk#000000840|0|y quickly bold packag|60.4341|-151.283|60.4341|-151.283|
+1829|112|F|127532.20|1994-05-08|2-HIGH|Clerk#000000537|0| accounts wake above the furiously unusual requests. pending package|60.3311|-151.284|60.3311|-151.284|
+1830|133|F|85122.24|1995-02-23|1-URGENT|Clerk#000000045|0|according to the even,|60.3311|-151.284|60.3311|-151.284|
+1831|71|F|58032.77|1993-12-02|1-URGENT|Clerk#000000854|0| accounts. carefully even accounts boost furiously. regular ideas engage. |60.3311|-151.284|60.3311|-151.284|
+1856|106|F|189361.42|1992-03-20|4-NOT SPECIFIED|Clerk#000000952|0|. special pinto beans run acr|60.3311|-151.284|60.3311|-151.284|
+1857|133|F|102793.59|1993-01-13|2-HIGH|Clerk#000000083|0|hely final ideas slee|60.3311|-151.284|60.3311|-151.284|
+1858|143|O|30457.91|1997-12-13|1-URGENT|Clerk#000000389|0|thely. slyly final deposits sleep|60.4311|-151.286|60.4311|-151.286|
+1859|61|O|105094.09|1997-04-11|4-NOT SPECIFIED|Clerk#000000949|0| the foxes. bravely special excuses nag carefully special r|60.3311|-151.284|60.3311|-151.284|
+1860|10|O|9103.40|1996-04-04|3-MEDIUM|Clerk#000000556|0|osits. quickly bold deposits according to |60.3311|-151.284|60.3311|-151.284|
+1861|70|F|95063.41|1994-01-03|3-MEDIUM|Clerk#000000847|0|r the fluffily close sauternes. furio|60.3311|-151.284|60.3311|-151.284|
+1862|34|O|97981.06|1998-02-24|5-LOW|Clerk#000000348|0|ts snooze ironically abou|60.3311|-151.284|60.3311|-151.284|
+1863|74|F|96359.65|1993-09-23|4-NOT SPECIFIED|Clerk#000000658|0|old sentiments. careful, |60.3191|-151.296|60.3191|-151.296|
+1888|121|F|224724.11|1993-10-31|4-NOT SPECIFIED|Clerk#000000659|0|olites. pinto beans cajole. regular deposits affix. slyly regular|60.3311|-151.284|60.3311|-151.284|
+1889|25|O|96431.77|1997-03-16|1-URGENT|Clerk#000000854|0|p around the regular notornis. unusual deposits|60.3311|-151.284|60.3311|-151.284|
+1890|10|O|202364.58|1996-12-18|4-NOT SPECIFIED|Clerk#000000627|0|romise final, regular deposits. regular fox|60.3311|-151.284|60.3311|-151.284|
+1891|61|F|76848.96|1994-12-15|5-LOW|Clerk#000000495|0|unusual foxes sleep regular deposits. requests wake special pac|60.5563|-151.241|60.5563|-151.241|
+1892|25|F|133273.64|1994-03-26|5-LOW|Clerk#000000733|0|sts. slyly regular dependencies use slyly. ironic, spec|60.6331|-151.163|60.6331|-151.163|
+1893|125|O|116792.13|1997-10-30|2-HIGH|Clerk#000000111|0|olites. silent, special deposits eat slyly quickly express packages; hockey p|60.6331|-151.163|60.6331|-151.163|
+1894|76|F|44387.23|1992-03-30|1-URGENT|Clerk#000000626|0|e furiously. furiously even accounts are slyly final accounts. closely speci|60.6331|-151.163|60.6331|-151.163|
+1895|7|F|44429.81|1994-05-30|3-MEDIUM|Clerk#000000878|0|ress accounts. bold accounts cajole. slyly final pinto beans poach regul|60.6331|-151.163|60.6331|-151.163|
+1920|110|O|119605.91|1998-06-24|5-LOW|Clerk#000000018|0|hely; furiously regular excuses|60.5551|-151.245|60.5551|-151.245|
+1921|88|F|57584.12|1994-01-18|3-MEDIUM|Clerk#000000293|0|counts. slyly quiet requests along the ruthlessly regular accounts are |60.6331|-151.163|60.6331|-151.163|
+1922|56|O|11575.77|1996-07-13|3-MEDIUM|Clerk#000000984|0|side of the blithely final re|60.5506|-151.141|60.5506|-151.141|
+1923|136|O|171128.10|1997-07-07|1-URGENT|Clerk#000000471|0| express dolphins. |60.5681|-151.281|60.5681|-151.281|
+1924|76|O|169756.19|1996-09-07|4-NOT SPECIFIED|Clerk#000000823|0| of the ironic accounts. instructions near the final instr|60.5465|-151.147|60.5465|-151.147|
+1925|17|F|146382.71|1992-03-05|1-URGENT|Clerk#000000986|0|e slyly regular deposits. furiously |60.6331|-151.163|60.6331|-151.163|
+1926|94|O|100035.03|1996-01-31|2-HIGH|Clerk#000000568|0|cajole. even warhorses sleep carefully. |60.5578|-151.116|60.5578|-151.116|
+1927|140|O|23327.88|1995-09-30|3-MEDIUM|Clerk#000000616|0|riously special packages. permanent pearls wake furiously. even packages alo|61.6182|-149.385|61.6182|-149.385|
+1952|67|F|12896.25|1994-03-16|2-HIGH|Clerk#000000254|0| silent accounts boost |61.6141|-149.457|61.6141|-149.457|
+1953|149|F|57213.18|1993-11-30|3-MEDIUM|Clerk#000000891|0| fluffily along the quickly even packages. |61.5765|-149.407|61.5765|-149.407|
+1954|56|O|158853.63|1997-05-31|4-NOT SPECIFIED|Clerk#000000104|0| unusual excuses cajole according to the blithely regular theodolites.|61.6091|-149.77|61.6091|-149.77|
+1955|13|F|103085.13|1992-04-20|1-URGENT|Clerk#000000792|0|ly special ideas. sometimes final |61.5821|-149.438|61.5821|-149.438|
+1956|127|F|88704.26|1992-09-20|4-NOT SPECIFIED|Clerk#000000600|0|ironic ideas are silent ideas. furiously final deposits sleep slyly carefu|61.6183|-149.373|61.6183|-149.373|
+1957|31|O|77482.87|1998-07-21|2-HIGH|Clerk#000000639|0|nding excuses about the |61.6131|-149.403|61.6131|-149.403|
+1958|53|O|176294.34|1995-09-22|5-LOW|Clerk#000000343|0| haggle blithely. flu|61.6352|-149.265|61.6352|-149.265|
+1959|43|O|62277.18|1997-01-13|4-NOT SPECIFIED|Clerk#000000631|0| cajole about the blithely express requests. even excuses mold bl|61.5751|-149.645|61.5751|-149.645|
+1984|52|O|79230.47|1998-04-01|1-URGENT|Clerk#000000416|0| slyly special instructions. unusual foxes use packages. carefully regular req|61.6168|-149.374|61.6168|-149.374|
+1985|7|F|171522.54|1994-09-02|4-NOT SPECIFIED|Clerk#000000741|0|slyly slyly even pains. slyly reg|61.5939|-149.43|61.5939|-149.43|
+1986|149|F|34269.96|1994-05-05|2-HIGH|Clerk#000000609|0|across the theodolites. quick|61.5792|-149.495|61.5792|-149.495|
+1987|100|F|6406.29|1994-04-30|2-HIGH|Clerk#000000652|0|gular platelets alongside |61.6141|-149.457|61.6141|-149.457|
+1988|109|O|117132.72|1995-10-06|4-NOT SPECIFIED|Clerk#000000011|0|ly ironic dolphins serve quickly busy accounts. bu|61.5829|-149.448|61.5829|-149.448|
+1989|118|F|39263.28|1994-03-16|4-NOT SPECIFIED|Clerk#000000747|0|ely bold pinto beans ha|61.5938|-149.387|61.5938|-149.387|
+1990|119|F|48781.39|1994-12-16|2-HIGH|Clerk#000000114|0|e bold patterns. always regul|61.5849|-149.38|61.5849|-149.38|
+1991|19|F|139854.41|1992-09-07|4-NOT SPECIFIED|Clerk#000000854|0|ing accounts can haggle at the carefully final Tiresias-- pending, regular|61.5729|-149.389|61.5729|-149.389|
+2016|8|O|24347.36|1996-08-16|3-MEDIUM|Clerk#000000641|0|the carefully ironic foxes. requests nag bold, r|61.5823|-149.462|61.5823|-149.462|
+2017|101|O|70529.27|1998-05-13|3-MEDIUM|Clerk#000000427|0|nusual requests. blit|61.57|-149.331|61.57|-149.331|
+2018|19|P|25007.95|1995-04-05|4-NOT SPECIFIED|Clerk#000000920|0|gular accounts wake fur|61.5821|-149.438|61.5821|-149.438|
+2019|136|F|43789.14|1992-10-23|1-URGENT|Clerk#000000565|0| furiously bold packages. fluffily fi|61.6141|-149.457|61.6141|-149.457|
+2020|73|F|136162.13|1993-06-21|3-MEDIUM|Clerk#000000192|0|es. furiously regular packages above the furiously special theodolites are a|61.6115|-149.331|61.6115|-149.331|
+2021|70|O|27016.74|1995-07-15|1-URGENT|Clerk#000000155|0|ong the furiously regular requests. unusual deposits wake fluffily inside|61.6091|-149.77|61.6091|-149.77|
+2022|62|F|206742.11|1992-03-15|1-URGENT|Clerk#000000268|0| dependencies sleep fluffily even, ironic deposits. express, silen|61.6141|-149.457|61.6141|-149.457|
+2023|118|F|144123.37|1992-05-06|5-LOW|Clerk#000000137|0|ular courts engage according to the|61.5826|-149.427|61.5826|-149.427|
+2048|17|F|33401.77|1993-11-15|1-URGENT|Clerk#000000934|0|s cajole after the blithely final accounts. f|61.5976|-149.366|61.5976|-149.366|
+2049|31|O|153048.74|1995-12-07|2-HIGH|Clerk#000000859|0|ly regular requests thrash blithely about the fluffily even theodolites. r|61.5976|-149.366|61.5976|-149.366|
+2050|28|F|208517.98|1994-06-02|4-NOT SPECIFIED|Clerk#000000821|0|d accounts against the furiously regular packages use bli|61.5531|-149.651|61.5531|-149.651|
+2051|40|O|87988.34|1996-03-18|4-NOT SPECIFIED|Clerk#000000333|0|ctions sleep blithely. blithely regu|61.5531|-149.651|61.5531|-149.651|
+2052|91|F|141822.19|1992-04-13|2-HIGH|Clerk#000000767|0| requests sleep around the even, even courts. ironic theodolites affix furious|61.5883|-149.456|61.5883|-149.456|
+2053|142|F|125125.57|1995-02-07|1-URGENT|Clerk#000000717|0|ar requests: blithely sly accounts boost carefully across t|61.6249|-149.435|61.6249|-149.435|
+2054|41|F|144335.16|1992-06-08|4-NOT SPECIFIED|Clerk#000000103|0|l requests affix carefully about the furiously special|61.6141|-149.457|61.6141|-149.457|
+2055|97|F|57092.26|1993-09-04|1-URGENT|Clerk#000000067|0|. warhorses affix slyly blithely express instructions? fur|61.5709|-149.452|61.5709|-149.452|
+2080|95|F|45767.69|1993-06-18|5-LOW|Clerk#000000190|0|ironic, pending theodolites are carefully about the quickly regular theodolite|61.6651|-149.465|61.6651|-149.465|
+2081|121|O|145654.97|1997-07-05|2-HIGH|Clerk#000000136|0|ong the regular theo|61.5841|-149.441|61.5841|-149.441|
+2082|49|F|46753.63|1995-01-10|2-HIGH|Clerk#000000354|0|cial accounts. ironic, express dolphins nod slyly sometimes final reques|61.1571|-149.883|61.1571|-149.883|
+2083|101|F|31795.52|1993-07-14|3-MEDIUM|Clerk#000000361|0|al patterns. bold, final foxes nag bravely about the furiously express|61.2198|-149.733|61.2198|-149.733|
+2084|80|F|190652.53|1993-03-17|2-HIGH|Clerk#000000048|0|zle furiously final, careful packages. slyly ironic ideas amo|61.1863|-149.976|61.1863|-149.976|
+2085|49|F|45311.07|1993-11-21|3-MEDIUM|Clerk#000000818|0|ress, express ideas haggle|61.2161|-149.876|61.2161|-149.876|
+2086|142|F|188985.18|1994-10-19|1-URGENT|Clerk#000000046|0| permanently regular|61.2031|-149.749|61.2031|-149.749|
+2087|50|O|53581.41|1998-01-31|2-HIGH|Clerk#000000626|0|e always regular packages nod against the furiously spec|61.1644|-149.897|61.1644|-149.897|
+2112|64|O|17986.15|1997-02-05|2-HIGH|Clerk#000000351|0|against the slyly even id|61.1834|-149.866|61.1834|-149.866|
+2113|32|O|65678.21|1997-11-08|2-HIGH|Clerk#000000527|0|slyly regular instruct|61.1731|-149.889|61.1731|-149.889|
+2114|79|F|106446.02|1995-01-16|5-LOW|Clerk#000000751|0|r, unusual accounts haggle across the busy platelets. carefully |61.1089|-149.854|61.1089|-149.854|
+2115|106|O|134814.65|1998-05-23|4-NOT SPECIFIED|Clerk#000000101|0|odolites boost. carefully regular excuses cajole. quickly ironic pinto be|61.1951|-149.916|61.1951|-149.916|
+2116|23|F|60887.90|1994-08-26|1-URGENT|Clerk#000000197|0|efully after the asymptotes. furiously sp|61.2157|-149.821|61.2157|-149.821|
+2117|22|O|145713.03|1997-04-26|2-HIGH|Clerk#000000887|0|ely even dependencies. regular foxes use blithely.|61.1372|-149.954|61.1372|-149.954|
+2118|134|O|38974.67|1996-10-09|1-URGENT|Clerk#000000196|0|ial requests wake carefully special packages. f|61.1955|-149.737|61.1955|-149.737|
+2119|64|O|34632.57|1996-08-20|2-HIGH|Clerk#000000434|0|uickly pending escapades. fluffily ir|61.1444|-149.867|61.1444|-149.867|
+2144|136|F|119917.28|1994-03-29|3-MEDIUM|Clerk#000000546|0|t. carefully quick requests across the deposits wake regu|61.2178|-149.882|61.2178|-149.882|
+2145|134|F|18885.35|1992-10-03|1-URGENT|Clerk#000000886|0|sts would snooze blithely alongside of th|61.1824|-149.849|61.1824|-149.849|
+2146|118|F|179686.07|1992-09-14|4-NOT SPECIFIED|Clerk#000000476|0|ven packages. dependencies wake slyl|61.2161|-149.876|61.2161|-149.876|
+2147|100|F|91513.79|1992-09-06|4-NOT SPECIFIED|Clerk#000000424|0| haggle carefully furiously final foxes. pending escapades thrash. bold theod|61.2022|-149.84|61.2022|-149.84|
+2148|130|F|19612.03|1995-04-19|4-NOT SPECIFIED|Clerk#000000517|0|ross the furiously unusual theodolites. always expre|61.2099|-149.762|61.2099|-149.762|
+2149|101|F|105145.40|1993-03-13|5-LOW|Clerk#000000555|0|nusual accounts nag furiously special reques|61.1951|-149.84|61.1951|-149.84|
+2150|82|F|166961.06|1994-06-03|3-MEDIUM|Clerk#000000154|0|ect slyly against the even, final packages. quickly regular pinto beans wake c|61.1069|-149.859|61.1069|-149.859|
+2151|58|O|124608.69|1996-11-11|3-MEDIUM|Clerk#000000996|0|c requests. ironic platelets cajole across the quickly fluffy deposits.|61.1635|-149.881|61.1635|-149.881|
+2176|104|F|87248.17|1992-11-10|1-URGENT|Clerk#000000195|0|s haggle regularly accor|61.1201|-149.89|61.1201|-149.89|
+2177|136|O|183493.42|1997-01-20|3-MEDIUM|Clerk#000000161|0|ove the blithely unusual packages cajole carefully fluffily special request|61.1902|-149.908|61.1902|-149.908|
+2178|8|O|79594.68|1996-12-12|3-MEDIUM|Clerk#000000656|0|thely according to the instructions. furious|61.2104|-149.857|61.2104|-149.857|
+2179|41|O|77487.09|1996-09-07|2-HIGH|Clerk#000000935|0|ounts alongside of the furiously unusual braids cajol|61.1771|-149.97|61.1771|-149.97|
+2180|76|O|208481.57|1996-09-14|4-NOT SPECIFIED|Clerk#000000650|0|xpress, unusual pains. furiously ironic excu|61.1859|-149.976|61.1859|-149.976|
+2181|76|O|100954.64|1995-09-13|3-MEDIUM|Clerk#000000814|0|y against the ironic, even|61.2171|-149.9|61.2171|-149.9|
+2182|23|F|116003.11|1994-04-05|2-HIGH|Clerk#000000071|0|ccounts. quickly bold deposits across the excuses sl|61.1162|-149.755|61.1162|-149.755|
+2183|113|O|49841.12|1996-06-22|1-URGENT|Clerk#000000287|0| among the express, ironic packages. slyly ironic platelets integrat|61.1381|-149.844|61.1381|-149.844|
+2208|68|P|245388.06|1995-05-01|4-NOT SPECIFIED|Clerk#000000900|0|symptotes wake slyly blithely unusual packages.|61.1775|-149.941|61.1775|-149.941|
+2209|91|F|129086.93|1992-07-10|2-HIGH|Clerk#000000056|0|er above the slyly silent requests. furiously reg|61.1938|-149.878|61.1938|-149.878|
+2210|32|F|31689.46|1992-01-16|2-HIGH|Clerk#000000941|0| believe carefully quickly express pinto beans. deposi|61.1571|-149.883|61.1571|-149.883|
+2211|92|F|140031.23|1994-06-30|2-HIGH|Clerk#000000464|0|ffily bold courts e|61.1541|-149.958|61.1541|-149.958|
+2212|118|F|17231.05|1994-03-23|3-MEDIUM|Clerk#000000954|0|structions above the unusual requests use fur|61.135|-149.88|61.135|-149.88|
+2213|122|F|146136.10|1993-01-15|4-NOT SPECIFIED|Clerk#000000598|0|osits are carefully reg|61.1101|-149.857|61.1101|-149.857|
+2214|115|O|150345.63|1998-05-05|3-MEDIUM|Clerk#000000253|0|packages. fluffily even accounts haggle blithely. carefully ironic depen|61.1101|-149.857|61.1101|-149.857|
+2215|40|O|108239.46|1996-06-16|4-NOT SPECIFIED|Clerk#000000817|0|le final, final foxes. quickly regular gifts are carefully deposit|61.1101|-149.857|61.1101|-149.857|
+2240|56|F|174090.30|1992-03-06|4-NOT SPECIFIED|Clerk#000000622|0|accounts against the slyly express foxes are after the slyly regular |61.1101|-149.857|61.1101|-149.857|
+2241|103|F|165219.08|1993-05-11|1-URGENT|Clerk#000000081|0|y about the silent excuses. furiously ironic instructions along the sil|61.1101|-149.857|61.1101|-149.857|
+2242|82|O|15082.82|1997-07-20|4-NOT SPECIFIED|Clerk#000000360|0| pending multipliers. carefully express asymptotes use quickl|61.1101|-149.857|61.1101|-149.857|
+2243|49|O|10451.97|1995-06-10|2-HIGH|Clerk#000000813|0|ously regular deposits integrate s|61.1101|-149.857|61.1101|-149.857|
+2244|127|F|21207.08|1993-01-09|1-URGENT|Clerk#000001000|0|ckages. ironic, ironic accounts haggle blithely express excuses. |61.1101|-149.857|61.1101|-149.857|
+2245|58|F|150585.73|1993-04-28|3-MEDIUM|Clerk#000000528|0|ake carefully. braids haggle slyly quickly b|61.1101|-149.857|61.1101|-149.857|
+2246|113|O|85755.84|1996-05-27|4-NOT SPECIFIED|Clerk#000000739|0| final gifts sleep |61.1101|-149.857|61.1101|-149.857|
+2247|95|F|13491.31|1992-08-02|4-NOT SPECIFIED|Clerk#000000947|0|furiously regular packages. final brai|61.1101|-149.857|61.1101|-149.857|
+2272|139|F|127934.71|1993-04-13|2-HIGH|Clerk#000000449|0|s. bold, ironic pinto beans wake. silently specia|61.1101|-149.857|61.1101|-149.857|
+2273|136|O|142291.79|1996-12-14|5-LOW|Clerk#000000155|0|uickly express foxes haggle quickly against|61.1101|-149.857|61.1101|-149.857|
+2274|104|F|58273.89|1993-09-04|4-NOT SPECIFIED|Clerk#000000258|0|nstructions try to hag|61.1101|-149.857|61.1101|-149.857|
+2275|149|F|37398.90|1992-10-22|4-NOT SPECIFIED|Clerk#000000206|0| furiously furious platelets. slyly final packa|61.1101|-149.857|61.1101|-149.857|
+2276|43|O|141159.63|1996-04-29|4-NOT SPECIFIED|Clerk#000000821|0|ecial requests. fox|61.1101|-149.857|61.1101|-149.857|
+2277|89|F|79270.23|1995-01-02|4-NOT SPECIFIED|Clerk#000000385|0|accounts cajole. even i|61.1101|-149.857|61.1101|-149.857|
+2278|142|O|101878.46|1998-04-25|3-MEDIUM|Clerk#000000186|0|r pinto beans integrate after the carefully even deposits. blit|61.1101|-149.857|61.1101|-149.857|
+2279|80|F|142322.33|1993-02-23|3-MEDIUM|Clerk#000000898|0|de of the quickly unusual instructio|61.2141|-149.864|61.2141|-149.864|
+2304|46|F|93769.28|1994-01-07|4-NOT SPECIFIED|Clerk#000000415|0|onic platelets. ironic packages haggle. packages nag doggedly according to|61.2171|-149.9|61.2171|-149.9|
+2305|43|F|122964.66|1993-01-26|2-HIGH|Clerk#000000440|0|ove the furiously even acco|61.2171|-149.9|61.2171|-149.9|
+2306|28|O|244704.23|1995-07-26|2-HIGH|Clerk#000000975|0| wake furiously requests. permanent requests affix. final packages caj|61.2171|-149.9|61.2171|-149.9|
+2307|106|F|59417.76|1993-06-29|5-LOW|Clerk#000000952|0|furiously even asymptotes? carefully regular accounts|61.2171|-149.9|61.2171|-149.9|
+2308|25|F|58546.02|1992-10-25|4-NOT SPECIFIED|Clerk#000000609|0|ts. slyly final depo|61.2171|-149.9|61.2171|-149.9|
+2309|100|O|146933.07|1995-09-04|5-LOW|Clerk#000000803|0|he carefully pending packages. fluffily stealthy foxes engage carefully|61.2171|-149.9|61.2171|-149.9|
+2310|31|O|82928.12|1996-09-20|5-LOW|Clerk#000000917|0|wake carefully. unusual instructions nag ironic, regular excuse|61.2171|-149.9|61.2171|-149.9|
+2311|73|P|153233.93|1995-05-02|2-HIGH|Clerk#000000761|0|ly pending asymptotes-- furiously bold excus|61.2171|-149.9|61.2171|-149.9|
+2336|142|O|22294.51|1996-01-07|4-NOT SPECIFIED|Clerk#000000902|0|c, final excuses sleep furiously among the even theodolites. f|61.2171|-149.9|61.2171|-149.9|
+2337|142|O|45704.96|1997-06-18|4-NOT SPECIFIED|Clerk#000000754|0| quickly. final accounts haggle. carefully final acco|61.2171|-149.9|61.2171|-149.9|
+2338|140|O|28155.92|1997-09-15|2-HIGH|Clerk#000000951|0|riously final dugouts. final, ironic packages wake express, ironic id|61.2171|-149.9|61.2171|-149.9|
+2339|109|F|63470.78|1993-12-15|5-LOW|Clerk#000000847|0| against the regular |61.2171|-149.9|61.2171|-149.9|
+2340|65|O|30778.78|1996-01-12|1-URGENT|Clerk#000000964|0|ter the deposits sleep according to the slyly regular packages. carefully |61.2171|-149.9|61.2171|-149.9|
+2341|82|F|55950.21|1993-05-30|5-LOW|Clerk#000000443|0|sts-- blithely bold dolphins through the deposits nag blithely carefully re|61.2171|-149.9|61.2171|-149.9|
+2342|37|O|104038.78|1996-06-09|1-URGENT|Clerk#000000615|0|oost carefully across the regular accounts. blithely final d|61.2171|-149.9|61.2171|-149.9|
+2343|73|O|85381.00|1995-08-21|3-MEDIUM|Clerk#000000170|0|fluffily over the slyly special deposits. quickl|64.8487|-147.704|64.8487|-147.704|
+2368|13|F|101240.96|1993-08-20|1-URGENT|Clerk#000000830|0|t the bold instructions. carefully unusual |64.8486|-147.705|64.8486|-147.705|
+2369|110|O|73517.91|1996-12-24|2-HIGH|Clerk#000000752|0|iously even requests are dogged, express |64.8087|-147.71|64.8087|-147.71|
+2370|142|F|73924.21|1994-01-17|1-URGENT|Clerk#000000231|0|lyly final packages. quickly final deposits haggl|64.8363|-147.758|64.8363|-147.758|
+2371|19|O|193857.67|1998-01-07|1-URGENT|Clerk#000000028|0|ckages haggle at th|64.8476|-147.704|64.8476|-147.704|
+2372|31|O|104927.66|1997-11-21|5-LOW|Clerk#000000342|0|s: deposits haggle along the final ideas. careful|64.8302|-147.744|64.8302|-147.744|
+2373|28|F|55211.04|1994-03-12|4-NOT SPECIFIED|Clerk#000000306|0| even, special courts grow quickly. pending,|64.8476|-147.812|64.8476|-147.812|
+2374|4|F|115219.88|1993-10-29|4-NOT SPECIFIED|Clerk#000000081|0| blithely regular packages. blithely unusua|64.8144|-147.756|64.8144|-147.756|
+2375|5|O|106612.48|1996-11-20|3-MEDIUM|Clerk#000000197|0|unusual, pending theodolites cajole carefully |64.8183|-147.778|64.8183|-147.778|
+2400|37|O|92798.66|1998-07-25|5-LOW|Clerk#000000782|0|nusual courts nag against the carefully unusual pinto b|64.8494|-147.818|64.8494|-147.818|
+2401|148|O|88448.24|1997-07-29|4-NOT SPECIFIED|Clerk#000000531|0|ully unusual instructions boost carefully silently regular requests. |64.849|-147.822|64.849|-147.822|
+2402|67|O|70403.62|1996-09-06|4-NOT SPECIFIED|Clerk#000000162|0|slyly final sheaves sleep slyly. q|64.8367|-147.716|64.8367|-147.716|
+2403|55|O|111020.79|1998-04-11|3-MEDIUM|Clerk#000000820|0|furiously regular deposits use. furiously unusual accounts wake along the |64.8127|-147.772|64.8127|-147.772|
+2404|77|O|109077.69|1997-03-13|4-NOT SPECIFIED|Clerk#000000409|0|deposits breach furiously. ironic foxes haggle carefully bold packag|64.8143|-147.751|64.8143|-147.751|
+2405|73|O|115929.14|1996-12-23|3-MEDIUM|Clerk#000000535|0|ular, regular asympto|64.842|-147.721|64.842|-147.721|
+2406|7|O|182516.77|1996-10-28|5-LOW|Clerk#000000561|0|blithely regular accounts u|64.8403|-147.714|64.8403|-147.714|
+2407|55|O|112843.52|1998-06-19|2-HIGH|Clerk#000000068|0|uests affix slyly among the slyly regular depos|64.8371|-147.881|64.8371|-147.881|
+2432|103|O|62661.93|1996-07-13|1-URGENT|Clerk#000000115|0|re. slyly even deposits wake bra|64.8151|-147.707|64.8151|-147.707|
+2433|31|F|147071.86|1994-08-22|4-NOT SPECIFIED|Clerk#000000324|0|ess patterns are slyly. packages haggle carefu|64.8151|-147.707|64.8151|-147.707|
+2434|25|O|123956.25|1997-04-27|3-MEDIUM|Clerk#000000190|0|s. quickly ironic dolphins impress final deposits. blithel|64.8541|-147.81|64.8541|-147.81|
+2435|73|F|122490.66|1993-02-21|5-LOW|Clerk#000000112|0|es are carefully along the carefully final instructions. pe|64.8878|-147.496|64.8878|-147.496|
+2436|125|O|73990.08|1995-09-11|4-NOT SPECIFIED|Clerk#000000549|0|arefully. blithely bold deposits affix special accounts. final foxes nag. spe|64.8299|-147.728|64.8299|-147.728|
+2437|85|F|143411.69|1993-04-21|4-NOT SPECIFIED|Clerk#000000578|0|. theodolites wake slyly-- ironic, pending platelets above the carefully exp|64.8132|-147.762|64.8132|-147.762|
+2438|13|F|214494.39|1993-07-15|2-HIGH|Clerk#000000744|0|the final, regular warhorses. regularly |64.8372|-147.713|64.8372|-147.713|
+2439|55|O|41811.12|1997-03-15|2-HIGH|Clerk#000000819|0|lithely after the car|64.7927|-148.036|64.7927|-148.036|
+2464|145|O|30495.65|1997-11-23|5-LOW|Clerk#000000633|0|le about the instructions. courts wake carefully even|64.8717|-147.819|64.8717|-147.819|
+2465|34|O|180737.75|1995-06-24|1-URGENT|Clerk#000000078|0|al pinto beans. final, bold packages wake quickly|64.8527|-147.686|64.8527|-147.686|
+2466|19|F|161625.50|1994-03-06|1-URGENT|Clerk#000000424|0|c pinto beans. express deposits wake quickly. even, final courts nag. package|64.8371|-147.811|64.8371|-147.811|
+2467|35|O|7231.91|1995-07-16|4-NOT SPECIFIED|Clerk#000000914|0|pades sleep furiously. sometimes regular packages again|64.846|-147.705|64.846|-147.705|
+2468|112|O|160627.01|1997-06-09|4-NOT SPECIFIED|Clerk#000000260|0|ickly regular packages. slyly ruthless requests snooze quickly blithe|64.9064|-147.726|64.9064|-147.726|
+2469|124|O|192074.23|1996-11-26|5-LOW|Clerk#000000730|0| sleep closely regular instructions. furiously ironic instructi|64.9347|-147.56|64.9347|-147.56|
+2470|58|O|104966.33|1997-04-19|3-MEDIUM|Clerk#000000452|0|to the furiously final packages? pa|64.8861|-147.677|64.8861|-147.677|
+2471|89|O|34936.31|1998-03-12|4-NOT SPECIFIED|Clerk#000000860|0|carefully blithely regular pac|64.8302|-147.744|64.8302|-147.744|
+2496|136|F|140390.60|1994-01-09|2-HIGH|Clerk#000000142|0|slyly. pending instructions sleep. quic|60.6673|-151.311|60.6673|-151.311|
+2497|47|F|171326.48|1992-08-27|1-URGENT|Clerk#000000977|0|ily ironic pinto beans. furiously final platelets alongside of t|60.6997|-151.38|60.6997|-151.38|
+2498|97|F|45514.27|1993-11-08|5-LOW|Clerk#000000373|0|g the slyly special pinto beans. |60.5658|-151.244|60.5658|-151.244|
+2499|121|O|147243.86|1995-09-24|1-URGENT|Clerk#000000277|0|r the quickly bold foxes. bold instructi|60.6331|-151.163|60.6331|-151.163|
+2500|133|F|131122.82|1992-08-15|2-HIGH|Clerk#000000447|0|integrate slyly pending deposits. furiously ironic accounts across the s|60.6331|-151.163|60.6331|-151.163|
+2501|67|O|79380.51|1997-05-25|5-LOW|Clerk#000000144|0|ickly special theodolite|60.6331|-151.163|60.6331|-151.163|
+2502|70|F|33470.40|1993-05-28|4-NOT SPECIFIED|Clerk#000000914|0|lyly: carefully pending ideas affix again|60.6201|-151.332|60.6201|-151.332|
+2503|7|F|183671.08|1993-06-20|3-MEDIUM|Clerk#000000294|0|ly even packages was. ironic, regular deposits unwind furiously across the p|60.5004|-151.276|60.5004|-151.276|
+2528|55|F|92069.62|1994-11-20|1-URGENT|Clerk#000000789|0|ular dependencies? regular frays kindle according to the blith|60.6331|-151.163|60.6331|-151.163|
+2529|136|O|4104.30|1996-08-20|2-HIGH|Clerk#000000511|0|posits across the silent instructions wake blithely across |60.6331|-151.163|60.6331|-151.163|
+2530|128|F|58853.11|1994-03-21|3-MEDIUM|Clerk#000000291|0|ular instructions about the quic|60.6901|-151.321|60.6901|-151.321|
+2531|44|O|143212.85|1996-05-06|4-NOT SPECIFIED|Clerk#000000095|0|even accounts. furiously ironic excuses sleep fluffily. carefully silen|60.6676|-151.29|60.6676|-151.29|
+2532|94|O|116093.49|1995-10-11|2-HIGH|Clerk#000000498|0|the blithely pending accounts. regular, regular excuses boost aro|60.6331|-151.163|60.6331|-151.163|
+2533|50|O|168495.03|1997-03-24|1-URGENT|Clerk#000000594|0|ecial instructions. spec|60.5632|-151.266|60.5632|-151.266|
+2534|76|O|202784.54|1996-07-17|3-MEDIUM|Clerk#000000332|0|packages cajole ironic requests. furiously regular|60.6331|-151.163|60.6331|-151.163|
+2535|121|F|67018.30|1993-05-25|5-LOW|Clerk#000000296|0|phins cajole beneath the fluffily express asymptotes. c|60.6331|-151.163|60.6331|-151.163|
+2560|131|F|153426.79|1992-09-05|1-URGENT|Clerk#000000538|0|atelets; quickly sly requests|60.6509|-151.342|60.6509|-151.342|
+2561|58|O|137473.58|1997-11-14|1-URGENT|Clerk#000000861|0|ual requests. unusual deposits cajole furiously pending, regular platelets. |60.5601|-151.107|60.5601|-151.107|
+2562|10|F|136360.37|1992-08-01|1-URGENT|Clerk#000000467|0|elets. pending dolphins promise slyly. bo|60.5123|-151.275|60.5123|-151.275|
+2563|62|F|168952.10|1993-11-19|4-NOT SPECIFIED|Clerk#000000150|0|sly even packages after the furio|60.6076|-151.325|60.6076|-151.325|
+2564|77|F|3967.47|1994-09-09|2-HIGH|Clerk#000000718|0|usly regular pinto beans. orbits wake carefully. slyly e|60.6331|-151.163|60.6331|-151.163|
+2565|56|O|204438.57|1998-02-28|3-MEDIUM|Clerk#000000032|0|x-ray blithely along|60.5175|-151.235|60.5175|-151.235|
+2566|86|F|89992.48|1992-10-10|3-MEDIUM|Clerk#000000414|0|ructions boost bold ideas. idly ironic accounts use according to th|60.5535|-151.108|60.5535|-151.108|
+2567|70|O|263411.29|1998-02-27|2-HIGH|Clerk#000000031|0|detect. furiously ironic requests|60.5614|-151.275|60.5614|-151.275|
+2592|101|F|8225.96|1993-03-05|4-NOT SPECIFIED|Clerk#000000524|0|ts nag fluffily. quickly stealthy theodolite|60.5647|-151.195|60.5647|-151.195|
+2593|92|F|134726.09|1993-09-04|2-HIGH|Clerk#000000468|0|r the carefully final|60.6331|-151.163|60.6331|-151.163|
+2594|79|F|94866.39|1992-12-17|1-URGENT|Clerk#000000550|0|ests. theodolites above the blithely even accounts detect furio|60.6331|-151.163|60.6331|-151.163|
+2595|74|O|173130.20|1995-12-14|4-NOT SPECIFIED|Clerk#000000222|0|arefully ironic requests nag carefully ideas. |60.6331|-151.163|60.6331|-151.163|
+2596|43|O|74940.13|1996-08-17|1-URGENT|Clerk#000000242|0|requests. ironic, bold theodolites wak|60.6331|-151.163|60.6331|-151.163|
+2597|104|F|21964.66|1993-02-04|2-HIGH|Clerk#000000757|0|iously ruthless exc|60.6331|-151.163|60.6331|-151.163|
+2598|112|O|84871.50|1996-03-05|3-MEDIUM|Clerk#000000391|0| ironic notornis according to the blithely final requests should |60.6678|-151.31|60.6678|-151.31|
+2599|149|O|62807.13|1996-11-07|2-HIGH|Clerk#000000722|0|ts. slyly regular theodolites wake sil|60.5003|-151.276|60.5003|-151.276|
+2624|52|O|27148.63|1996-11-28|5-LOW|Clerk#000000930|0|ic, regular packages|60.6331|-151.163|60.6331|-151.163|
+2625|40|F|39382.74|1992-10-14|4-NOT SPECIFIED|Clerk#000000386|0| final deposits. blithely ironic ideas |61.5855|-149.326|61.5855|-149.326|
+2626|139|O|84314.51|1995-09-08|4-NOT SPECIFIED|Clerk#000000289|0|gside of the carefully special packages are furiously after the slyly express |61.5979|-149.437|61.5979|-149.437|
+2627|149|F|26798.65|1992-03-24|3-MEDIUM|Clerk#000000181|0|s. silent, ruthless requests|61.6141|-149.457|61.6141|-149.457|
+2628|56|F|165655.99|1993-10-22|5-LOW|Clerk#000000836|0|ajole across the blithely careful accounts. blithely silent deposits sl|61.5799|-149.461|61.5799|-149.461|
+2629|139|O|96458.03|1998-04-06|5-LOW|Clerk#000000680|0|uches dazzle carefully even, express excuses. ac|61.5845|-149.337|61.5845|-149.337|
+2630|85|F|127132.51|1992-10-24|5-LOW|Clerk#000000712|0|inal theodolites. ironic instructions s|61.5351|-149.558|61.5351|-149.558|
+2631|37|F|63103.32|1993-09-24|5-LOW|Clerk#000000833|0| quickly unusual deposits doubt around |61.5811|-149.45|61.5811|-149.45|
+2656|77|F|105492.37|1993-05-04|1-URGENT|Clerk#000000307|0|elets. slyly final accou|61.5793|-149.442|61.5793|-149.442|
+2657|25|O|148176.06|1995-10-17|2-HIGH|Clerk#000000160|0| foxes-- slyly final dependencies around the slyly final theodo|61.5661|-149.313|61.5661|-149.313|
+2658|14|O|163834.46|1995-09-23|3-MEDIUM|Clerk#000000400|0|bout the slyly regular accounts. ironic, |61.6141|-149.457|61.6141|-149.457|
+2659|83|F|79785.52|1993-12-18|4-NOT SPECIFIED|Clerk#000000758|0|cross the pending requests maintain |61.5786|-149.332|61.5786|-149.332|
+2660|127|O|16922.51|1995-08-05|5-LOW|Clerk#000000480|0|ly finally regular deposits. ironic theodolites cajole|61.5811|-149.45|61.5811|-149.45|
+2661|74|O|106036.84|1997-01-04|3-MEDIUM|Clerk#000000217|0|al, regular pinto beans. silently final deposits should have t|61.5825|-149.429|61.5825|-149.429|
+2662|37|O|87689.88|1996-08-21|3-MEDIUM|Clerk#000000589|0|bold pinto beans above the slyly final accounts affix furiously deposits. pac|61.6141|-149.457|61.6141|-149.457|
+2663|95|O|35131.80|1995-09-06|1-URGENT|Clerk#000000950|0|ar requests. furiously final dolphins along the fluffily spe|61.5531|-149.651|61.5531|-149.651|
+2688|98|F|181077.36|1992-01-24|2-HIGH|Clerk#000000720|0|have to nag according to the pending theodolites. sly|61.5531|-149.651|61.5531|-149.651|
+2689|103|F|41552.78|1992-04-09|4-NOT SPECIFIED|Clerk#000000698|0|press pains wake. furiously express theodolites alongsid|61.5698|-149.62|61.5698|-149.62|
+2690|94|O|224674.27|1996-03-31|3-MEDIUM|Clerk#000000760|0|ravely even theodolites |61.6141|-149.457|61.6141|-149.457|
+2691|7|F|30137.17|1992-04-30|5-LOW|Clerk#000000439|0|es at the regular deposits sleep slyly by the fluffy requests. eve|61.5474|-149.458|61.5474|-149.458|
+2692|62|O|24265.24|1997-12-02|3-MEDIUM|Clerk#000000878|0|es. regular asymptotes cajole above t|61.5825|-149.429|61.5825|-149.429|
+2693|19|O|66158.13|1996-09-04|1-URGENT|Clerk#000000370|0|ndle never. blithely regular packages nag carefully enticing platelets. ca|61.5955|-149.423|61.5955|-149.423|
+2694|121|O|102807.59|1996-03-14|5-LOW|Clerk#000000722|0| requests. bold deposits above the theodol|61.5801|-149.461|61.5801|-149.461|
+2695|58|O|138584.20|1996-08-20|1-URGENT|Clerk#000000697|0|ven deposits around the quickly regular packa|61.5785|-149.415|61.5785|-149.415|
+2720|31|F|161307.05|1993-06-08|1-URGENT|Clerk#000000948|0|quickly. special asymptotes are fluffily ironi|61.6402|-149.34|61.6402|-149.34|
+2721|79|O|59180.25|1996-01-27|2-HIGH|Clerk#000000401|0| ideas eat even, unusual ideas. theodolites are carefully|61.583|-149.457|61.583|-149.457|
+2722|35|F|50328.84|1994-04-09|5-LOW|Clerk#000000638|0|rding to the carefully quick deposits. bli|61.5907|-149.295|61.5907|-149.295|
+2723|61|O|104759.25|1995-10-06|5-LOW|Clerk#000000836|0|nts must have to cajo|61.6141|-149.457|61.6141|-149.457|
+2724|137|F|116069.66|1994-09-14|2-HIGH|Clerk#000000217|0| sleep blithely. blithely idle |61.5933|-149.397|61.5933|-149.397|
+2725|89|F|75144.68|1994-05-21|4-NOT SPECIFIED|Clerk#000000835|0|ular deposits. spec|61.6091|-149.77|61.6091|-149.77|
+2726|7|F|47753.00|1992-11-27|5-LOW|Clerk#000000470|0| blithely even dinos sleep care|61.577|-149.411|61.577|-149.411|
+2727|74|O|3089.42|1998-04-19|4-NOT SPECIFIED|Clerk#000000879|0|sual theodolites cajole enticingly above the furiously fin|61.6078|-149.322|61.6078|-149.322|
+2752|59|F|187932.30|1993-11-19|2-HIGH|Clerk#000000648|0| carefully regular foxes are quickly quickl|61.6131|-149.397|61.6131|-149.397|
+2753|16|F|159720.39|1993-11-30|2-HIGH|Clerk#000000380|0|ending instructions. unusual deposits|61.6648|-149.372|61.6648|-149.372|
+2754|145|F|25985.52|1994-04-03|2-HIGH|Clerk#000000960|0|cies detect slyly. |61.5531|-149.651|61.5531|-149.651|
+2755|118|F|101202.18|1992-02-07|4-NOT SPECIFIED|Clerk#000000177|0|ously according to the sly foxes. blithely regular pinto bean|61.5811|-149.45|61.5811|-149.45|
+2756|118|F|142323.38|1994-04-18|1-URGENT|Clerk#000000537|0|arefully special warho|61.583|-149.457|61.583|-149.457|
+2757|76|O|89792.48|1995-07-20|2-HIGH|Clerk#000000216|0| regular requests subl|61.1955|-149.9|61.1955|-149.9|
+2758|43|O|36671.88|1998-07-12|5-LOW|Clerk#000000863|0|s cajole according to the carefully special |61.1844|-149.897|61.1844|-149.897|
+2759|116|F|89731.10|1993-11-25|4-NOT SPECIFIED|Clerk#000000071|0|ts. regular, pending pinto beans sleep ab|61.1901|-149.892|61.1901|-149.892|
+2784|95|O|106635.21|1998-01-07|1-URGENT|Clerk#000000540|0|g deposits alongside of the silent requests s|61.1444|-149.867|61.1444|-149.867|
+2785|148|O|132854.79|1995-07-21|2-HIGH|Clerk#000000098|0|iously pending packages sleep according to the blithely unusual foxe|61.1955|-149.9|61.1955|-149.9|
+2786|79|F|178254.66|1992-03-22|2-HIGH|Clerk#000000976|0|al platelets cajole blithely ironic requests. ironic re|61.1893|-149.887|61.1893|-149.887|
+2787|103|O|3726.14|1995-09-30|1-URGENT|Clerk#000000906|0|he ironic, regular |61.2174|-149.888|61.2174|-149.888|
+2788|124|F|17172.66|1994-09-22|1-URGENT|Clerk#000000641|0|nts wake across the fluffily bold accoun|61.2227|-149.842|61.2227|-149.842|
+2789|37|O|219123.27|1998-03-14|2-HIGH|Clerk#000000972|0|gular patterns boost. carefully even re|61.1263|-149.872|61.1263|-149.872|
+2790|25|F|177458.97|1994-08-19|2-HIGH|Clerk#000000679|0| the carefully express deposits sleep slyly |61.1138|-149.866|61.1138|-149.866|
+2791|121|F|156697.55|1994-10-10|2-HIGH|Clerk#000000662|0|as. slyly ironic accounts play furiously bl|61.2157|-149.821|61.2157|-149.821|
+2816|58|F|42225.53|1994-09-20|2-HIGH|Clerk#000000289|0|kages at the final deposits cajole furious foxes. quickly |61.2174|-149.888|61.2174|-149.888|
+2817|40|F|71453.85|1994-04-19|3-MEDIUM|Clerk#000000982|0|ic foxes haggle upon the daringly even pinto beans. slyly|61.1855|-149.868|61.1855|-149.868|
+2818|49|F|120086.84|1994-12-12|3-MEDIUM|Clerk#000000413|0|eep furiously special ideas. express |61.1951|-149.873|61.1951|-149.873|
+2819|103|F|66927.16|1994-05-05|1-URGENT|Clerk#000000769|0|ngside of the blithely ironic dolphins. furio|61.1444|-149.867|61.1444|-149.867|
+2820|19|F|143813.39|1994-05-20|3-MEDIUM|Clerk#000000807|0|equests are furiously. carefu|61.1883|-149.735|61.1883|-149.735|
+2821|118|F|36592.48|1993-08-09|3-MEDIUM|Clerk#000000323|0|ng requests. even instructions are quickly express, silent instructi|61.2161|-149.876|61.2161|-149.876|
+2822|79|F|40142.15|1993-07-26|2-HIGH|Clerk#000000510|0|furiously against the accounts. unusual accounts aft|61.2161|-149.876|61.2161|-149.876|
+2823|79|O|171894.45|1995-09-09|2-HIGH|Clerk#000000567|0|encies. carefully fluffy accounts m|61.1893|-149.888|61.1893|-149.888|
+2848|70|F|116258.53|1992-03-10|1-URGENT|Clerk#000000256|0|ly fluffy foxes sleep furiously across the slyly regu|61.2174|-149.888|61.2174|-149.888|
+2849|46|O|180054.29|1996-04-30|2-HIGH|Clerk#000000659|0|al packages are after the quickly bold requests. carefully special |61.1914|-149.886|61.1914|-149.886|
+2850|100|O|122969.79|1996-10-02|2-HIGH|Clerk#000000392|0|, regular deposits. furiously pending packages hinder carefully carefully u|61.1541|-149.958|61.1541|-149.958|
+2851|145|O|7859.36|1997-09-07|5-LOW|Clerk#000000566|0|Tiresias wake quickly quickly even|61.1259|-149.717|61.1259|-149.717|
+2852|91|F|99050.81|1993-01-16|1-URGENT|Clerk#000000740|0|ruthless deposits against the final instructions use quickly al|61.2193|-149.902|61.2193|-149.902|
+2853|94|F|103641.15|1994-05-05|2-HIGH|Clerk#000000878|0|the carefully even packages.|61.1879|-149.886|61.1879|-149.886|
+2854|139|F|153568.02|1994-06-27|1-URGENT|Clerk#000000010|0| furiously ironic tithes use furiously |61.1372|-149.912|61.1372|-149.912|
+2855|49|F|48419.58|1993-04-04|4-NOT SPECIFIED|Clerk#000000973|0| silent, regular packages sleep |61.1101|-149.857|61.1101|-149.857|
+2880|8|F|145761.99|1992-03-15|2-HIGH|Clerk#000000756|0|ves maintain doggedly spec|61.1791|-149.94|61.1791|-149.94|
+2881|100|F|45695.84|1992-05-10|5-LOW|Clerk#000000864|0|uriously. slyly express requests according to the silent dol|61.2031|-149.749|61.2031|-149.749|
+2882|121|O|172872.37|1995-08-22|2-HIGH|Clerk#000000891|0|pending deposits. carefully eve|61.1914|-149.877|61.1914|-149.877|
+2883|121|F|170360.27|1995-01-23|5-LOW|Clerk#000000180|0|uses. carefully ironic accounts lose fluffil|61.1944|-149.883|61.1944|-149.883|
+2884|92|O|71683.84|1997-10-12|3-MEDIUM|Clerk#000000780|0|efully express instructions sleep against|61.1923|-149.886|61.1923|-149.886|
+2885|7|F|146896.72|1992-09-19|4-NOT SPECIFIED|Clerk#000000280|0|ly sometimes special excuses. final requests are |61.2123|-149.854|61.2123|-149.854|
+2886|109|F|94527.23|1994-11-13|4-NOT SPECIFIED|Clerk#000000619|0|uctions. ironic packages sle|61.2161|-149.876|61.2161|-149.876|
+2887|109|O|28571.39|1997-05-26|5-LOW|Clerk#000000566|0|slyly even pinto beans. slyly bold epitaphs cajole blithely above t|61.2171|-149.9|61.2171|-149.9|
+2912|94|F|27727.52|1992-03-12|5-LOW|Clerk#000000186|0|jole blithely above the quickly regular packages. carefully regular pinto bean|61.1125|-149.861|61.1125|-149.861|
+2913|43|O|130702.19|1997-07-12|3-MEDIUM|Clerk#000000118|0|mptotes doubt furiously slyly regu|61.1419|-149.896|61.1419|-149.896|
+2914|109|F|60867.14|1993-03-03|3-MEDIUM|Clerk#000000543|0|he slyly regular theodolites are furiously sile|61.145|-149.878|61.145|-149.878|
+2915|94|F|96015.13|1994-03-31|5-LOW|Clerk#000000410|0|ld packages. bold deposits boost blithely. ironic, unusual theodoli|61.1044|-149.865|61.1044|-149.865|
+2916|8|O|20182.22|1995-12-27|2-HIGH|Clerk#000000681|0|ithely blithe deposits sleep beyond the|61.1444|-149.876|61.1444|-149.876|
+2917|91|O|100714.13|1997-12-09|4-NOT SPECIFIED|Clerk#000000061|0| special dugouts among the special deposi|61.1|-149.85|61.1|-149.85|
+2918|118|O|21760.09|1996-09-08|3-MEDIUM|Clerk#000000439|0|ular deposits across th|61.1105|-149.861|61.1105|-149.861|
+2919|53|F|137223.14|1993-12-10|2-HIGH|Clerk#000000209|0|es. pearls wake quietly slyly ironic instructions--|61.1286|-149.957|61.1286|-149.957|
+2944|14|O|146581.14|1997-09-24|4-NOT SPECIFIED|Clerk#000000740|0|deas. permanently special foxes haggle carefully ab|61.1201|-149.89|61.1201|-149.89|
+2945|29|O|223507.72|1996-01-03|2-HIGH|Clerk#000000499|0|ons are carefully toward the permanent, bold pinto beans. regu|61.112|-149.871|61.112|-149.871|
+2946|125|O|102226.59|1996-02-05|5-LOW|Clerk#000000329|0|g instructions about the regular accounts sleep carefully along the pen|61.1427|-149.864|61.1427|-149.864|
+2947|70|P|43360.95|1995-04-26|1-URGENT|Clerk#000000464|0|ronic accounts. accounts run furiously d|61.1212|-149.947|61.1212|-149.947|
+2948|44|F|100758.71|1994-08-23|5-LOW|Clerk#000000701|0| deposits according to the blithely pending |61.1228|-149.939|61.1228|-149.939|
+2949|137|F|94231.71|1994-04-12|2-HIGH|Clerk#000000184|0|y ironic accounts use. quickly blithe accou|61.1093|-149.871|61.1093|-149.871|
+2950|136|O|183620.33|1997-07-06|1-URGENT|Clerk#000000833|0| dolphins around the furiously |61.145|-149.878|61.145|-149.878|
+2951|74|O|125509.17|1996-02-06|2-HIGH|Clerk#000000680|0|gular deposits above the finally regular ideas integrate idly stealthil|61.1191|-149.871|61.1191|-149.871|
+2976|29|F|145768.47|1993-12-10|4-NOT SPECIFIED|Clerk#000000159|0|. furiously ironic asymptotes haggle ruthlessly silently regular r|61.1003|-149.856|61.1003|-149.856|
+2977|73|O|25170.88|1996-08-27|3-MEDIUM|Clerk#000000252|0|quickly special platelets are furio|61.1113|-149.872|61.1113|-149.872|
+2978|44|P|139542.14|1995-05-03|1-URGENT|Clerk#000000135|0|d. even platelets are. ironic dependencies cajole slow, e|61.1084|-149.861|61.1084|-149.861|
+2979|133|O|116789.98|1996-03-23|3-MEDIUM|Clerk#000000820|0|even, ironic foxes sleep along|61.144|-149.878|61.144|-149.878|
+2980|4|O|187514.11|1996-09-14|3-MEDIUM|Clerk#000000661|0|y quick pinto beans wake. slyly re|61.1426|-149.877|61.1426|-149.877|
+2981|49|O|37776.79|1998-07-29|5-LOW|Clerk#000000299|0|hely among the express foxes. blithely stealthy requests cajole boldly. regu|61.1173|-149.861|61.1173|-149.861|
+2982|85|F|55582.94|1995-03-19|2-HIGH|Clerk#000000402|0|lyly. express theodolites affix slyly after the slyly speci|61.1347|-149.914|61.1347|-149.914|
diff --git a/hyracks-examples/hyracks-integration-tests/data/spatial.txt b/hyracks/hyracks-examples/hyracks-integration-tests/data/spatial.txt
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/spatial.txt
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/spatial.txt
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part1.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part1.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part1.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part1.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part2.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part2.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part2.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part2.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer3.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer3.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer3.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer3.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer4.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer4.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer4.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer4.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/nation.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/nation.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/nation.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/nation.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part1.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part1.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part1.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part1.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders1.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders1.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders1.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders1.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders4.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders4.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders4.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders4.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/part.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/part.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/part.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/part.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/partsupp.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/partsupp.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/partsupp.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/partsupp.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/region.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/region.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/region.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/region.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/supplier.tbl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/supplier.tbl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/supplier.tbl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/supplier.tbl
diff --git a/hyracks-examples/hyracks-integration-tests/data/tpch0.001/tpch.ddl b/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/tpch.ddl
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/tpch0.001/tpch.ddl
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/tpch.ddl
diff --git a/hyracks-examples/hyracks-integration-tests/data/wordcount.tsv b/hyracks/hyracks-examples/hyracks-integration-tests/data/wordcount.tsv
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/wordcount.tsv
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/wordcount.tsv
diff --git a/hyracks-examples/hyracks-integration-tests/data/words.txt b/hyracks/hyracks-examples/hyracks-integration-tests/data/words.txt
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/data/words.txt
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/words.txt
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
new file mode 100644
index 0000000..f4cf908
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -0,0 +1,88 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples</groupId>
+  <artifactId>hyracks-integration-tests</artifactId>
+  <name>hyracks-integration-tests</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-examples</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-cc</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-btree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-invertedindex</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-rtree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-test-support</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-data-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
new file mode 100644
index 0000000..8482083
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class BTreePrimaryIndexScanOperatorTest extends AbstractIntegrationTest {
+    static {
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary index
+    private int primaryFieldCount = 6;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    private int primaryKeyFieldCount = 1;
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
+    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
+
+    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
+
+    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+    @Before
+    public void setup() throws Exception {
+        // field, type and key declarations for primary index
+        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        createPrimaryIndex();
+        loadPrimaryIndexTest();
+    }
+
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/orders-part1.tbl"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
+
+        spec.addRoot(primaryBtreeBulkLoad);
+        runTest(spec);
+    }
+
+    @Test
+    public void scanPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build dummy tuple containing nothing
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] lowKeyFields = null; // - infinity
+        int[] highKeyFields = null; // + infinity
+
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
+                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primary = new File(primaryFileName);
+        primary.deleteOnExit();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..82fecbe
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+/**
+ * Integration test for BTreeSearchOperatorDescriptor on a primary index.
+ * A primary BTree is created and bulk-loaded from the TPC-H 0.001 orders
+ * file in setup(), then a closed range search (low key "100", high key
+ * "200", both inclusive) is run and results are written to a temp file.
+ */
+public class BTreePrimaryIndexSearchOperatorTest extends AbstractIntegrationTest {
+    static {
+        // Buffer cache for the test NC: 8192-byte pages, 20 pages, 20 max open files.
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    // Used once below to build a unique index file name.
+    // NOTE(review): "hh" is the 12-hour field; uniqueness is only to the
+    // 10-ms "SS" granularity -- confirm this suffices for parallel runs.
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary index
+    private int primaryFieldCount = 6;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    private int primaryKeyFieldCount = 1;
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    // Timestamped on-disk file for the primary BTree, placed in java.io.tmpdir.
+    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
+    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
+
+    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
+
+    // All six stored fields of the primary index are UTF8 strings.
+    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+    /**
+     * Declares field types/keys, then creates and bulk-loads the primary
+     * index before each test method runs.
+     */
+    @Before
+    public void setup() throws Exception {
+        // field, type and key declarations for primary index
+        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        createPrimaryIndex();
+        loadPrimaryIndexTest();
+    }
+
+    /**
+     * Runs a single-operator job that creates the empty primary BTree on NC1.
+     */
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    /**
+     * Scans the '|'-delimited orders file, sorts on field 0 (the BTree key),
+     * and bulk-loads fields {0,1,2,4,5,7} into the primary index.
+     */
+    public void loadPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/orders-part1.tbl"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        // Bulk load requires key-sorted input; sort on field 0 with a 1000-frame budget.
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        // Project 6 of the 9 scanned fields into the index, key (field 0) first.
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
+
+        spec.addRoot(primaryBtreeBulkLoad);
+        runTest(spec);
+    }
+
+    /**
+     * Searches the primary index for keys in ["100", "200"] (UTF8 string
+     * comparison) and writes the hits to a comma-separated temp file.
+     */
+    @Test
+    public void searchPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build tuple containing low and high search key
+        // high key and low key
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        // low key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos);
+        tb.addFieldEndOffset();
+        // high key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        // Feeds the single (low, high) key tuple into the search operator.
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        // Field 0 of the key tuple is the low key, field 1 the high key.
+        int[] lowKeyFields = { 0 };
+        int[] highKeyFields = { 1 };
+
+        // The two boolean args make both range endpoints inclusive.
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
+                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Removes the on-disk index file after all tests have run.
+     * NOTE(review): deleteOnExit() only schedules deletion at JVM exit; if
+     * immediate cleanup is intended, primary.delete() would be the call.
+     */
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primary = new File(primaryFileName);
+        primary.deleteOnExit();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
new file mode 100644
index 0000000..e63ce11
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.btree;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+/**
+ * Integration test for TreeIndexStatsOperatorDescriptor: a primary BTree is
+ * created and bulk-loaded from the TPC-H 0.001 orders file in setup(), then
+ * the stats operator is run over it and its output printed to a temp file.
+ */
+public class BTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
+    static {
+        // Buffer cache for the test NC: 8192-byte pages, 20 pages, 20 max open files.
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    // Used once below to build a unique index file name.
+    // NOTE(review): "hh" is the 12-hour field; uniqueness is only to the
+    // 10-ms "SS" granularity -- confirm this suffices for parallel runs.
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary index
+    private int primaryFieldCount = 6;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    private int primaryKeyFieldCount = 1;
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    // Timestamped on-disk file for the primary BTree, placed in java.io.tmpdir.
+    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
+    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
+
+    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
+
+
+    /**
+     * Declares field types/keys, then creates and bulk-loads the primary
+     * index before each test method runs.
+     */
+    @Before
+    public void setup() throws Exception {
+        // field, type and key declarations for primary index
+        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        createPrimaryIndex();
+        loadPrimaryIndexTest();
+    }
+
+    /**
+     * Runs a single-operator job that creates the empty primary BTree on NC1.
+     */
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    /**
+     * Scans the '|'-delimited orders file, sorts on field 0 (the BTree key),
+     * and bulk-loads fields {0,1,2,4,5,7} into the primary index.
+     */
+    public void loadPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/orders-part1.tbl"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        // Bulk load requires key-sorted input; sort on field 0 with a 1000-frame budget.
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        // Project 6 of the 9 scanned fields into the index, key (field 0) first.
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
+
+        spec.addRoot(primaryBtreeBulkLoad);
+        runTest(spec);
+    }
+
+    /**
+     * Runs the tree-index stats operator over the loaded primary BTree and
+     * writes its report to a comma-separated temp file.
+     */
+    @Test
+    public void showPrimaryIndexStats() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryBtreeSplitProvider,
+                primaryTypeTraits, primaryComparatorFactories, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Removes the on-disk index file after all tests have run.
+     * NOTE(review): deleteOnExit() only schedules deletion at JVM exit; if
+     * immediate cleanup is intended, primary.delete() would be the call.
+     */
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primary = new File(primaryFileName);
+        primary.deleteOnExit();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
new file mode 100644
index 0000000..3c87ae3
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
@@ -0,0 +1,363 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class BTreeSecondaryIndexInsertOperatorTest extends AbstractIntegrationTest {
+    static {
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary index
+    private int primaryFieldCount = 6;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    private int primaryKeyFieldCount = 1;
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
+    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
+
+    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
+
+    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+    // field, type and key declarations for secondary indexes
+    private int secondaryFieldCount = 2;
+    private ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+    private int secondaryKeyFieldCount = 2;
+    private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+
+    private static String secondaryBtreeName = "secondary" + simpleDateFormat.format(new Date());
+    private static String secondaryFileName = System.getProperty("java.io.tmpdir") + sep + secondaryBtreeName;
+
+    private IFileSplitProvider secondaryBtreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(secondaryFileName))) });
+
+    private RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+    @Before
+    public void setup() throws Exception {
+        // field, type and key declarations for primary index
+        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        // field, type and key declarations for secondary indexes
+        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        secondaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        createPrimaryIndex();
+        loadPrimaryIndexTest();
+        createSecondaryIndex();
+        loadSecondaryIndexTest();
+        insertPipelineTest();
+    }
+
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/orders-part1.tbl"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
+
+        spec.addRoot(primaryBtreeBulkLoad);
+        runTest(spec);
+    }
+
+    public void createSecondaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
+        spec.addRoot(secondaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build dummy tuple containing nothing
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] lowKeyFields = null; // - infinity
+        int[] highKeyFields = null; // + infinity
+
+        // scan primary index
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
+                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        // sort based on secondary keys
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 3, 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                primaryRecDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        // load secondary index
+        int[] fieldPermutation = { 3, 0 };
+        TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, sorter, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, secondaryBtreeBulkLoad, 0);
+
+        spec.addRoot(secondaryBtreeBulkLoad);
+        runTest(spec);
+    }
+
+    public void insertPipelineTest() throws Exception {
+
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        // insert into primary index
+        int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, ordersDesc, storageManager, indexRegistryProvider, primaryBtreeSplitProvider,
+                primaryTypeTraits, primaryComparatorFactories,
+                primaryFieldPermutation, IndexOp.INSERT, dataflowHelperFactory, null, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeInsertOp, NC1_ID);
+
+        // first secondary index
+        int[] fieldPermutationB = { 4, 0 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, ordersDesc, storageManager, indexRegistryProvider, secondaryBtreeSplitProvider,
+                secondaryTypeTraits,
+                secondaryComparatorFactories, fieldPermutationB, IndexOp.INSERT, dataflowHelperFactory, null, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryInsertOp, NC1_ID);
+
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, nullSink, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, primaryBtreeInsertOp, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeInsertOp, 0, secondaryInsertOp, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsertOp, 0, nullSink, 0);
+
+        spec.addRoot(nullSink);
+        runTest(spec);
+    }
+
+    @Test
+    public void searchUpdatedSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build tuple containing search keys (only use the first key as search
+        // key)
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        // low key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
+        tb.addFieldEndOffset();
+        // high key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] secondaryLowKeyFields = { 0 };
+        int[] secondaryHighKeyFields = { 1 };
+
+        // search secondary index
+        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
+                secondaryRecDesc, storageManager, indexRegistryProvider, secondaryBtreeSplitProvider,
+                secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
+
+        // second field from the tuples coming from secondary index
+        int[] primaryLowKeyFields = { 1 };
+        // second field from the tuples coming from secondary index
+        int[] primaryHighKeyFields = { 1 };
+
+        // search primary index
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, primaryLowKeyFields,
+                primaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primary = new File(primaryFileName);
+        primary.deleteOnExit();
+
+        File secondary = new File(secondaryFileName);
+        secondary.deleteOnExit();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..1304f12
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
@@ -0,0 +1,306 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class BTreeSecondaryIndexSearchOperatorTest extends AbstractIntegrationTest {
+    static {
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary index
+    private int primaryFieldCount = 6;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    private int primaryKeyFieldCount = 1;
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
+    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
+
+    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
+
+    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+    // field, type and key declarations for secondary indexes
+    private int secondaryFieldCount = 2;
+    private ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+    private int secondaryKeyFieldCount = 2;
+    private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+
+    private static String secondaryBtreeName = "secondary" + simpleDateFormat.format(new Date());
+    private static String secondaryFileName = System.getProperty("java.io.tmpdir") + sep + secondaryBtreeName;
+
+    private IFileSplitProvider secondaryBtreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(secondaryFileName))) });
+
+    private RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
    /**
     * Populates the type traits and comparator factories (all fields are
     * UTF8 strings), then builds the fixture: create and bulk load the
     * primary index, create and bulk load the secondary index.
     *
     * @throws Exception if any of the fixture jobs fails
     */
    @Before
    public void setup() throws Exception {
        // field, type and key declarations for primary index
        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);

        // field, type and key declarations for secondary indexes
        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
        secondaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);

        // Order matters: each step depends on the index built by the previous one.
        createPrimaryIndex();
        loadPrimaryIndexTest();
        createSecondaryIndex();
        loadSecondaryIndexTest();
    }
+
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/orders-part1.tbl"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
+
+        spec.addRoot(primaryBtreeBulkLoad);
+        runTest(spec);
+    }
+
+    public void createSecondaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
+        spec.addRoot(secondaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build dummy tuple containing nothing
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] lowKeyFields = null; // - infinity
+        int[] highKeyFields = null; // + infinity
+
+        // scan primary index
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
+                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        // sort based on secondary keys
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 3, 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                primaryRecDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        // load secondary index
+        int[] fieldPermutation = { 3, 0 };
+        TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, sorter, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, secondaryBtreeBulkLoad, 0);
+
+        spec.addRoot(secondaryBtreeBulkLoad);
+        runTest(spec);
+    }
+
+    @Test
+    public void searchSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build tuple containing search keys (only use the first key as search
+        // key)
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        // low key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
+        tb.addFieldEndOffset();
+        // high key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] secondaryLowKeyFields = { 0 };
+        int[] secondaryHighKeyFields = { 1 };
+
+        // search secondary index
+        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
+                secondaryRecDesc, storageManager, indexRegistryProvider, secondaryBtreeSplitProvider,
+                secondaryTypeTraits, secondaryComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
+
+        int[] primaryLowKeyFields = { 1 }; // second field from the tuples
+        // coming from secondary index
+        int[] primaryHighKeyFields = { 1 }; // second field from the tuples
+        // coming from secondary index
+
+        // search primary index
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, primaryLowKeyFields,
+                primaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primary = new File(primaryFileName);
+        primary.deleteOnExit();
+        File secondary = new File(secondaryFileName);
+        secondary.deleteOnExit();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
new file mode 100644
index 0000000..023bdd9
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.tests.integration;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+public abstract class AbstractIntegrationTest {
+    private static final Logger LOGGER = Logger.getLogger(AbstractIntegrationTest.class.getName());
+
+    public static final String NC1_ID = "nc1";
+    public static final String NC2_ID = "nc2";
+
+    private static ClusterControllerService cc;
+    private static NodeControllerService nc1;
+    private static NodeControllerService nc2;
+    private static IHyracksClientConnection hcc;
+
+    private final List<File> outputFiles;
+
+    @Rule
+    public TemporaryFolder outputFolder = new TemporaryFolder();
+
+    public AbstractIntegrationTest() {
+        outputFiles = new ArrayList<File>();
+    }
+
+    @BeforeClass
+    public static void init() throws Exception {
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clientNetIpAddress = "127.0.0.1";
+        ccConfig.clientNetPort = 39000;
+        ccConfig.clusterNetIpAddress = "127.0.0.1";
+        ccConfig.clusterNetPort = 39001;
+        ccConfig.profileDumpPeriod = 10000;
+        File outDir = new File("target/ClusterController");
+        outDir.mkdirs();
+        File ccRoot = File.createTempFile(AbstractIntegrationTest.class.getName(), ".data", outDir);
+        ccRoot.delete();
+        ccRoot.mkdir();
+        ccConfig.ccRoot = ccRoot.getAbsolutePath();
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        NCConfig ncConfig1 = new NCConfig();
+        ncConfig1.ccHost = "localhost";
+        ncConfig1.ccPort = 39001;
+        ncConfig1.clusterNetIPAddress = "127.0.0.1";
+        ncConfig1.dataIPAddress = "127.0.0.1";
+        ncConfig1.nodeId = NC1_ID;
+        nc1 = new NodeControllerService(ncConfig1);
+        nc1.start();
+
+        NCConfig ncConfig2 = new NCConfig();
+        ncConfig2.ccHost = "localhost";
+        ncConfig2.ccPort = 39001;
+        ncConfig2.clusterNetIPAddress = "127.0.0.1";
+        ncConfig2.dataIPAddress = "127.0.0.1";
+        ncConfig2.nodeId = NC2_ID;
+        nc2 = new NodeControllerService(ncConfig2);
+        nc2.start();
+
+        hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
+        hcc.createApplication("test", null);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
+        }
+    }
+
+    @AfterClass
+    public static void deinit() throws Exception {
+        nc2.stop();
+        nc1.stop();
+        cc.stop();
+    }
+
+    protected void runTest(JobSpecification spec) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(spec.toJSON().toString(2));
+        }
+        JobId jobId = hcc.startJob("test", spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(jobId.toString());
+        }
+        hcc.waitForCompletion(jobId);
+        dumpOutputFiles();
+    }
+
+    private void dumpOutputFiles() {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            for (File f : outputFiles) {
+                if (f.exists() && f.isFile()) {
+                    try {
+                        LOGGER.info("Reading file: " + f.getAbsolutePath() + " in test: " + getClass().getName());
+                        String data = FileUtils.readFileToString(f);
+                        LOGGER.info(data);
+                    } catch (IOException e) {
+                        LOGGER.info("Error reading file: " + f.getAbsolutePath());
+                        LOGGER.info(e.getMessage());
+                    }
+                }
+            }
+        }
+    }
+
+    protected File createTempFile() throws IOException {
+        File tempFile = File.createTempFile(getClass().getName(), ".tmp", outputFolder.getRoot());
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Output file: " + tempFile.getAbsolutePath());
+        }
+        outputFiles.add(tempFile);
+        return tempFile;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
new file mode 100644
index 0000000..622942b
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -0,0 +1,1008 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.tests.integration;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.io.IOException;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNReplicatingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.GraceHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.InMemoryHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.MaterializingOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.NoopNullWriterFactory;
+
+public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
+
+    /*
+     * TPCH Customer table: CREATE TABLE CUSTOMER ( C_CUSTKEY INTEGER NOT NULL,
+     * C_NAME VARCHAR(25) NOT NULL, C_ADDRESS VARCHAR(40) NOT NULL, C_NATIONKEY
+     * INTEGER NOT NULL, C_PHONE CHAR(15) NOT NULL, C_ACCTBAL DECIMAL(15,2) NOT
+     * NULL, C_MKTSEGMENT CHAR(10) NOT NULL, C_COMMENT VARCHAR(117) NOT NULL );
+     * TPCH Orders table: CREATE TABLE ORDERS ( O_ORDERKEY INTEGER NOT NULL,
+     * O_CUSTKEY INTEGER NOT NULL, O_ORDERSTATUS CHAR(1) NOT NULL, O_TOTALPRICE
+     * DECIMAL(15,2) NOT NULL, O_ORDERDATE DATE NOT NULL, O_ORDERPRIORITY
+     * CHAR(15) NOT NULL, O_CLERK CHAR(15) NOT NULL, O_SHIPPRIORITY INTEGER NOT
+     * NULL, O_COMMENT VARCHAR(79) NOT NULL );
+     */
+
+    @Test
+    public void customerOrderCIDJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/customer.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
+                "data/tpch0.001/orders.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
+
+        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
+                spec,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc, 128);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Same CUSTOMER-ORDERS equi-join as {@code customerOrderCIDJoin}, but run
+     * through the Grace hash join operator (fully partitioned, disk-backed).
+     */
+    @Test
+    public void customerOrderCIDGraceJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // Customer input: one split on NC1; 8 UTF-8 string fields.
+        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/customer.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Orders input: one split declared on NC2; 9 UTF-8 string fields.
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
+                "data/tpch0.001/orders.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Joined output schema: 9 orders fields followed by 8 customer fields (17 strings).
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        // NOTE(review): orders split is declared on NC2 but the scanner is pinned to NC1 --
+        // confirm the relative path is reachable from NC1 as intended.
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
+
+        // Keys: field 1 of input 0 (orders O_CUSTKEY) vs field 0 of input 1 (customer C_CUSTKEY).
+        // The constants 4, 10, 200, 1.2 are Grace-join tuning knobs (memory size, input size
+        // estimate, records per frame, fudge factor -- TODO confirm against the operator's ctor).
+        GraceHashJoinOperatorDescriptor join = new GraceHashJoinOperatorDescriptor(
+                spec,
+                4,
+                10,
+                200,
+                1.2,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
+
+        // Sink: comma-separated plain-text writer into a fresh temp file on NC1.
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Wire the pipeline: orders -> join input 0, customer -> join input 1, join -> printer.
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Same CUSTOMER-ORDERS equi-join as {@code customerOrderCIDJoin}, but run
+     * through the hybrid hash join operator (partially in-memory, spilling
+     * overflow partitions to disk).
+     */
+    @Test
+    public void customerOrderCIDHybridHashJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // Customer input: one split on NC1; 8 UTF-8 string fields.
+        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/customer.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Orders input: one split declared on NC2; 9 UTF-8 string fields.
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
+                "data/tpch0.001/orders.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Joined output schema: 9 orders fields followed by 8 customer fields (17 strings).
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        // NOTE(review): orders split is declared on NC2 but the scanner is pinned to NC1 --
+        // confirm the relative path is reachable from NC1 as intended.
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
+
+        // Keys: field 1 of input 0 (orders O_CUSTKEY) vs field 0 of input 1 (customer C_CUSTKEY).
+        // The constants 5, 20, 200, 1.2 are hybrid-join tuning knobs (memory size, input size
+        // estimate, records per frame, fudge factor -- TODO confirm against the operator's ctor).
+        HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
+                spec,
+                5,
+                20,
+                200,
+                1.2,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
+
+        // Sink: comma-separated plain-text writer into a fresh temp file on NC1.
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Wire the pipeline: orders -> join input 0, customer -> join input 1, join -> printer.
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Left-outer joins CUSTOMER (outer, join input 0) with ORDERS (inner, join
+     * input 1) on customer key using the in-memory hash join; customers with
+     * no matching orders are emitted padded by the no-op null writers.
+     */
+    @Test
+    public void customerOrderCIDInMemoryHashLeftOuterJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // Customer input: one split on NC1; 8 UTF-8 string fields.
+        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/customer.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Orders input: one split on NC2; 9 UTF-8 string fields.
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
+                "data/tpch0.001/orders.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Joined output schema: 17 string fields (customer fields plus orders fields).
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Orders scanner runs on NC2, matching its split's declared location.
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
+
+        // One no-op null writer per orders field: unmatched customers get these
+        // as padding for the missing inner-side (orders) columns.
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
+        for (int j = 0; j < nullWriterFactories.length; j++) {
+            nullWriterFactories[j] = NoopNullWriterFactory.INSTANCE;
+        }
+
+        // Keys: field 0 of input 0 (customer C_CUSTKEY) vs field 1 of input 1 (orders
+        // O_CUSTKEY); isLeftOuter = true, hash-table size hint 128.
+        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
+                spec,
+                new int[] { 0 },
+                new int[] { 1 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc, true, nullWriterFactories, 128);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
+
+        // Sink: comma-separated plain-text writer into a fresh temp file on NC1.
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Note the input swap relative to the inner-join tests: customer feeds
+        // join input 0 (outer side) and orders feeds join input 1 (inner side).
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
+
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 0);
+
+        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Left-outer joins CUSTOMER (outer, join input 0) with ORDERS (inner, join
+     * input 1) on customer key using the Grace hash join; customers with no
+     * matching orders are emitted padded by the no-op null writers.
+     */
+    @Test
+    public void customerOrderCIDGraceHashLeftOuterJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // Customer input: one split on NC1; 8 UTF-8 string fields.
+        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/customer.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Orders input: one split on NC2; 9 UTF-8 string fields.
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
+                "data/tpch0.001/orders.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Joined output schema: 17 string fields (customer fields plus orders fields).
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Orders scanner runs on NC2, matching its split's declared location.
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
+
+        // One no-op null writer per orders field: unmatched customers get these
+        // as padding for the missing inner-side (orders) columns.
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
+        for (int j = 0; j < nullWriterFactories.length; j++) {
+            nullWriterFactories[j] = NoopNullWriterFactory.INSTANCE;
+        }
+
+        // Keys: field 0 of input 0 (customer C_CUSTKEY) vs field 1 of input 1 (orders
+        // O_CUSTKEY); isLeftOuter = true. The constants 5, 20, 200, 1.2 are Grace-join
+        // tuning knobs -- TODO confirm their meanings against the operator's ctor.
+        GraceHashJoinOperatorDescriptor join = new GraceHashJoinOperatorDescriptor(
+                spec,
+                5,
+                20,
+                200,
+                1.2,
+                new int[] { 0 },
+                new int[] { 1 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc, true, nullWriterFactories);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
+
+        // Sink: comma-separated plain-text writer into a fresh temp file on NC1.
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Note the input swap relative to the inner-join tests: customer feeds
+        // join input 0 (outer side) and orders feeds join input 1 (inner side).
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
+
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 0);
+
+        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Single-partition hybrid-hash join test: customer.tbl is scanned on NC1,
+     * orders.tbl on NC2; the join operator and the printer both run on NC1.
+     * Join keys are {0} for join input 0 (customer) and {1} for join input 1
+     * (orders); every column is parsed and serialized as a UTF-8 string.
+     * The trailing (true, nullWriterFactories) arguments presumably enable
+     * left-outer semantics, matching the method name -- TODO confirm against
+     * HybridHashJoinOperatorDescriptor's constructor javadoc.
+     */
+    @Test
+    public void customerOrderCIDHybridHashLeftOuterJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/customer.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        // 8 customer columns, all treated as UTF-8 strings.
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
+                "data/tpch0.001/orders.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        // 9 orders columns, all treated as UTF-8 strings.
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Join output: 8 customer columns followed by 9 orders columns = 17 fields.
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
+
+        // One no-op null writer per orders-side field (ordersDesc has 9 fields);
+        // presumably used to pad the orders columns of unmatched customer tuples.
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
+        for (int j = 0; j < nullWriterFactories.length; j++) {
+            nullWriterFactories[j] = NoopNullWriterFactory.INSTANCE;
+        }
+
+        // Args (5, 20, 200, 1.2) are memory/size tuning knobs -- see
+        // HybridHashJoinOperatorDescriptor for their exact meaning.
+        HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
+                spec,
+                5,
+                20,
+                200,
+                1.2,
+                new int[] { 0 },
+                new int[] { 1 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc, true, nullWriterFactories);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Wire scanners to the join: orders -> input 1, customer -> input 0,
+        // then join output 0 -> printer.
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
+
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 0);
+
+        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Two-node in-memory hash join test: both customer and orders are split
+     * into part1 (NC1) and part2 (NC2). Tuples are hash-repartitioned M:N on
+     * the join keys (orders field 1 into join input 0, customer field 0 into
+     * join input 1), the join runs on both nodes with a 128-entry table-size
+     * argument, and results are replicated to a single printer on NC1.
+     */
+    @Test
+    public void customerOrderCIDJoinMulti() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        // 8 customer columns, all treated as UTF-8 strings.
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        // 9 orders columns, all treated as UTF-8 strings.
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Join output: 8 + 9 = 17 fields.
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        // Keys: {1} for input 0 (orders), {0} for input 1 (customer);
+        // 128 is the table-size argument of the in-memory join.
+        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
+                spec,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc, 128);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Hash-repartition each side on its join key so matching keys meet on
+        // the same join partition.
+        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Two-node Grace hash join test. Identical dataflow to
+     * customerOrderCIDJoinMulti (partitioned scans on NC1/NC2, M:N hash
+     * repartitioning on the join keys, replicated output to a printer on NC1)
+     * but using GraceHashJoinOperatorDescriptor with tuning args
+     * (3, 20, 100, 1.2).
+     */
+    @Test
+    public void customerOrderCIDGraceJoinMulti() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        // 8 customer columns, all treated as UTF-8 strings.
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        // 9 orders columns, all treated as UTF-8 strings.
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Join output: 8 + 9 = 17 fields.
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        // Keys: {1} for input 0 (orders), {0} for input 1 (customer); args
+        // (3, 20, 100, 1.2) are memory/partition tuning knobs -- see
+        // GraceHashJoinOperatorDescriptor for their exact meaning.
+        GraceHashJoinOperatorDescriptor join = new GraceHashJoinOperatorDescriptor(
+                spec,
+                3,
+                20,
+                100,
+                1.2,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Hash-repartition each side on its join key.
+        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Two-node hybrid-hash join test. Same dataflow as
+     * customerOrderCIDGraceJoinMulti (partitioned scans on NC1/NC2, M:N hash
+     * repartitioning on the join keys, replicated output to a printer on NC1)
+     * but using HybridHashJoinOperatorDescriptor with tuning args
+     * (3, 20, 100, 1.2).
+     */
+    @Test
+    public void customerOrderCIDHybridHashJoinMulti() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        // 8 customer columns, all treated as UTF-8 strings.
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        // 9 orders columns, all treated as UTF-8 strings.
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Join output: 8 + 9 = 17 fields.
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        // Keys: {1} for input 0 (orders), {0} for input 1 (customer); args
+        // (3, 20, 100, 1.2) are memory/partition tuning knobs -- see
+        // HybridHashJoinOperatorDescriptor for their exact meaning.
+        HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
+                spec,
+                3,
+                20,
+                100,
+                1.2,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Hash-repartition each side on its join key.
+        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Variant of customerOrderCIDJoinMulti where the join is NOT pinned to
+     * specific nodes: instead of absolute NC1/NC2 location constraints, only a
+     * partition-count constraint of 2 is given, leaving node placement to the
+     * scheduler (hence "auto expand"). Dataflow is otherwise identical:
+     * partitioned scans, M:N hash repartitioning on the join keys, in-memory
+     * hash join, replicated output to a printer on NC1.
+     */
+    @Test
+    public void customerOrderCIDJoinAutoExpand() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        // 8 customer columns, all treated as UTF-8 strings.
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        // 9 orders columns, all treated as UTF-8 strings.
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Join output: 8 + 9 = 17 fields.
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        // Keys: {1} for input 0 (orders), {0} for input 1 (customer).
+        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
+                spec,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc, 128);
+        // Count-only constraint: 2 join partitions, nodes chosen by the system.
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        // Hash-repartition each side on its join key.
+        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Test
+    public void customerOrderCIDJoinMultiMaterialized() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        MaterializingOperatorDescriptor ordMat = new MaterializingOperatorDescriptor(spec, ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordMat, NC1_ID, NC2_ID);
+
+        MaterializingOperatorDescriptor custMat = new MaterializingOperatorDescriptor(spec, custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custMat, NC1_ID, NC2_ID);
+
+        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(
+                spec,
+                new int[] { 1 },
+                new int[] { 0 },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                custOrderJoinDesc, 128);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        IConnectorDescriptor ordPartConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(ordPartConn, ordScanner, 0, ordMat, 0);
+
+        IConnectorDescriptor custPartConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(custPartConn, custScanner, 0, custMat, 0);
+
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordMat, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custMat, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
new file mode 100644
index 0000000..9233e39
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
@@ -0,0 +1,420 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.tests.integration;
+
+import java.io.File;
+import java.util.Arrays;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNReplicatingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.NoopNullWriterFactory;
+
+/**
+ * Integration tests for {@link NestedLoopJoinOperatorDescriptor}, joining the
+ * TPCH customer and orders tables on the customer key (orders field 1 vs.
+ * customer field 0).  Variants cover single-node, multi-node, auto-expanded
+ * partition-count, and outer-join configurations.
+ */
+public class TPCHCustomerOrderNestedLoopJoinTest extends AbstractIntegrationTest {
+    /*
+     * TPCH Customer table:
+     *   CREATE TABLE CUSTOMER ( C_CUSTKEY INTEGER NOT NULL, C_NAME VARCHAR(25) NOT NULL,
+     *     C_ADDRESS VARCHAR(40) NOT NULL, C_NATIONKEY INTEGER NOT NULL, C_PHONE CHAR(15) NOT NULL,
+     *     C_ACCTBAL DECIMAL(15,2) NOT NULL, C_MKTSEGMENT CHAR(10) NOT NULL,
+     *     C_COMMENT VARCHAR(117) NOT NULL );
+     *
+     * TPCH Orders table:
+     *   CREATE TABLE ORDERS ( O_ORDERKEY INTEGER NOT NULL, O_CUSTKEY INTEGER NOT NULL,
+     *     O_ORDERSTATUS CHAR(1) NOT NULL, O_TOTALPRICE DECIMAL(15,2) NOT NULL,
+     *     O_ORDERDATE DATE NOT NULL, O_ORDERPRIORITY CHAR(15) NOT NULL, O_CLERK CHAR(15) NOT NULL,
+     *     O_SHIPPRIORITY INTEGER NOT NULL, O_COMMENT VARCHAR(79) NOT NULL );
+     */
+
+    /** Number of columns in the TPCH customer table (see schema above). */
+    private static final int CUSTOMER_FIELD_COUNT = 8;
+
+    /** Number of columns in the TPCH orders table (see schema above). */
+    private static final int ORDERS_FIELD_COUNT = 9;
+
+    /**
+     * Builds a record descriptor with {@code fieldCount} UTF8-string fields.
+     * Every column in these tests is carried as a UTF8 string, so the four
+     * previously copy-pasted descriptor literals collapse into this helper.
+     */
+    private static RecordDescriptor createUTF8RecordDescriptor(int fieldCount) {
+        ISerializerDeserializer[] fields = new ISerializerDeserializer[fieldCount];
+        Arrays.fill(fields, UTF8StringSerializerDeserializer.INSTANCE);
+        return new RecordDescriptor(fields);
+    }
+
+    /** Builds {@code fieldCount} UTF8-string value parsers for a '|'-delimited scan. */
+    private static IValueParserFactory[] createUTF8ValueParserFactories(int fieldCount) {
+        IValueParserFactory[] parsers = new IValueParserFactory[fieldCount];
+        Arrays.fill(parsers, UTF8StringParserFactory.INSTANCE);
+        return parsers;
+    }
+
+    /**
+     * Produces {@link JoinComparator} instances that compare field
+     * {@code pos0} of the left tuple with field {@code pos1} of the right
+     * tuple using the supplied binary comparator.
+     */
+    private static class JoinComparatorFactory implements ITuplePairComparatorFactory {
+        private static final long serialVersionUID = 1L;
+
+        private final IBinaryComparatorFactory bFactory;
+        private final int pos0;
+        private final int pos1;
+
+        public JoinComparatorFactory(IBinaryComparatorFactory bFactory, int pos0, int pos1) {
+            this.bFactory = bFactory;
+            this.pos0 = pos0;
+            this.pos1 = pos1;
+        }
+
+        @Override
+        public ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
+            return new JoinComparator(bFactory.createBinaryComparator(), pos0, pos1);
+        }
+    }
+
+    /** Single-field tuple-pair comparator over raw frame bytes. */
+    private static class JoinComparator implements ITuplePairComparator {
+
+        private final IBinaryComparator bComparator;
+        private final int field0;
+        private final int field1;
+
+        public JoinComparator(IBinaryComparator bComparator, int field0, int field1) {
+            this.bComparator = bComparator;
+            this.field0 = field0;
+            this.field1 = field1;
+        }
+
+        @Override
+        public int compare(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1) {
+            // Field data begins after the tuple's field-slot header.
+            int fStartOffset0 = accessor0.getFieldSlotsLength() + accessor0.getTupleStartOffset(tIndex0);
+            int fStartOffset1 = accessor1.getFieldSlotsLength() + accessor1.getTupleStartOffset(tIndex1);
+
+            int fStart0 = accessor0.getFieldStartOffset(tIndex0, field0);
+            int fLen0 = accessor0.getFieldEndOffset(tIndex0, field0) - fStart0;
+
+            int fStart1 = accessor1.getFieldStartOffset(tIndex1, field1);
+            int fLen1 = accessor1.getFieldEndOffset(tIndex1, field1) - fStart1;
+
+            // Return the comparator result directly; the former
+            // "if (c != 0) return c; return 0;" was redundant.
+            return bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+                    accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+        }
+    }
+
+    /**
+     * Inner nested-loop join on a single node controller (NC1), scanning the
+     * unpartitioned customer and orders files and writing the comma-separated
+     * result to a temp file on NC1.
+     */
+    @Test
+    public void customerOrderCIDJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/customer.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = createUTF8RecordDescriptor(CUSTOMER_FIELD_COUNT);
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/tpch0.001/orders.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT);
+
+        // Joined tuples carry all 17 fields (9 orders + 8 customer columns).
+        RecordDescriptor custOrderJoinDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT + CUSTOMER_FIELD_COUNT);
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(ORDERS_FIELD_COUNT), '|'),
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(CUSTOMER_FIELD_COUNT), '|'),
+                custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
+
+        // Inner join (isLeftOuter = false) on orders field 1 vs. customer
+        // field 0, with a memory budget of 4 frames.
+        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 4, false,
+                null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Inner nested-loop join over inputs partitioned across NC1 and NC2,
+     * with the customer side replicated to every join partition.
+     */
+    @Test
+    public void customerOrderCIDJoinMulti() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = createUTF8RecordDescriptor(CUSTOMER_FIELD_COUNT);
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT);
+
+        RecordDescriptor custOrderJoinDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT + CUSTOMER_FIELD_COUNT);
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(ORDERS_FIELD_COUNT), '|'),
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(CUSTOMER_FIELD_COUNT), '|'),
+                custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        // Memory budget of 5 frames; inner join.
+        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, false,
+                null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        // Replicate the customer partitions so every join site sees them all.
+        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Same join as {@link #customerOrderCIDJoinMulti()} but the join operator
+     * is given only a partition-count constraint (2) instead of absolute
+     * locations, exercising automatic placement.
+     */
+    @Test
+    public void customerOrderCIDJoinAutoExpand() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = createUTF8RecordDescriptor(CUSTOMER_FIELD_COUNT);
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT);
+
+        RecordDescriptor custOrderJoinDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT + CUSTOMER_FIELD_COUNT);
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(ORDERS_FIELD_COUNT), '|'),
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(CUSTOMER_FIELD_COUNT), '|'),
+                custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        // Memory budget of 6 frames; inner join.
+        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 6, false,
+                null);
+        // Only constrain the partition count; the scheduler picks locations.
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    /**
+     * Outer variant of the partitioned join (isLeftOuter = true); unmatched
+     * tuples are padded using {@link NoopNullWriterFactory} null writers.
+     */
+    @Test
+    public void customerOrderCIDOuterJoinMulti() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = createUTF8RecordDescriptor(CUSTOMER_FIELD_COUNT);
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT);
+
+        RecordDescriptor custOrderJoinDesc = createUTF8RecordDescriptor(ORDERS_FIELD_COUNT + CUSTOMER_FIELD_COUNT);
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(ORDERS_FIELD_COUNT), '|'),
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(createUTF8ValueParserFactories(CUSTOMER_FIELD_COUNT), '|'),
+                custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        // NOTE(review): the null-writer array is sized by the orders schema
+        // (9 fields) while the inner (customer) input has 8 fields; confirm
+        // this matches the side NestedLoopJoinOperatorDescriptor null-pads.
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
+        Arrays.fill(nullWriterFactories, NoopNullWriterFactory.INSTANCE);
+
+        // Memory budget of 5 frames; outer join with the null writers above.
+        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, true,
+                nullWriterFactories);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java
similarity index 100%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java
new file mode 100644
index 0000000..836e72e
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java
@@ -0,0 +1,73 @@
+package edu.uci.ics.hyracks.tests.invertedindex;
+
+import java.io.File;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class BinaryTokenizerOperatorTest extends AbstractIntegrationTest {
+
+    @Test
+    public void tokenizerTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] dblpTitleFileSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/cleanednumbereddblptitles.txt"))) };
+        IFileSplitProvider dblpTitleSplitProvider = new ConstantFileSplitProvider(dblpTitleFileSplits);
+        RecordDescriptor dblpTitleRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor dblpTitleScanner = new FileScanOperatorDescriptor(spec, dblpTitleSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), dblpTitleRecDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dblpTitleScanner, NC1_ID);
+
+        RecordDescriptor tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+
+        ITokenFactory tokenFactory = new UTF8WordTokenFactory();
+        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
+                tokenFactory);
+        int[] tokenFields = { 1 };
+        int[] keyFields = { 0 };
+        BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
+                tokenizerRecDesc, tokenizerFactory, tokenFields, keyFields);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), dblpTitleScanner, 0, binaryTokenizer, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), binaryTokenizer, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
new file mode 100644
index 0000000..2206a26
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
@@ -0,0 +1,73 @@
+package edu.uci.ics.hyracks.tests.invertedindex;
+
+import java.io.File;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class InvertedIndexOperatorsTest extends AbstractIntegrationTest {
+
+    @Test
+    public void tokenizerTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] dblpTitleFileSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/cleanednumbereddblptitles.txt"))) };
+        IFileSplitProvider dblpTitleSplitProvider = new ConstantFileSplitProvider(dblpTitleFileSplits);
+        RecordDescriptor dblpTitleRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor dblpTitleScanner = new FileScanOperatorDescriptor(spec, dblpTitleSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), dblpTitleRecDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dblpTitleScanner, NC1_ID);
+
+        RecordDescriptor tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+
+        ITokenFactory tokenFactory = new UTF8WordTokenFactory();
+        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
+                tokenFactory);
+        int[] tokenFields = { 1 };
+        int[] projFields = { 0 };
+        BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
+                tokenizerRecDesc, tokenizerFactory, tokenFields, projFields);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), dblpTitleScanner, 0, binaryTokenizer, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), binaryTokenizer, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java
new file mode 100644
index 0000000..d8fd48e
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java
@@ -0,0 +1,346 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.invertedindex;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class WordInvertedIndexTest extends AbstractIntegrationTest {
+    static {
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    private final static String sep = System.getProperty("file.separator");
+    private final static String dateString = simpleDateFormat.format(new Date());
+    private final static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + "primaryBtree" + dateString;
+    private final static String btreeFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexBtree" + dateString;
+    private final static String invListsFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexLists" + dateString;
+
+    private IFileSplitProvider primaryFileSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
+    private IFileSplitProvider btreeFileSplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(
+            NC1_ID, new FileReference(new File(btreeFileName))) });
+    private IFileSplitProvider invListsFileSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(invListsFileName))) });
+
+    // Primary BTree index.
+    private int primaryFieldCount = 2;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    private int primaryKeyFieldCount = 1;
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+    // Inverted index BTree dictionary.
+    private ITypeTraits[] tokenTypeTraits = new ITypeTraits[1];
+    private IBinaryComparatorFactory[] tokenComparatorFactories = new IBinaryComparatorFactory[1];
+
+    // Inverted index stuff.
+    private int invListElementFieldCount = 1;
+    private ITypeTraits[] invListsTypeTraits = new ITypeTraits[invListElementFieldCount];
+    private IBinaryComparatorFactory[] invListsComparatorFactories = new IBinaryComparatorFactory[invListElementFieldCount];
+    private RecordDescriptor tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+    private RecordDescriptor invListsRecDesc = new RecordDescriptor(
+            new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+
+    // Tokenizer stuff.
+    private ITokenFactory tokenFactory = new UTF8WordTokenFactory();
+    private IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
+            tokenFactory);
+
+    @Before
+    public void setup() throws Exception {
+        // Field declarations and comparators for primary BTree index.
+        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // Field declarations and comparators for tokens.
+        tokenTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        tokenComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        // Field declarations and comparators for inverted lists.
+        invListsTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        invListsComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        printPrimaryIndex();
+        createInvertedIndex();
+        loadInvertedIndex();
+    }
+
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    @Test
+    public void testConjunctiveSearcher() throws Exception {
+        IInvertedIndexSearchModifierFactory conjunctiveSearchModifierFactory = new ConjunctiveSearchModifierFactory();
+        searchInvertedIndex("of", conjunctiveSearchModifierFactory);
+        searchInvertedIndex("3d", conjunctiveSearchModifierFactory);
+        searchInvertedIndex("of the human", conjunctiveSearchModifierFactory);
+    }
+
+    private IOperatorDescriptor createFileScanOp(JobSpecification spec) {
+        FileSplit[] dblpTitleFileSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/cleanednumbereddblptitles.txt"))) };
+        IFileSplitProvider dblpTitleSplitProvider = new ConstantFileSplitProvider(dblpTitleFileSplits);
+        RecordDescriptor dblpTitleRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+        FileScanOperatorDescriptor dblpTitleScanner = new FileScanOperatorDescriptor(spec, dblpTitleSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), dblpTitleRecDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dblpTitleScanner, NC1_ID);
+        return dblpTitleScanner;
+    }
+
+    private IOperatorDescriptor createPrimaryBulkLoadOp(JobSpecification spec) {
+        int[] fieldPermutation = { 0, 1 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
+        return primaryBtreeBulkLoad;
+    }
+
+    private IOperatorDescriptor createScanKeyProviderOp(JobSpecification spec) throws HyracksDataException {
+        // build dummy tuple containing nothing
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+        return keyProviderOp;
+    }
+
+    private IOperatorDescriptor createPrimaryScanOp(JobSpecification spec) throws HyracksDataException {
+        int[] lowKeyFields = null; // - infinity
+        int[] highKeyFields = null; // + infinity
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
+                highKeyFields, true, true, btreeDataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+        return primaryBtreeSearchOp;
+    }
+
+    private void loadPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        // Assuming that the data is pre-sorted on the key. No need to sort
+        // before bulk load.
+        IOperatorDescriptor fileScanOp = createFileScanOp(spec);
+        IOperatorDescriptor primaryBulkLoad = createPrimaryBulkLoadOp(spec);
+        spec.connect(new OneToOneConnectorDescriptor(spec), fileScanOp, 0, primaryBulkLoad, 0);
+        spec.addRoot(primaryBulkLoad);
+        runTest(spec);
+    }
+
+    private void printPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
+        IOperatorDescriptor primaryScanOp = createPrimaryScanOp(spec);
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, printer, 0);
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    private IOperatorDescriptor createExternalSortOp(JobSpecification spec, int[] sortFields,
+            RecordDescriptor outputRecDesc) {
+        ExternalSortOperatorDescriptor externalSortOp = new ExternalSortOperatorDescriptor(spec, 1000, sortFields,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, outputRecDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, externalSortOp, NC1_ID);
+        return externalSortOp;
+    }
+
+    private IOperatorDescriptor createBinaryTokenizerOp(JobSpecification spec, int[] tokenFields, int[] keyFields) {
+        BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
+                tokenizerRecDesc, tokenizerFactory, tokenFields, keyFields);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
+        return binaryTokenizer;
+    }
+
+    private IOperatorDescriptor createInvertedIndexBulkLoadOp(JobSpecification spec, int[] fieldPermutation) {
+        InvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = new InvertedIndexBulkLoadOperatorDescriptor(spec,
+                fieldPermutation, storageManager, btreeFileSplitProvider, invListsFileSplitProvider,
+                indexRegistryProvider, tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits,
+                invListsComparatorFactories, tokenizerFactory, btreeDataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexBulkLoadOp, NC1_ID);
+        return invIndexBulkLoadOp;
+    }
+
+    public void createInvertedIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        InvertedIndexCreateOperatorDescriptor invIndexCreateOp = new InvertedIndexCreateOperatorDescriptor(spec,
+                storageManager, btreeFileSplitProvider, invListsFileSplitProvider,
+                indexRegistryProvider, tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits,
+                invListsComparatorFactories, tokenizerFactory, btreeDataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexCreateOp, NC1_ID);
+        spec.addRoot(invIndexCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadInvertedIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
+        IOperatorDescriptor primaryScanOp = createPrimaryScanOp(spec);
+        int[] tokenFields = { 1 };
+        int[] keyFields = { 0 };
+        IOperatorDescriptor binaryTokenizerOp = createBinaryTokenizerOp(spec, tokenFields, keyFields);
+        int[] sortFields = { 0, 1 };
+        IOperatorDescriptor externalSortOp = createExternalSortOp(spec, sortFields, tokenizerRecDesc);
+        int[] fieldPermutation = { 0, 1 };
+        IOperatorDescriptor invIndexBulkLoadOp = createInvertedIndexBulkLoadOp(spec, fieldPermutation);
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, binaryTokenizerOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), binaryTokenizerOp, 0, externalSortOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), externalSortOp, 0, invIndexBulkLoadOp, 0);
+        spec.addRoot(invIndexBulkLoadOp);
+        runTest(spec);
+    }
+
+    private IOperatorDescriptor createQueryProviderOp(JobSpecification spec, String queryString)
+            throws HyracksDataException {
+        // Build tuple with exactly one field, which is the query,
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+        DataOutput dos = tb.getDataOutput();
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString, dos);
+        tb.addFieldEndOffset();
+        ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
+        ConstantTupleSourceOperatorDescriptor queryProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                queryRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, queryProviderOp, NC1_ID);
+        return queryProviderOp;
+    }
+
+    private IOperatorDescriptor createInvertedIndexSearchOp(JobSpecification spec,
+            IInvertedIndexSearchModifierFactory searchModifierFactory) {
+        InvertedIndexSearchOperatorDescriptor invIndexSearchOp = new InvertedIndexSearchOperatorDescriptor(spec, 0,
+                storageManager, btreeFileSplitProvider, invListsFileSplitProvider, indexRegistryProvider,
+                tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
+                btreeDataflowHelperFactory, tokenizerFactory, searchModifierFactory, invListsRecDesc, false,
+                NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexSearchOp, NC1_ID);
+        return invIndexSearchOp;
+    }
+
+    public void searchInvertedIndex(String queryString, IInvertedIndexSearchModifierFactory searchModifierFactory)
+            throws Exception {
+        JobSpecification spec = new JobSpecification();
+        IOperatorDescriptor queryProviderOp = createQueryProviderOp(spec, queryString);
+        IOperatorDescriptor invIndexSearchOp = createInvertedIndexSearchOp(spec, searchModifierFactory);
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        spec.connect(new OneToOneConnectorDescriptor(spec), queryProviderOp, 0, invIndexSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), invIndexSearchOp, 0, printer, 0);
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+    
+    @AfterClass
+    public static void cleanup() throws Exception {
+    	File primary = new File(primaryFileName);
+    	File btree = new File(btreeFileName);
+    	File invLists = new File(invListsFileName);
+        primary.deleteOnExit();
+        btree.deleteOnExit();
+        invLists.deleteOnExit();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..6625148
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.rtree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class RTreePrimaryIndexSearchOperatorTest extends AbstractIntegrationTest {
+    static {
+        TestStorageManagerComponentHolder.init(8192, 20, 20); // NOTE(review): 8192 presumably the page size; confirm the remaining args
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory; // initialized in setup()
+
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS"); // used once below to build a unique file name
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary R-tree index
+    private int primaryFieldCount = 5; // 4 double key fields + 1 UTF8 string payload (see primaryRecDesc)
+    private int primaryKeyFieldCount = 4;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount]; // entries filled in setup()
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE });
+
+    private static String primaryRTreeName = "primary" + simpleDateFormat.format(new Date()); // timestamp suffix avoids clashes between runs
+    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryRTreeName;
+
+    private IFileSplitProvider primaryRTreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) }); // single partition on NC1
+
+    private IPrimitiveValueProviderFactory[] primaryValueProviderFactories;
+    
+    @Before
+    public void setup() throws Exception {
+        // field, type and key declarations for primary R-tree index
+        primaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS; // payload field is a variable-length string
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        primaryComparatorFactories[1] = primaryComparatorFactories[0]; // same factory instance reused for all four double keys
+        primaryComparatorFactories[2] = primaryComparatorFactories[0];
+        primaryComparatorFactories[3] = primaryComparatorFactories[0];
+
+        primaryValueProviderFactories = RTreeUtils
+                .createPrimitiveValueProviderFactories(primaryComparatorFactories.length, DoublePointable.FACTORY);
+        dataflowHelperFactory = new RTreeDataflowHelperFactory(primaryValueProviderFactories);
+        
+        createPrimaryIndex(); // each test starts from a freshly created and bulk-loaded index
+        loadPrimaryIndexTest();
+    }
+
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification(); // single-operator job: creates the empty R-tree file on NC1
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification(); // scans data/spatial.txt and bulk-loads it into the R-tree
+
+        FileSplit[] objectsSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/spatial.txt"))) };
+        IFileSplitProvider objectsSplitProvider = new ConstantFileSplitProvider(objectsSplits);
+        RecordDescriptor objectsDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor objScanner = new FileScanOperatorDescriptor(spec, objectsSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { DoubleParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), objectsDesc); // input records are '|'-delimited
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, objScanner, NC1_ID);
+
+        int[] fieldPermutation = { 0, 1, 2, 3, 4 }; // identity mapping: load fields in input order
+        TreeIndexBulkLoadOperatorDescriptor primaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE); // 0.7f = bulk-load fill factor
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), objScanner, 0, primaryRTreeBulkLoad, 0);
+
+        spec.addRoot(primaryRTreeBulkLoad);
+        runTest(spec);
+    }
+
+    @Test
+    public void searchPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification(); // constant key tuple -> R-tree search -> file printer
+
+        // build tuple
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos); // query rectangle -- assumes same coordinate order as the key fields; TODO confirm
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] keyFields = { 0, 1, 2, 3 }; // all four doubles of the key tuple form the search key
+
+        RTreeSearchOperatorDescriptor primaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, keyFields,
+                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryRTreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryRTreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primary = new File(primaryFileName);
+        primary.deleteOnExit(); // deletion deferred to JVM exit; the index file may still be held open here
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
new file mode 100644
index 0000000..ef2950e
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.rtree;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class RTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
+    static {
+        TestStorageManagerComponentHolder.init(8192, 20, 20); // NOTE(review): 8192 presumably the page size; confirm the remaining args
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory; // initialized in setup()
+
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS"); // used once below to build a unique file name
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary R-tree index
+    private int primaryFieldCount = 5; // 4 double key fields + 1 UTF8 string payload (see primaryRecDesc)
+    private int primaryKeyFieldCount = 4;
+    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount]; // entries filled in setup(); the writer factory below keeps a reference to this array
+    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
+            primaryTypeTraits);
+
+    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE });
+
+    private ITreeIndexFrameFactory primaryInteriorFrameFactory; // NOTE(review): assigned in setup() but never read in this test
+    private ITreeIndexFrameFactory primaryLeafFrameFactory; // NOTE(review): assigned in setup() but never read in this test
+
+    private static String primaryRTreeName = "primary" + simpleDateFormat.format(new Date()); // timestamp suffix avoids clashes between runs
+    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryRTreeName;
+
+    private IFileSplitProvider primaryRTreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) }); // single partition on NC1
+
+    private IPrimitiveValueProviderFactory[] primaryValueProviderFactories;
+    
+    @Before
+    public void setup() throws Exception {
+        // field, type and key declarations for primary R-tree index
+        primaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[4] = edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable.TYPE_TRAITS; // FIX: field 4 is a UTF8 string (see primaryRecDesc and the parser in loadPrimaryIndexTest), not a double
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        primaryComparatorFactories[1] = primaryComparatorFactories[0]; // same factory instance reused for all four double keys
+        primaryComparatorFactories[2] = primaryComparatorFactories[0];
+        primaryComparatorFactories[3] = primaryComparatorFactories[0];
+
+        primaryValueProviderFactories = RTreeUtils
+                .createPrimitiveValueProviderFactories(primaryComparatorFactories.length, DoublePointable.FACTORY);
+        dataflowHelperFactory = new RTreeDataflowHelperFactory(primaryValueProviderFactories);
+        
+        primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(primaryTupleWriterFactory,
+                primaryValueProviderFactories);
+        primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(primaryTupleWriterFactory, primaryValueProviderFactories);
+
+        createPrimaryIndex(); // each test starts from a freshly created and bulk-loaded index
+        loadPrimaryIndexTest();
+    }
+
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification(); // single-operator job: creates the empty R-tree file on NC1
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification(); // scans data/spatial.txt and bulk-loads it into the R-tree
+
+        FileSplit[] objectsSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/spatial.txt"))) };
+        IFileSplitProvider objectsSplitProvider = new ConstantFileSplitProvider(objectsSplits);
+        RecordDescriptor objectsDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor objScanner = new FileScanOperatorDescriptor(spec, objectsSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { DoubleParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), objectsDesc); // input records are '|'-delimited
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, objScanner, NC1_ID);
+
+        int[] fieldPermutation = { 0, 1, 2, 3, 4 }; // identity mapping: load fields in input order
+        TreeIndexBulkLoadOperatorDescriptor primaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE); // 0.7f = bulk-load fill factor
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), objScanner, 0, primaryRTreeBulkLoad, 0);
+
+        spec.addRoot(primaryRTreeBulkLoad);
+        runTest(spec);
+    }
+
+    @Test
+    public void showPrimaryIndexStats() throws Exception {
+        JobSpecification spec = new JobSpecification(); // stats operator -> file printer
+
+        TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryRTreeSplitProvider, 
+                primaryTypeTraits, primaryComparatorFactories, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primary = new File(primaryFileName);
+        primary.deleteOnExit(); // deletion deferred to JVM exit; the index file may still be held open here
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..030afcf
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.rtree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class RTreeSecondaryIndexSearchOperatorTest extends AbstractIntegrationTest {
+    static {
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
+    private IIndexDataflowHelperFactory dataflowHelperFactory;
+    private IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    private final static String sep = System.getProperty("file.separator");
+
+    // field, type and key declarations for primary B-tree index
+    private int primaryBTreeFieldCount = 10;
+    private ITypeTraits[] primaryBTreeTypeTraits = new ITypeTraits[primaryBTreeFieldCount];
+    private int primaryBTreeKeyFieldCount = 1;
+    private IBinaryComparatorFactory[] primaryBTreeComparatorFactories = new IBinaryComparatorFactory[primaryBTreeKeyFieldCount];
+
+    private static String primaryBTreeName = "primaryBTree" + simpleDateFormat.format(new Date());
+    private static String primaryBTreeFileName = System.getProperty("java.io.tmpdir") + sep + primaryBTreeName;
+
+    private IFileSplitProvider primaryBTreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryBTreeFileName))) });
+
+    private RecordDescriptor primaryBTreeRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE, });
+
+    // field, type and key declarations for secondary indexes
+    private int secondaryFieldCount = 5;
+    private ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+    private int secondaryKeyFieldCount = 4;
+    private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+
+    private static String secondaryRTreeName = "secondary" + simpleDateFormat.format(new Date());
+    private static String secondaryFileName = System.getProperty("java.io.tmpdir") + sep + secondaryRTreeName;
+
+    private IFileSplitProvider secondaryRTreeSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(secondaryFileName))) });
+
+    private RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE });
+
+    private IPrimitiveValueProviderFactory[] secondaryValueProviderFactories;
+    
+    @Before
+    public void setup() throws Exception {
+        // field, type and key declarations for primary B-tree index
+        primaryBTreeTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[6] = DoublePointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[7] = DoublePointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[8] = DoublePointable.TYPE_TRAITS;
+        primaryBTreeTypeTraits[9] = DoublePointable.TYPE_TRAITS;
+        primaryBTreeComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        // field, type and key declarations for secondary indexes
+        secondaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        secondaryComparatorFactories[1] = secondaryComparatorFactories[0];
+        secondaryComparatorFactories[2] = secondaryComparatorFactories[0];
+        secondaryComparatorFactories[3] = secondaryComparatorFactories[0];
+
+        secondaryValueProviderFactories = RTreeUtils
+                .createPrimitiveValueProviderFactories(secondaryComparatorFactories.length, DoublePointable.FACTORY);
+
+        dataflowHelperFactory = new RTreeDataflowHelperFactory(secondaryValueProviderFactories);
+        
+        createPrimaryIndex();
+        loadPrimaryBTreeIndexTest();
+        createSecondaryIndex();
+        loadSecondaryIndexTest();
+    }
+
+    public void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, primaryBTreeSplitProvider, primaryBTreeTypeTraits,
+                primaryBTreeComparatorFactories, btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadPrimaryBTreeIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/orders-with-locations.txt"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7, 9, 10, 11, 12 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, primaryBTreeSplitProvider, primaryBTreeTypeTraits, primaryBTreeComparatorFactories,
+                fieldPermutation, 0.7f, btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBTreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBTreeBulkLoad, 0);
+
+        spec.addRoot(primaryBTreeBulkLoad);
+        runTest(spec);
+    }
+
+    public void createSecondaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                indexRegistryProvider, secondaryRTreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+    
+    public void loadSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build dummy tuple containing nothing
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryBTreeKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] lowKeyFields = null; // - infinity
+        int[] highKeyFields = null; // + infinity
+
+        // scan primary index
+        BTreeSearchOperatorDescriptor primaryBTreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
+                primaryBTreeRecDesc, storageManager, indexRegistryProvider, primaryBTreeSplitProvider,
+                primaryBTreeTypeTraits, primaryBTreeComparatorFactories, lowKeyFields, highKeyFields, 
+                true, true, btreeDataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBTreeSearchOp, NC1_ID);
+
+        // load secondary index
+        int[] fieldPermutation = { 6, 7, 8, 9, 0 };
+        TreeIndexBulkLoadOperatorDescriptor secondaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, secondaryRTreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutation, 0.7f,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryRTreeBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBTreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBTreeSearchOp, 0, secondaryRTreeBulkLoad, 0);
+
+        spec.addRoot(secondaryRTreeBulkLoad);
+        runTest(spec);
+    }
+
+    @Test
+    public void searchSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build tuple
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] keyFields = { 0, 1, 2, 3 };
+
+        RTreeSearchOperatorDescriptor secondaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(spec,
+                secondaryRecDesc, storageManager, indexRegistryProvider, secondaryRTreeSplitProvider,
+                secondaryTypeTraits, secondaryComparatorFactories, keyFields, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryRTreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryRTreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryRTreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @AfterClass
+    public static void cleanup() throws Exception {
+        File primaryBTree = new File(primaryBTreeFileName);
+        primaryBTree.deleteOnExit();
+
+        File secondary = new File(secondaryFileName);
+        secondary.deleteOnExit();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/NoopNullWriterFactory.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/NoopNullWriterFactory.java
new file mode 100644
index 0000000..d119509
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/NoopNullWriterFactory.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.hyracks.tests.util;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class NoopNullWriterFactory implements INullWriterFactory {
+
+    private static final long serialVersionUID = 1L;
+    public static final NoopNullWriterFactory INSTANCE = new NoopNullWriterFactory();
+
+    private NoopNullWriterFactory() {
+    }
+
+    @Override
+    public INullWriter createNullWriter() {
+        return new INullWriter() {
+            @Override
+            public void writeNull(DataOutput out) throws HyracksDataException {
+                try {
+                    out.writeShort(0);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        };
+    }
+}
diff --git a/hyracks/hyracks-examples/pom.xml b/hyracks/hyracks-examples/pom.xml
new file mode 100644
index 0000000..8ce8108
--- /dev/null
+++ b/hyracks/hyracks-examples/pom.xml
@@ -0,0 +1,22 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-examples</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>hyracks-examples</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>tpch-example</module>
+    <module>text-example</module>
+    <module>btree-example</module>
+    <module>hyracks-integration-tests</module>
+    <module>hadoop-compat-example</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-examples/text-example/pom.xml b/hyracks/hyracks-examples/text-example/pom.xml
new file mode 100644
index 0000000..ba8649e
--- /dev/null
+++ b/hyracks/hyracks-examples/text-example/pom.xml
@@ -0,0 +1,20 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples</groupId>
+  <artifactId>text-example</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>text-example</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-examples</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>texthelper</module>
+    <module>textclient</module>
+    <module>textapp</module>
+  </modules>
+</project>
diff --git a/hyracks-examples/text-example/textapp/data/file1.txt b/hyracks/hyracks-examples/text-example/textapp/data/file1.txt
similarity index 100%
rename from hyracks-examples/text-example/textapp/data/file1.txt
rename to hyracks/hyracks-examples/text-example/textapp/data/file1.txt
diff --git a/hyracks-examples/text-example/textapp/data/file2.txt b/hyracks/hyracks-examples/text-example/textapp/data/file2.txt
similarity index 100%
rename from hyracks-examples/text-example/textapp/data/file2.txt
rename to hyracks/hyracks-examples/text-example/textapp/data/file2.txt
diff --git a/hyracks/hyracks-examples/text-example/textapp/pom.xml b/hyracks/hyracks-examples/text-example/textapp/pom.xml
new file mode 100644
index 0000000..3834d08
--- /dev/null
+++ b/hyracks/hyracks-examples/text-example/textapp/pom.xml
@@ -0,0 +1,187 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  <artifactId>textapp</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>textapp</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>text-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+	</pluginManagement>
+  
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/application/lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+      	<groupId>edu.uci.ics.hyracks</groupId>
+      	<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
+      	<version>0.2.3-SNAPSHOT</version>
+        <configuration>
+          <hyracksServerHome>${basedir}/../../../hyracks-server/target/hyracks-server-${project.version}-binary-assembly</hyracksServerHome>
+          <hyracksCLIHome>${basedir}/../../../hyracks-cli/target/hyracks-cli-${project.version}-binary-assembly</hyracksCLIHome>
+          <jvmOptions>${jvm.extraargs}</jvmOptions>
+        </configuration>
+        <executions>
+          <execution>
+            <id>hyracks-cc-start</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>start-cc</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>hyracks-nc1-start</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>start-nc</goal>
+            </goals>
+            <configuration>
+              <nodeId>NC1</nodeId>
+              <dataIpAddress>127.0.0.1</dataIpAddress>
+              <ccHost>localhost</ccHost>
+            </configuration>
+          </execution>
+          <execution>
+            <id>hyracks-nc2-start</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>start-nc</goal>
+            </goals>
+            <configuration>
+              <nodeId>NC2</nodeId>
+              <dataIpAddress>127.0.0.1</dataIpAddress>
+              <ccHost>localhost</ccHost>
+            </configuration>
+          </execution>
+          <execution>
+            <id>deploy-app</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>deploy-app</goal>
+            </goals>
+            <configuration>
+              <ccHost>localhost</ccHost>
+              <appName>text</appName>
+              <harFile>${project.build.directory}/textapp-${project.version}-app-assembly.zip</harFile>
+            </configuration>
+          </execution>
+          <execution>
+            <id>stop-services</id>
+            <phase>post-integration-test</phase>
+            <goals>
+              <goal>stop-services</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+      	<groupId>org.apache.maven.plugins</groupId>
+      	<artifactId>maven-compiler-plugin</artifactId>
+      	<version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+      	<groupId>org.apache.maven.plugins</groupId>
+      	<artifactId>maven-failsafe-plugin</artifactId>
+      	<version>2.8.1</version>
+      	<executions>
+      	  <execution>
+      	    <id>it</id>
+      	    <phase>integration-test</phase>
+      	    <goals>
+      	      <goal>integration-test</goal>
+      	    </goals>
+      	  </execution>
+      	</executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  		<artifactId>texthelper</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  		<artifactId>textclient</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.2</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-examples/text-example/textapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/text-example/textapp/src/main/assembly/app-assembly.xml
similarity index 100%
rename from hyracks-examples/text-example/textapp/src/main/assembly/app-assembly.xml
rename to hyracks/hyracks-examples/text-example/textapp/src/main/assembly/app-assembly.xml
diff --git a/hyracks-examples/text-example/textapp/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java b/hyracks/hyracks-examples/text-example/textapp/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
similarity index 100%
rename from hyracks-examples/text-example/textapp/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
rename to hyracks/hyracks-examples/text-example/textapp/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
diff --git a/hyracks/hyracks-examples/text-example/textclient/pom.xml b/hyracks/hyracks-examples/text-example/textclient/pom.xml
new file mode 100644
index 0000000..af72b71
--- /dev/null
+++ b/hyracks/hyracks-examples/text-example/textclient/pom.xml
@@ -0,0 +1,100 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  <artifactId>textclient</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>textclient</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>text-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  		<artifactId>texthelper</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+          <id>textclient</id>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.examples.text.client.WordCountMain</mainClass>
+                  <name>textclient</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+          <execution>
+          	<id>groupclient</id>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.examples.text.client.ExternalGroupClient</mainClass>
+                  <name>groupclient</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks-examples/text-example/textclient/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/text-example/textclient/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-examples/text-example/textclient/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-examples/text-example/textclient/src/main/assembly/binary-assembly.xml
diff --git a/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java b/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java
similarity index 100%
rename from hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java
rename to hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java
diff --git a/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java b/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java
similarity index 100%
rename from hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java
rename to hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java
diff --git a/hyracks/hyracks-examples/text-example/texthelper/pom.xml b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
new file mode 100644
index 0000000..5095db8
--- /dev/null
+++ b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
@@ -0,0 +1,45 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  <artifactId>texthelper</artifactId>
+  <name>texthelper</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>text-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-data-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java b/hyracks/hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java
similarity index 100%
rename from hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java
rename to hyracks/hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java
diff --git a/hyracks/hyracks-examples/tpch-example/pom.xml b/hyracks/hyracks-examples/tpch-example/pom.xml
new file mode 100644
index 0000000..b237c9b
--- /dev/null
+++ b/hyracks/hyracks-examples/tpch-example/pom.xml
@@ -0,0 +1,19 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples</groupId>
+  <artifactId>tpch-example</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>tpch-example</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-examples</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>tpchclient</module>
+    <module>tpchapp</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchapp/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchapp/pom.xml
new file mode 100644
index 0000000..76eb257
--- /dev/null
+++ b/hyracks/hyracks-examples/tpch-example/tpchapp/pom.xml
@@ -0,0 +1,91 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.tpch</groupId>
+  <artifactId>tpchapp</artifactId>
+  <name>tpchapp</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>tpch-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+	</pluginManagement>
+  
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/application/lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+        <groupId>edu.uci.ics.hyracks</groupId>
+        <artifactId>hyracks-dataflow-std</artifactId>
+        <version>0.2.3-SNAPSHOT</version>
+        <scope>compile</scope>
+    </dependency>
+    <dependency>
+    	<groupId>edu.uci.ics.hyracks</groupId>
+    	<artifactId>hyracks-data-std</artifactId>
+    	<version>0.2.3-SNAPSHOT</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-examples/tpch-example/tpchapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/tpch-example/tpchapp/src/main/assembly/app-assembly.xml
similarity index 100%
rename from hyracks-examples/tpch-example/tpchapp/src/main/assembly/app-assembly.xml
rename to hyracks/hyracks-examples/tpch-example/tpchapp/src/main/assembly/app-assembly.xml
diff --git a/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
new file mode 100644
index 0000000..0f8d8fc
--- /dev/null
+++ b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
@@ -0,0 +1,61 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.tpch</groupId>
+  <artifactId>tpchclient</artifactId>
+  <name>tpchclient</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>tpch-example</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-data-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.examples.tpch.client.Main</mainClass>
+                  <name>tpchclient</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java b/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
new file mode 100644
index 0000000..0ad0ff0
--- /dev/null
+++ b/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
@@ -0,0 +1,359 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.tpch.client;
+
+import java.io.File;
+import java.util.EnumSet;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.FrameFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.aggregators.CountFieldAggregatorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.aggregators.MultiFieldsAggregatorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.hash.HashGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.GraceHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.InMemoryHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
+
+public class Main {
+    private static class Options {
+        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+        public String host;
+
+        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)", required = false)
+        public int port = 1098;
+
+        @Option(name = "-app", usage = "Hyracks Application name", required = true)
+        public String app;
+
+        @Option(name = "-infile-customer-splits", usage = "Comma separated list of file-splits for the CUSTOMER input. A file-split is <node-name>:<path>", required = true)
+        public String inFileCustomerSplits;
+
+        @Option(name = "-infile-order-splits", usage = "Comma separated list of file-splits for the ORDER input. A file-split is <node-name>:<path>", required = true)
+        public String inFileOrderSplits;
+
+        @Option(name = "-outfile-splits", usage = "Comma separated list of file-splits for the output", required = true)
+        public String outFileSplits;
+
+        @Option(name = "-num-join-partitions", usage = "Number of Join partitions to use (default: 1)", required = false)
+        public int numJoinPartitions = 1;
+
+        @Option(name = "-profile", usage = "Enable/Disable profiling. (default: enabled)")
+        public boolean profile = true;
+
+        @Option(name = "-table-size", usage = "Table size for in-memory hash join", required = false)
+        public int tableSize = 8191;
+
+        @Option(name = "-algo", usage = "Join types", required = true)
+        public String algo;
+
+        // For grace/hybrid hash join only
+        @Option(name = "-mem-size", usage = "Memory size for hash join", required = true)
+        public int memSize;
+
+        @Option(name = "-input-size", usage = "Input size of the grace/hybrid hash join", required = false)
+        public int graceInputSize = 10;
+
+        @Option(name = "-records-per-frame", usage = "Records per frame for grace/hybrid hash join", required = false)
+        public int graceRecordsPerFrame = 200;
+
+        @Option(name = "-grace-factor", usage = "Factor of the grace/hybrid hash join", required = false)
+        public double graceFactor = 1.2;
+
+        // Whether group-by is processed after the join
+        @Option(name = "-has-groupby", usage = "Whether to have group-by operation after join (default: disabled)", required = false)
+        public boolean hasGroupBy = false;
+    }
+
+    public static void main(String[] args) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
+
+        JobSpecification job = createJob(parseFileSplits(options.inFileCustomerSplits),
+                parseFileSplits(options.inFileOrderSplits), parseFileSplits(options.outFileSplits),
+                options.numJoinPartitions, options.algo, options.graceInputSize, options.graceRecordsPerFrame,
+                options.graceFactor, options.memSize, options.tableSize, options.hasGroupBy);
+
+        long start = System.currentTimeMillis();
+        JobId jobId = hcc.startJob(options.app, job,
+                options.profile ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
+        hcc.waitForCompletion(jobId);
+        long end = System.currentTimeMillis();
+        System.err.println(start + " " + end + " " + (end - start));
+    }
+
+    private static FileSplit[] parseFileSplits(String fileSplits) {
+        String[] splits = fileSplits.split(",");
+        FileSplit[] fSplits = new FileSplit[splits.length];
+        for (int i = 0; i < splits.length; ++i) {
+            String s = splits[i].trim();
+            int idx = s.indexOf(':');
+            if (idx < 0) {
+                throw new IllegalArgumentException("File split " + s + " not well formed");
+            }
+            fSplits[i] = new FileSplit(s.substring(0, idx), new FileReference(new File(s.substring(idx + 1))));
+        }
+        return fSplits;
+    }
+
+    private static JobSpecification createJob(FileSplit[] customerSplits, FileSplit[] orderSplits,
+            FileSplit[] resultSplits, int numJoinPartitions, String algo, int graceInputSize, int graceRecordsPerFrame,
+            double graceFactor, int memSize, int tableSize, boolean hasGroupBy) throws HyracksDataException {
+        JobSpecification spec = new JobSpecification();
+
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(customerSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(orderSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        createPartitionConstraint(spec, ordScanner, orderSplits);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        createPartitionConstraint(spec, custScanner, customerSplits);
+
+        IOperatorDescriptor join;
+
+        if ("nestedloop".equalsIgnoreCase(algo)) {
+            join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
+                    PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1), custOrderJoinDesc, memSize, false, null);
+
+        } else if ("gracehash".equalsIgnoreCase(algo)) {
+            join = new GraceHashJoinOperatorDescriptor(
+                    spec,
+                    memSize,
+                    graceInputSize,
+                    graceRecordsPerFrame,
+                    graceFactor,
+                    new int[] { 0 },
+                    new int[] { 1 },
+                    new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                            .of(UTF8StringPointable.FACTORY) },
+                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                    custOrderJoinDesc);
+
+        } else if ("hybridhash".equalsIgnoreCase(algo)) {
+            join = new HybridHashJoinOperatorDescriptor(
+                    spec,
+                    memSize,
+                    graceInputSize,
+                    graceRecordsPerFrame,
+                    graceFactor,
+                    new int[] { 0 },
+                    new int[] { 1 },
+                    new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                            .of(UTF8StringPointable.FACTORY) },
+                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                    custOrderJoinDesc);
+
+        } else {
+            join = new InMemoryHashJoinOperatorDescriptor(
+                    spec,
+                    new int[] { 0 },
+                    new int[] { 1 },
+                    new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                            .of(UTF8StringPointable.FACTORY) },
+                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                    custOrderJoinDesc, 6000000);
+        }
+
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, join, numJoinPartitions);
+
+        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
+
+        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }));
+        spec.connect(custJoinConn, custScanner, 0, join, 0);
+
+        IOperatorDescriptor endingOp = join;
+
+        if (hasGroupBy) {
+
+            RecordDescriptor groupResultDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                    UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+
+            HashGroupOperatorDescriptor gby = new HashGroupOperatorDescriptor(
+                    spec,
+                    new int[] { 6 },
+                    new FieldHashPartitionComputerFactory(new int[] { 6 },
+                            new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                    .of(UTF8StringPointable.FACTORY) }),
+                    new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                    new MultiFieldsAggregatorFactory(
+                            new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+                    groupResultDesc, 16);
+            createPartitionConstraint(spec, gby, resultSplits);
+
+            IConnectorDescriptor joinGroupConn = new MToNPartitioningConnectorDescriptor(spec,
+                    new FieldHashPartitionComputerFactory(new int[] { 6 },
+                            new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                    .of(UTF8StringPointable.FACTORY) }));
+            spec.connect(joinGroupConn, join, 0, gby, 0);
+
+            endingOp = gby;
+        }
+
+        IFileSplitProvider outSplitProvider = new ConstantFileSplitProvider(resultSplits);
+        FrameFileWriterOperatorDescriptor writer = new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
+        createPartitionConstraint(spec, writer, resultSplits);
+
+        IConnectorDescriptor endingPrinterConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(endingPrinterConn, endingOp, 0, writer, 0);
+
+        spec.addRoot(writer);
+        return spec;
+    }
+
+    private static void createPartitionConstraint(JobSpecification spec, IOperatorDescriptor op, FileSplit[] splits) {
+        String[] parts = new String[splits.length];
+        for (int i = 0; i < splits.length; ++i) {
+            parts[i] = splits[i].getNodeName();
+        }
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, op, parts);
+    }
+
+    static class JoinComparatorFactory implements ITuplePairComparatorFactory {
+        private static final long serialVersionUID = 1L;
+
+        private final IBinaryComparatorFactory bFactory;
+        private final int pos0;
+        private final int pos1;
+
+        public JoinComparatorFactory(IBinaryComparatorFactory bFactory, int pos0, int pos1) {
+            this.bFactory = bFactory;
+            this.pos0 = pos0;
+            this.pos1 = pos1;
+        }
+
+        @Override
+        public ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
+            return new JoinComparator(bFactory.createBinaryComparator(), pos0, pos1);
+        }
+    }
+
+    static class JoinComparator implements ITuplePairComparator {
+
+        private final IBinaryComparator bComparator;
+        private final int field0;
+        private final int field1;
+
+        public JoinComparator(IBinaryComparator bComparator, int field0, int field1) {
+            this.bComparator = bComparator;
+            this.field0 = field0;
+            this.field1 = field1;
+        }
+
+        @Override
+        public int compare(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1) {
+            int tStart0 = accessor0.getTupleStartOffset(tIndex0);
+            int fStartOffset0 = accessor0.getFieldSlotsLength() + tStart0;
+
+            int tStart1 = accessor1.getTupleStartOffset(tIndex1);
+            int fStartOffset1 = accessor1.getFieldSlotsLength() + tStart1;
+
+            int fStart0 = accessor0.getFieldStartOffset(tIndex0, field0);
+            int fEnd0 = accessor0.getFieldEndOffset(tIndex0, field0);
+            int fLen0 = fEnd0 - fStart0;
+
+            int fStart1 = accessor1.getFieldStartOffset(tIndex1, field1);
+            int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
+            int fLen1 = fEnd1 - fStart1;
+
+            int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
+                    .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+            if (c != 0) {
+                return c;
+            }
+            return 0;
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-hadoop-compat/pom.xml b/hyracks/hyracks-hadoop-compat/pom.xml
new file mode 100644
index 0000000..9a907b8
--- /dev/null
+++ b/hyracks/hyracks-hadoop-compat/pom.xml
@@ -0,0 +1,89 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-hadoop-compat</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-hadoop-compat</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.hadoop.compat.driver.CompatibilityLayer</mainClass>
+                  <name>hadoop-compat</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>org.apache.hadoop</groupId>
+  		<artifactId>hadoop-core</artifactId>
+  		<version>0.20.2</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+    <dependency>
+        <groupId>edu.uci.ics.dcache</groupId>
+        <artifactId>dcache-client</artifactId>
+        <version>0.0.1</version>
+        <scope>compile</scope>
+    </dependency>
+    <dependency>
+    	<groupId>edu.uci.ics.hyracks</groupId>
+    	<artifactId>hyracks-dataflow-hadoop</artifactId>
+    	<version>0.2.3-SNAPSHOT</version>
+    	<type>jar</type>
+    	<scope>compile</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-hadoop-compat/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-hadoop-compat/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-hadoop-compat/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-hadoop-compat/src/main/assembly/binary-assembly.xml
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksClient.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksClient.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksClient.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksClient.java
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/client/HyracksRunningJob.java
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/driver/CompatibilityLayer.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/driver/CompatibilityLayer.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/driver/CompatibilityLayer.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/driver/CompatibilityLayer.java
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/CompatibilityConfig.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/CompatibilityConfig.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/CompatibilityConfig.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/CompatibilityConfig.java
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/ConfigurationConstants.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/ConfigurationConstants.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/ConfigurationConstants.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/ConfigurationConstants.java
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/DCacheHandler.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/DCacheHandler.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/DCacheHandler.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/DCacheHandler.java
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/HadoopAdapter.java
diff --git a/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java b/hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java
similarity index 100%
rename from hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java
rename to hyracks/hyracks-hadoop-compat/src/main/java/edu/uci/ics/hyracks/hadoop/compat/util/Utilities.java
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
new file mode 100644
index 0000000..b33e8e2
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hyracks-hdfs-0.20.2</artifactId>
+	<name>hyracks-hdfs-0.20.2</name>
+	<parent>
+		<artifactId>hyracks-hdfs</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<profiles>
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
+			<id>hadoop-0.20.2</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-core</artifactId>
+					<version>0.20.2</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-test</artifactId>
+					<version>0.20.2</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+			</activation>
+			<id>hadoop-1.0.4</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-core</artifactId>
+					<version>1.0.4</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>1.0.4</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-test</artifactId>
+					<version>1.0.4</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+	</profiles>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
new file mode 100644
index 0000000..a2b16c6
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * The wrapper to generate a TaskAttemptContext for the Hadoop 0.20.2 API.
+ */
+public class ContextFactory {
+
+    /**
+     * Creates a TaskAttemptContext for the given configuration and input split.
+     *
+     * @param conf the Hadoop configuration backing the context
+     * @param split the input split passed through to the mapper context
+     * @return a freshly constructed TaskAttemptContext
+     * @throws HyracksDataException if constructing the context fails
+     */
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    public TaskAttemptContext createContext(Configuration conf, InputSplit split) throws HyracksDataException {
+        try {
+            // 0.20.2-era trick: instantiate the mapper's inner Context class directly
+            // (hence the raw/unchecked warnings); the nulls are the reader/writer/
+            // committer/reporter slots that are not needed here.
+            return new Mapper().new Context(conf, new TaskAttemptID(), null, null, null, null, split);
+        } catch (Exception e) {
+            // Normalize any construction failure into the Hyracks exception type.
+            throw new HyracksDataException(e);
+        }
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
new file mode 100644
index 0000000..9133d35
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class MiniDFSClusterFactory {
+
+    public MiniDFSCluster getMiniDFSCluster(Configuration conf, int numberOfNC) throws HyracksDataException {
+        try {
+            return new MiniDFSCluster(conf, numberOfNC, true, null);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
new file mode 100644
index 0000000..07b244f
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
@@ -0,0 +1,123 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hyracks-hdfs-0.23.1</artifactId>
+	<name>hyracks-hdfs-0.23.1</name>
+	<parent>
+		<artifactId>hyracks-hdfs</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<profiles>
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
+			<id>hadoop-0.23.1</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-mapreduce-client-core</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<id>hadoop-0.23.6</id>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+			</activation>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-mapreduce-client-core</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+	</profiles>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
new file mode 100644
index 0000000..60ae5d3
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
@@ -0,0 +1,24 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * The wrapper to generate a TaskAttemptContext for the Hadoop 0.23.x API.
+ */
+public class ContextFactory {
+
+    /**
+     * Creates a TaskAttemptContext for the given configuration.
+     *
+     * @param conf the Hadoop configuration backing the context
+     * @param split unused here; presumably retained so this class stays
+     *            signature-compatible with the hadoop-0.20.2 ContextFactory — confirm
+     * @return a freshly constructed TaskAttemptContext
+     * @throws HyracksDataException if constructing the context fails
+     */
+    public TaskAttemptContext createContext(Configuration conf, InputSplit split) throws HyracksDataException {
+        try {
+            return new TaskAttemptContextImpl(conf, new TaskAttemptID());
+        } catch (Exception e) {
+            // Normalize any construction failure into the Hyracks exception type.
+            throw new HyracksDataException(e);
+        }
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
new file mode 100644
index 0000000..ded75f1
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class MiniDFSClusterFactory {
+
+    /**
+     * Starts an in-process mini HDFS cluster (Hadoop 0.23.x builder-based API).
+     *
+     * @param conf the Hadoop configuration to boot the cluster with
+     * @param numberOfNC the number of data nodes to start
+     * @return the running mini cluster
+     * @throws HyracksDataException if the cluster cannot be built
+     */
+    public MiniDFSCluster getMiniDFSCluster(Configuration conf, int numberOfNC) throws HyracksDataException {
+        try {
+            return new MiniDFSCluster.Builder(conf).numDataNodes(numberOfNC).build();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
new file mode 100644
index 0000000..6557b08
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -0,0 +1,166 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hyracks-hdfs-core</artifactId>
+	<name>hyracks-hdfs-core</name>
+	<parent>
+		<artifactId>hyracks-hdfs</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+								<include>edu.uci.*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<profiles>
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
+			<id>hadoop-0.20.2</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.20.2</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+			</activation>
+			<id>hadoop-1.0.4</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.20.2</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+			</activation>
+			<id>hadoop-0.23.1</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+			</activation>
+			<id>hadoop-0.23.6</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+	</profiles>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>3.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-io</artifactId>
+			<version>1.3.2</version>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParser.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParser.java
new file mode 100644
index 0000000..5923e1e
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParser.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Users need to implement this interface to use the HDFSReadOperatorDescriptor.
+ * 
+ * @param <K>
+ *            the key type
+ * @param <V>
+ *            the value type
+ */
+public interface IKeyValueParser<K, V> {
+
+    /**
+     * Parse a key-value pair returned by HDFS record reader to a tuple.
+     * when the parsers' internal buffer is full, it can flush the buffer to the writer
+     * 
+     * @param key
+     *            the key produced by the HDFS record reader
+     * @param value
+     *            the value produced by the HDFS record reader
+     * @param writer
+     *            the frame writer that receives the parsed tuples
+     * @throws HyracksDataException
+     */
+    public void parse(K key, V value, IFrameWriter writer) throws HyracksDataException;
+
+    /**
+     * Flush the residual tuples in the internal buffer to the writer.
+     * This method is called in the close() of HDFSReadOperatorDescriptor.
+     * 
+     * @param writer
+     *            the frame writer that receives the remaining tuples
+     * @throws HyracksDataException
+     */
+    public void flush(IFrameWriter writer) throws HyracksDataException;
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParserFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParserFactory.java
new file mode 100644
index 0000000..6e943ad
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParserFactory.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+/**
+ * Users need to implement this interface to use the HDFSReadOperatorDescriptor.
+ * The factory extends Serializable — presumably so it can be shipped inside the
+ * job descriptor to remote nodes; confirm against the operator's usage.
+ * 
+ * @param <K>
+ *            the key type
+ * @param <V>
+ *            the value type
+ */
+public interface IKeyValueParserFactory<K, V> extends Serializable {
+
+    /**
+     * This method creates a key-value parser.
+     * 
+     * @param ctx
+     *            the IHyracksTaskContext
+     * @return a key-value parser instance.
+     */
+    public IKeyValueParser<K, V> createKeyValueParser(IHyracksTaskContext ctx);
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriter.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriter.java
new file mode 100644
index 0000000..25b9523
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriter.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Users need to implement this interface to use the HDFSWriteOperatorDescriptor.
+ */
+public interface ITupleWriter {
+
+    /**
+     * Write the tuple to the DataOutput.
+     * 
+     * @param output
+     *            the DataOutput channel
+     * @param tuple
+     *            the tuple to write
+     * @throws HyracksDataException
+     *             if writing the tuple fails
+     */
+    public void write(DataOutput output, ITupleReference tuple) throws HyracksDataException;
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriterFactory.java
new file mode 100644
index 0000000..839de8f
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriterFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import java.io.Serializable;
+
+/**
+ * Users need to implement this interface to use the HDFSWriteOperatorDescriptor.
+ * The factory extends Serializable — presumably so it can be shipped inside the
+ * job descriptor to remote nodes; confirm against the operator's usage.
+ */
+public interface ITupleWriterFactory extends Serializable {
+
+    /**
+     * @return a tuple writer instance
+     */
+    public ITupleWriter getTupleWriter();
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/ConfFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/ConfFactory.java
new file mode 100644
index 0000000..4fa0164
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/ConfFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.Serializable;
+
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+@SuppressWarnings("deprecation")
+public class ConfFactory implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    // Writable-serialized snapshot of the JobConf; a byte[] survives Java
+    // serialization even though JobConf itself is not Serializable.
+    private final byte[] confBytes;
+
+    /**
+     * Captures the given JobConf as a byte array so it can travel with this factory.
+     *
+     * @param conf the job configuration to snapshot
+     * @throws HyracksDataException if serializing the configuration fails
+     */
+    public ConfFactory(JobConf conf) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            conf.write(dos);
+            // Close (and thereby flush) the stream BEFORE taking the snapshot so
+            // no bytes can be lost; the original snapshotted first, then closed.
+            dos.close();
+            confBytes = bos.toByteArray();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Rebuilds a fresh JobConf from the captured bytes.
+     *
+     * @return a new JobConf equivalent to the one passed to the constructor
+     * @throws HyracksDataException if deserializing the configuration fails
+     */
+    public JobConf getConf() throws HyracksDataException {
+        try {
+            JobConf conf = new JobConf();
+            DataInputStream dis = new DataInputStream(new ByteArrayInputStream(confBytes));
+            conf.readFields(dis);
+            dis.close();
+            return conf;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
new file mode 100644
index 0000000..e924650
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParserFactory;
+
+/**
+ * The HDFS file read operator using the Hadoop old API.
+ * To use this operator, a user needs to provide an IKeyValueParserFactory implementation which converts
+ * key-value pairs into tuples.
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
+public class HDFSReadOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private final ConfFactory confFactory;
+    private final InputSplitsFactory splitsFactory;
+    private final String[] scheduledLocations;
+    private final IKeyValueParserFactory tupleParserFactory;
+    /** Per-descriptor flags marking which splits have already been consumed; shared by all local partitions. */
+    private final boolean[] executed;
+
+    /**
+     * The constructor of HDFSReadOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param rd
+     *            the output record descriptor
+     * @param conf
+     *            the Hadoop JobConf object, which contains the input format and the input paths
+     * @param splits
+     *            the array of FileSplits (HDFS chunks).
+     * @param scheduledLocations
+     *            the node controller names to scan the FileSplits, which is a one-to-one mapping. The String array
+     *            is obtained from edu.uci.ics.hyracks.hdfs.scheduler.Scheduler.getLocationConstraints(InputSplit[]).
+     * @param tupleParserFactory
+     *            the IKeyValueParserFactory implementation instance.
+     * @throws HyracksException
+     */
+    public HDFSReadOperatorDescriptor(JobSpecification spec, RecordDescriptor rd, JobConf conf, InputSplit[] splits,
+            String[] scheduledLocations, IKeyValueParserFactory tupleParserFactory) throws HyracksException {
+        super(spec, 0, 1);
+        try {
+            this.splitsFactory = new InputSplitsFactory(splits);
+            this.confFactory = new ConfFactory(conf);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+        this.scheduledLocations = scheduledLocations;
+        this.executed = new boolean[scheduledLocations.length];
+        Arrays.fill(executed, false);
+        this.tupleParserFactory = tupleParserFactory;
+        this.recordDescriptors[0] = rd;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+        final InputSplit[] inputSplits = splitsFactory.getSplits();
+
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
+            private String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+
+            @SuppressWarnings("unchecked")
+            @Override
+            public void initialize() throws HyracksDataException {
+                ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
+                try {
+                    // Hadoop resolves its classes through the context class loader.
+                    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+                    JobConf conf = confFactory.getConf();
+                    IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
+                    writer.open();
+                    InputFormat inputFormat = conf.getInputFormat();
+                    for (int i = 0; i < inputSplits.length; i++) {
+                        /**
+                         * read all the partitions scheduled to the current node
+                         */
+                        if (scheduledLocations[i].equals(nodeName)) {
+                            /**
+                             * pick an unread split to read;
+                             * synchronize among simultaneous partitions on the same machine
+                             */
+                            synchronized (executed) {
+                                if (executed[i]) {
+                                    continue;
+                                }
+                                executed[i] = true;
+                            }
+
+                            /**
+                             * read the split
+                             */
+                            RecordReader reader = inputFormat.getRecordReader(inputSplits[i], conf, Reporter.NULL);
+                            try {
+                                Object key = reader.createKey();
+                                Object value = reader.createValue();
+                                while (reader.next(key, value)) {
+                                    parser.parse(key, value, writer);
+                                }
+                            } finally {
+                                // always release the reader, even if parsing fails
+                                reader.close();
+                            }
+                        }
+                    }
+                    parser.flush(writer);
+                    writer.close();
+                } catch (Exception e) {
+                    // notify the downstream writer of the failure before rethrowing
+                    writer.fail();
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+        };
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSWriteOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSWriteOperatorDescriptor.java
new file mode 100644
index 0000000..ff97a29
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSWriteOperatorDescriptor.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriter;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriterFactory;
+
+/**
+ * The HDFS file write operator using the Hadoop old API.
+ * To use this operator, a user needs to provide an ITupleWriterFactory.
+ * Each partition writes an independent file named "part-&lt;partition&gt;" under the
+ * output directory configured in the JobConf.
+ */
+@SuppressWarnings("deprecation")
+public class HDFSWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    /** Serializable carrier for the JobConf (JobConf itself is not Serializable). */
+    private final ConfFactory confFactory;
+    private final ITupleWriterFactory tupleWriterFactory;
+
+    /**
+     * The constructor of HDFSWriteOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param conf
+     *            the Hadoop JobConf which contains the output path
+     * @param tupleWriterFactory
+     *            the ITupleWriterFactory implementation object
+     * @throws HyracksException
+     */
+    public HDFSWriteOperatorDescriptor(JobSpecification spec, JobConf conf, ITupleWriterFactory tupleWriterFactory)
+            throws HyracksException {
+        super(spec, 1, 0);
+        this.confFactory = new ConfFactory(conf);
+        this.tupleWriterFactory = tupleWriterFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+
+            private FSDataOutputStream dos;
+            private RecordDescriptor inputRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            private FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRd);
+            private FrameTupleReference tuple = new FrameTupleReference();
+            private ITupleWriter tupleWriter;
+            /** Saved so close() can restore the caller's context class loader. */
+            private ClassLoader ctxCL;
+
+            @Override
+            public void open() throws HyracksDataException {
+                // Hadoop resolves its classes through the context class loader.
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                JobConf conf = confFactory.getConf();
+                String outputDirPath = FileOutputFormat.getOutputPath(conf).toString();
+                String fileName = outputDirPath + File.separator + "part-" + partition;
+
+                tupleWriter = tupleWriterFactory.getTupleWriter();
+                try {
+                    FileSystem dfs = FileSystem.get(conf);
+                    // overwrite any existing file for this partition
+                    dos = dfs.create(new Path(fileName), true);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                accessor.reset(buffer);
+                int tupleCount = accessor.getTupleCount();
+                for (int i = 0; i < tupleCount; i++) {
+                    tuple.reset(accessor, i);
+                    tupleWriter.write(dos, tuple);
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                // nothing is buffered locally; close() still runs to release the stream
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                try {
+                    // dos may be null if open() failed before the stream was created
+                    if (dos != null) {
+                        dos.close();
+                    }
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+
+        };
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/InputSplitsFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/InputSplitsFactory.java
new file mode 100644
index 0000000..9cc9ebc
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/InputSplitsFactory.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.reflect.Constructor;
+
+import org.apache.hadoop.mapred.InputSplit;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A serializable factory for Hadoop old-API {@link InputSplit}s: the splits are
+ * captured as Writable bytes at construction time and re-materialized on demand,
+ * so they can be shipped to remote node controllers.
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
+public class InputSplitsFactory implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+    /** The Writable-serialized splits, preceded by their count. */
+    private byte[] splitBytes;
+    /** Concrete class name of the splits; null when the split array is empty. */
+    private String splitClassName;
+
+    public InputSplitsFactory(InputSplit[] splits) throws HyracksDataException {
+        splitBytes = splitsToBytes(splits);
+        if (splits.length > 0) {
+            splitClassName = splits[0].getClass().getName();
+        }
+    }
+
+    public InputSplit[] getSplits() throws HyracksDataException {
+        return bytesToSplits(splitBytes);
+    }
+
+    /**
+     * Convert splits to bytes.
+     * 
+     * @param splits
+     *            input splits
+     * @return bytes which serialize the splits, preceded by the split count
+     * @throws HyracksDataException
+     */
+    private byte[] splitsToBytes(InputSplit[] splits) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            dos.writeInt(splits.length);
+            for (int i = 0; i < splits.length; i++) {
+                splits[i].write(dos);
+            }
+            dos.close();
+            return bos.toByteArray();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Convert bytes back to splits, instantiating them reflectively through the
+     * split class's no-argument constructor.
+     * 
+     * @param bytes
+     *            the serialized form produced by splitsToBytes
+     * @return the deserialized splits (empty array when none were captured)
+     * @throws HyracksDataException
+     */
+    private InputSplit[] bytesToSplits(byte[] bytes) throws HyracksDataException {
+        try {
+            ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
+            DataInputStream dis = new DataInputStream(bis);
+            int size = dis.readInt();
+            InputSplit[] splits = new InputSplit[size];
+            if (size == 0) {
+                // no splits were captured; splitClassName is null, so skip reflection
+                dis.close();
+                return splits;
+            }
+            Class splitClass = Class.forName(splitClassName);
+            Constructor defaultConstructor = null;
+            for (Constructor constructor : splitClass.getDeclaredConstructors()) {
+                if (constructor.getParameterTypes().length == 0) {
+                    constructor.setAccessible(true);
+                    defaultConstructor = constructor;
+                    break;
+                }
+            }
+            if (defaultConstructor == null) {
+                throw new HyracksDataException("split class " + splitClassName + " has no default constructor");
+            }
+            for (int i = 0; i < size; i++) {
+                splits[i] = (InputSplit) defaultConstructor.newInstance();
+                splits[i].readFields(dis);
+            }
+            dis.close();
+            return splits;
+        } catch (HyracksDataException e) {
+            throw e;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryComparatorFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryComparatorFactory.java
new file mode 100644
index 0000000..90c5977
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryComparatorFactory.java
@@ -0,0 +1,32 @@
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+/**
+ * A comparator factory that orders byte regions lexicographically by their raw
+ * (signed) byte values; ties on the common prefix are broken by length, the
+ * shorter region sorting first.
+ */
+public class RawBinaryComparatorFactory implements IBinaryComparatorFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    /** Stateless singleton; final so the shared instance cannot be reassigned. */
+    public static final IBinaryComparatorFactory INSTANCE = new RawBinaryComparatorFactory();
+
+    private RawBinaryComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                int commonLength = Math.min(l1, l2);
+                for (int i = 0; i < commonLength; i++) {
+                    if (b1[s1 + i] != b2[s2 + i]) {
+                        // bytes compared as signed values; the difference always fits in an int
+                        return b1[s1 + i] - b2[s2 + i];
+                    }
+                }
+                // equal common prefix: order by length
+                int difference = l1 - l2;
+                return difference == 0 ? 0 : (difference > 0 ? 1 : -1);
+            }
+
+        };
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryHashFunctionFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..7895fec
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryHashFunctionFactory.java
@@ -0,0 +1,29 @@
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+/**
+ * A hash function factory that hashes raw bytes with the conventional
+ * polynomial rolling hash (multiplier 31), analogous to String.hashCode().
+ */
+public class RawBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    /** Stateless singleton; final so the shared instance cannot be reassigned. */
+    public static final IBinaryHashFunctionFactory INSTANCE = new RawBinaryHashFunctionFactory();
+
+    private RawBinaryHashFunctionFactory() {
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+
+        return new IBinaryHashFunction() {
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                int value = 1;
+                int end = offset + length;
+                for (int i = offset; i < end; i++) {
+                    // bytes are sign-extended, matching the original behavior
+                    value = value * 31 + (int) bytes[i];
+                }
+                return value;
+            }
+        };
+    }
+
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextKeyValueParserFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextKeyValueParserFactory.java
new file mode 100644
index 0000000..c691f5d
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextKeyValueParserFactory.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParserFactory;
+
+/**
+ * A key-value parser factory producing parsers that emit one single-field tuple
+ * per input record: the field is the raw bytes of the {@link Text} value; the
+ * {@link LongWritable} key is ignored.
+ */
+public class TextKeyValueParserFactory implements IKeyValueParserFactory<LongWritable, Text> {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public IKeyValueParser<LongWritable, Text> createKeyValueParser(final IHyracksTaskContext ctx) {
+
+        // One output frame per parser instance, reused across tuples.
+        final ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+        final ByteBuffer buffer = ctx.allocateFrame();
+        final FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        appender.reset(buffer, true);
+
+        return new IKeyValueParser<LongWritable, Text>() {
+
+            @Override
+            public void parse(LongWritable key, Text value, IFrameWriter writer) throws HyracksDataException {
+                tb.reset();
+                tb.addField(value.getBytes(), 0, value.getLength());
+                // If the frame is full, push it downstream and retry on an empty frame.
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    FrameUtils.flushFrame(buffer, writer);
+                    appender.reset(buffer, true);
+                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                        // A single tuple larger than one frame can never be appended.
+                        throw new HyracksDataException("tuple cannot be appended into the frame");
+                    }
+                }
+            }
+
+            @Override
+            public void flush(IFrameWriter writer) throws HyracksDataException {
+                // Push the final, possibly partially-filled frame downstream.
+                FrameUtils.flushFrame(buffer, writer);
+            }
+
+        };
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextTupleWriterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextTupleWriterFactory.java
new file mode 100644
index 0000000..d26721d
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextTupleWriterFactory.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriter;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriterFactory;
+
+/**
+ * A tuple writer factory whose writers emit field 0 of each tuple verbatim,
+ * followed by a newline — i.e. plain text output, one tuple per line.
+ */
+public class TextTupleWriterFactory implements ITupleWriterFactory {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public ITupleWriter getTupleWriter() {
+        return new ITupleWriter() {
+            // '\n' directly, avoiding a platform-default-charset String.getBytes() call
+            private final byte newLine = '\n';
+
+            @Override
+            public void write(DataOutput output, ITupleReference tuple) throws HyracksDataException {
+                byte[] data = tuple.getFieldData(0);
+                int start = tuple.getFieldStart(0);
+                int len = tuple.getFieldLength(0);
+                try {
+                    output.write(data, start, len);
+                    output.writeByte(newLine);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+        };
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/Scheduler.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/Scheduler.java
new file mode 100644
index 0000000..e7309d4
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/Scheduler.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.scheduler;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.mapred.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+
+/**
+ * The scheduler conducts data-local scheduling for data reads on HDFS: each
+ * input split is assigned, when possible, to a node controller (NC) running on
+ * a host returned by the split's getLocations().
+ * This class works for the Hadoop old API.
+ */
+@SuppressWarnings("deprecation")
+public class Scheduler {
+
+    /** a list of NCs */
+    private String[] NCs;
+
+    /** a map from ip to NCs */
+    private Map<String, List<String>> ipToNcMapping = new HashMap<String, List<String>>();
+
+    /** a map from the NC name to the index */
+    private Map<String, Integer> ncNameToIndex = new HashMap<String, Integer>();
+
+    /**
+     * Create a scheduler by connecting to the cluster controller and discovering
+     * the available node controllers.
+     * 
+     * @param ipAddress
+     *            the IP address of the cluster controller
+     * @param port
+     *            the port of the cluster controller
+     * @throws HyracksException
+     */
+    public Scheduler(String ipAddress, int port) throws HyracksException {
+        try {
+            IHyracksClientConnection hcc = new HyracksConnection(ipAddress, port);
+            Map<String, NodeControllerInfo> ncNameToNcInfos = hcc.getNodeControllerInfos();
+            loadIPAddressToNCMap(ncNameToNcInfos);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    /**
+     * Create a scheduler from an already-known node controller map.
+     * 
+     * @param ncNameToNcInfos
+     *            a map from NC name to its NodeControllerInfo
+     * @throws HyracksException
+     */
+    public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
+        loadIPAddressToNCMap(ncNameToNcInfos);
+    }
+
+    /**
+     * Set location constraints for a file scan operator with a list of file splits.
+     * Each NC receives at most ceil(splits.length / NCs.length) splits; splits with
+     * no data-local NC are assigned to the NCs that still have spare capacity.
+     * 
+     * @param splits
+     *            the input splits to schedule
+     * @return one NC name per split, aligned with the input array
+     * @throws HyracksException
+     */
+    public String[] getLocationConstraints(InputSplit[] splits) throws HyracksException {
+        int[] capacity = new int[NCs.length];
+        Arrays.fill(capacity, 0);
+        String[] locations = new String[splits.length];
+        // slots = ceil(splits.length / NCs.length): the per-NC assignment cap
+        int slots = splits.length % capacity.length == 0 ? (splits.length / capacity.length) : (splits.length
+                / capacity.length + 1);
+
+        try {
+            Random random = new Random(System.currentTimeMillis());
+            boolean scheduled[] = new boolean[splits.length];
+            Arrays.fill(scheduled, false);
+
+            for (int i = 0; i < splits.length; i++) {
+                /**
+                 * get the location (host) names of each split
+                 */
+                String[] loc = splits[i].getLocations();
+                if (loc.length > 0) {
+                    for (int j = 0; j < loc.length; j++) {
+                        /**
+                         * get all the IP addresses for the host name
+                         */
+                        InetAddress[] allIps = InetAddress.getAllByName(loc[j]);
+                        /**
+                         * iterate over all the IPs
+                         */
+                        for (InetAddress ip : allIps) {
+                            /**
+                             * if a node controller runs on this IP
+                             */
+                            if (ipToNcMapping.get(ip.getHostAddress()) != null) {
+                                /**
+                                 * pick a random NC on that machine to spread the load
+                                 */
+                                List<String> dataLocations = ipToNcMapping.get(ip.getHostAddress());
+                                int arrayPos = random.nextInt(dataLocations.size());
+                                String nc = dataLocations.get(arrayPos);
+                                int pos = ncNameToIndex.get(nc);
+                                /**
+                                 * check if the node is already full
+                                 */
+                                if (capacity[pos] < slots) {
+                                    locations[i] = nc;
+                                    capacity[pos]++;
+                                    scheduled[i] = true;
+                                }
+                            }
+                        }
+
+                        /**
+                         * break the loop over data-locations once a schedule has been found
+                         */
+                        if (scheduled[i] == true) {
+                            break;
+                        }
+                    }
+                }
+            }
+
+            /**
+             * find the lowest-index NC that still has available capacity
+             */
+            int currentAvailableNC = 0;
+            for (int i = 0; i < capacity.length; i++) {
+                if (capacity[i] < slots) {
+                    currentAvailableNC = i;
+                    break;
+                }
+            }
+
+            /**
+             * schedule non-local file reads
+             */
+            for (int i = 0; i < splits.length; i++) {
+                // if there is no data-local NC choice, use the current NC with spare capacity
+                if (!scheduled[i]) {
+                    locations[i] = NCs[currentAvailableNC];
+                    capacity[currentAvailableNC]++;
+                    scheduled[i] = true;
+
+                    /**
+                     * move the available NC cursor to the next one
+                     */
+                    for (int j = currentAvailableNC; j < capacity.length; j++) {
+                        if (capacity[j] < slots) {
+                            currentAvailableNC = j;
+                            break;
+                        }
+                    }
+                }
+            }
+            return locations;
+        } catch (IOException e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    /**
+     * Load the IP-address-to-NC map from the NCNameToNCInfoMap.
+     * 
+     * @param ncNameToNcInfos
+     *            a map from NC name to its NodeControllerInfo
+     * @throws HyracksException
+     */
+    private void loadIPAddressToNCMap(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
+        try {
+            NCs = new String[ncNameToNcInfos.size()];
+            int i = 0;
+
+            /**
+             * build the IP address to NC map
+             */
+            for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
+                String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().getIpAddress())
+                        .getHostAddress();
+                List<String> matchedNCs = ipToNcMapping.get(ipAddr);
+                if (matchedNCs == null) {
+                    matchedNCs = new ArrayList<String>();
+                    ipToNcMapping.put(ipAddr, matchedNCs);
+                }
+                matchedNCs.add(entry.getKey());
+                NCs[i] = entry.getKey();
+                i++;
+            }
+
+            /**
+             * set up the NC name to index mapping
+             */
+            for (i = 0; i < NCs.length; i++) {
+                ncNameToIndex.put(NCs[i], i);
+            }
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/ConfFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/ConfFactory.java
new file mode 100644
index 0000000..d843d27
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/ConfFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.Serializable;
+
+import org.apache.hadoop.mapreduce.Job;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A serializable wrapper that captures a Hadoop (new API) Job configuration as
+ * bytes so it can be shipped to remote node controllers and re-materialized
+ * there via {@link #getConf()}.
+ */
+public class ConfFactory implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    /** The serialized bytes of the wrapped job's Configuration. */
+    private final byte[] confBytes;
+
+    /**
+     * Serialize the configuration of the given job.
+     *
+     * @param conf
+     *            the Hadoop Job whose Configuration is captured
+     * @throws HyracksDataException
+     *             if the configuration cannot be serialized
+     */
+    public ConfFactory(Job conf) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            conf.getConfiguration().write(dos);
+            dos.close();
+            // snapshot the bytes only after close() has flushed the stream
+            confBytes = bos.toByteArray();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Re-materialize a Job whose Configuration equals the one captured at
+     * construction time.
+     *
+     * @return a freshly constructed Job populated from the serialized bytes
+     * @throws HyracksDataException
+     *             if deserialization fails
+     */
+    public Job getConf() throws HyracksDataException {
+        try {
+            Job conf = new Job();
+            DataInputStream dis = new DataInputStream(new ByteArrayInputStream(confBytes));
+            conf.getConfiguration().readFields(dis);
+            dis.close();
+            return conf;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/FileSplitsFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/FileSplitsFactory.java
new file mode 100644
index 0000000..14dc70c
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/FileSplitsFactory.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+@SuppressWarnings("rawtypes")
+public class FileSplitsFactory implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    /** The serialized form of the input splits. */
+    private byte[] splitBytes;
+
+    /** Concrete FileSplit subclass name; remains null when the split list is empty. */
+    private String splitClassName;
+
+    /**
+     * Capture the given splits in a serializable form.
+     *
+     * @param splits
+     *            the (possibly empty) list of input splits
+     * @throws HyracksDataException
+     *             if serialization fails
+     */
+    public FileSplitsFactory(List<FileSplit> splits) throws HyracksDataException {
+        splitBytes = splitsToBytes(splits);
+        if (splits.size() > 0) {
+            splitClassName = splits.get(0).getClass().getName();
+        }
+    }
+
+    /**
+     * @return the deserialized list of file splits
+     * @throws HyracksDataException
+     *             if deserialization fails
+     */
+    public List<FileSplit> getSplits() throws HyracksDataException {
+        return bytesToSplits(splitBytes);
+    }
+
+    /**
+     * Convert splits to bytes.
+     * 
+     * @param splits
+     *            input splits
+     * @return bytes which serialize the splits
+     * @throws HyracksDataException
+     *             if any split fails to write itself
+     */
+    private byte[] splitsToBytes(List<FileSplit> splits) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            // write the count first so bytesToSplits knows how many to read back
+            dos.writeInt(splits.size());
+            int size = splits.size();
+            for (int i = 0; i < size; i++) {
+                splits.get(i).write(dos);
+            }
+            dos.close();
+            return bos.toByteArray();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Convert bytes back to splits.
+     * 
+     * @param bytes
+     *            the serialized splits produced by splitsToBytes
+     * @return the deserialized list of splits (empty if the factory captured no splits)
+     * @throws HyracksDataException
+     *             if the split class cannot be instantiated or a split fails to read
+     */
+    private List<FileSplit> bytesToSplits(byte[] bytes) throws HyracksDataException {
+        try {
+            List<FileSplit> splits = new ArrayList<FileSplit>();
+            if (splitClassName == null) {
+                // the factory was built from an empty list; nothing to deserialize
+                return splits;
+            }
+            Class splitClass = Class.forName(splitClassName);
+            Constructor defaultConstructor = null;
+            for (Constructor constructor : splitClass.getDeclaredConstructors()) {
+                if (constructor.getParameterTypes().length == 0) {
+                    constructor.setAccessible(true);
+                    defaultConstructor = constructor;
+                    break;
+                }
+            }
+            if (defaultConstructor == null) {
+                // fail loudly instead of NPE-ing on newInstance() below
+                throw new IllegalStateException("split class " + splitClassName + " has no default constructor");
+            }
+            ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
+            DataInputStream dis = new DataInputStream(bis);
+            int size = dis.readInt();
+            for (int i = 0; i < size; i++) {
+                splits.add((FileSplit) defaultConstructor.newInstance());
+                splits.get(i).readFields(dis);
+            }
+            dis.close();
+            return splits;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
new file mode 100644
index 0000000..90f5603
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParserFactory;
+
+/**
+ * The HDFS file read operator using the Hadoop new API. To use this operator, a
+ * user need to provide an IKeyValueParserFactory implementation which convert
+ * key-value pairs into tuples.
+ */
+@SuppressWarnings("rawtypes")
+public class HDFSReadOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    /** Serializable carrier for the Hadoop job configuration. */
+    private final ConfFactory confFactory;
+    /** Serializable carrier for the list of input splits. */
+    private final FileSplitsFactory splitsFactory;
+    /** NC name per split, one-to-one with the split list. */
+    private final String[] scheduledLocations;
+    private final IKeyValueParserFactory tupleParserFactory;
+    /** Shared claim flags so co-located partitions each read a split at most once. */
+    private final boolean[] executed;
+
+    /**
+     * The constructor of HDFSReadOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param rd
+     *            the output record descriptor
+     * @param conf
+     *            the Hadoop JobConf object, which contains the input format and
+     *            the input paths
+     * @param splits
+     *            the array of FileSplits (HDFS chunks).
+     * @param scheduledLocations
+     *            the node controller names to scan the FileSplits, which is an
+     *            one-to-one mapping. The String array is obtained from the
+     *            edu.uci
+     *            .ics.hyracks.hdfs.scheduler.Scheduler.getLocationConstraints
+     *            (InputSplits[]).
+     * @param tupleParserFactory
+     *            the ITupleParserFactory implementation instance.
+     * @throws HyracksException
+     */
+    public HDFSReadOperatorDescriptor(JobSpecification spec, RecordDescriptor rd, Job conf, List<InputSplit> splits,
+            String[] scheduledLocations, IKeyValueParserFactory tupleParserFactory) throws HyracksException {
+        super(spec, 0, 1);
+        try {
+            List<FileSplit> fileSplits = new ArrayList<FileSplit>();
+            for (int i = 0; i < splits.size(); i++) {
+                fileSplits.add((FileSplit) splits.get(i));
+            }
+            this.splitsFactory = new FileSplitsFactory(fileSplits);
+            this.confFactory = new ConfFactory(conf);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+        this.scheduledLocations = scheduledLocations;
+        this.executed = new boolean[scheduledLocations.length];
+        Arrays.fill(executed, false);
+        this.tupleParserFactory = tupleParserFactory;
+        this.recordDescriptors[0] = rd;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+        final List<FileSplit> inputSplits = splitsFactory.getSplits();
+
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
+            private String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+            private ContextFactory ctxFactory = new ContextFactory();
+
+            @SuppressWarnings("unchecked")
+            @Override
+            public void initialize() throws HyracksDataException {
+                ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
+                try {
+                    // Hadoop resolves classes through the context class loader
+                    Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                    Job job = confFactory.getConf();
+                    IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
+                    writer.open();
+                    InputFormat inputFormat = ReflectionUtils.newInstance(job.getInputFormatClass(),
+                            job.getConfiguration());
+                    int size = inputSplits.size();
+                    for (int i = 0; i < size; i++) {
+                        /**
+                         * read all the partitions scheduled to the current node
+                         */
+                        if (scheduledLocations[i].equals(nodeName)) {
+                            /**
+                             * pick an unread split to read; synchronize among
+                             * simultaneous partitions in the same machine
+                             */
+                            synchronized (executed) {
+                                if (executed[i] == false) {
+                                    executed[i] = true;
+                                } else {
+                                    continue;
+                                }
+                            }
+
+                            /**
+                             * read the split
+                             */
+                            TaskAttemptContext context = ctxFactory.createContext(job.getConfiguration(),
+                                    inputSplits.get(i));
+                            RecordReader reader = inputFormat.createRecordReader(inputSplits.get(i), context);
+                            try {
+                                reader.initialize(inputSplits.get(i), context);
+                                while (reader.nextKeyValue()) {
+                                    parser.parse(reader.getCurrentKey(), reader.getCurrentValue(), writer);
+                                }
+                            } finally {
+                                // close the per-split reader even if parsing fails
+                                reader.close();
+                            }
+                        }
+                    }
+                    parser.flush(writer);
+                    writer.close();
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+        };
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSWriteOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSWriteOperatorDescriptor.java
new file mode 100644
index 0000000..390a7b5
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSWriteOperatorDescriptor.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriter;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriterFactory;
+
+/**
+ * The HDFS file write operator using the Hadoop new API.
+ * To use this operator, a user need to provide an ITupleWriterFactory.
+ */
+public class HDFSWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    /** Serializable carrier for the Hadoop job configuration (holds the output path). */
+    private ConfFactory confFactory;
+    private ITupleWriterFactory tupleWriterFactory;
+
+    /**
+     * The constructor of HDFSWriteOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param conf
+     *            the Hadoop JobConf which contains the output path
+     * @param tupleWriterFactory
+     *            the ITupleWriterFactory implementation object
+     * @throws HyracksException
+     */
+    public HDFSWriteOperatorDescriptor(JobSpecification spec, Job conf, ITupleWriterFactory tupleWriterFactory)
+            throws HyracksException {
+        super(spec, 1, 0);
+        this.confFactory = new ConfFactory(conf);
+        this.tupleWriterFactory = tupleWriterFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+
+            private FSDataOutputStream dos;
+            private RecordDescriptor inputRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            private FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRd);
+            private FrameTupleReference tuple = new FrameTupleReference();
+            private ITupleWriter tupleWriter;
+            private ClassLoader ctxCL;
+
+            @Override
+            public void open() throws HyracksDataException {
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                // Hadoop resolves classes through the context class loader
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                Job conf = confFactory.getConf();
+                String outputPath = FileOutputFormat.getOutputPath(conf).toString();
+                // each partition writes its own part file, MapReduce-style
+                String fileName = outputPath + File.separator + "part-" + partition;
+
+                tupleWriter = tupleWriterFactory.getTupleWriter();
+                try {
+                    FileSystem dfs = FileSystem.get(conf.getConfiguration());
+                    dos = dfs.create(new Path(fileName), true);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                accessor.reset(buffer);
+                int tupleCount = accessor.getTupleCount();
+                for (int i = 0; i < tupleCount; i++) {
+                    tuple.reset(accessor, i);
+                    tupleWriter.write(dos, tuple);
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                // nothing to roll back; close() still runs and releases the stream
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                try {
+                    // guard against open() having failed before dos was assigned
+                    if (dos != null) {
+                        dos.close();
+                    }
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+
+        };
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/Scheduler.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/Scheduler.java
new file mode 100644
index 0000000..3445d68
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/Scheduler.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.scheduler;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.mapreduce.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+
+/**
+ * The scheduler conduct data-local scheduling for data reading on HDFS.
+ * This class works for Hadoop new API.
+ */
+public class Scheduler {
+
+    /** a list of NCs */
+    private String[] NCs;
+
+    /** a map from ip to NCs */
+    private Map<String, List<String>> ipToNcMapping = new HashMap<String, List<String>>();
+
+    /** a map from the NC name to the index */
+    private Map<String, Integer> ncNameToIndex = new HashMap<String, Integer>();
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ipAddress
+     *            the IP address of the cluster controller
+     * @param port
+     *            the port of the cluster controller
+     * @throws HyracksException
+     */
+    public Scheduler(String ipAddress, int port) throws HyracksException {
+        try {
+            IHyracksClientConnection hcc = new HyracksConnection(ipAddress, port);
+            Map<String, NodeControllerInfo> ncNameToNcInfos = hcc.getNodeControllerInfos();
+            loadIPAddressToNCMap(ncNameToNcInfos);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ncNameToNcInfos
+     *            the map from NC names to NC infos
+     * @throws HyracksException
+     */
+    public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
+        loadIPAddressToNCMap(ncNameToNcInfos);
+    }
+
+    /**
+     * Set location constraints for a file scan operator with a list of file splits.
+     * Tries data-local placement first, then falls back to round-robin over NCs
+     * that still have capacity.
+     * 
+     * @param splits
+     *            the input splits to place
+     * @return one NC name per split, in split order
+     * @throws HyracksException
+     */
+    public String[] getLocationConstraints(List<InputSplit> splits) throws HyracksException {
+        int[] capacity = new int[NCs.length];
+        Arrays.fill(capacity, 0);
+        String[] locations = new String[splits.size()];
+        // ceiling of splits/NCs: the per-NC quota
+        int slots = splits.size() % capacity.length == 0 ? (splits.size() / capacity.length) : (splits.size()
+                / capacity.length + 1);
+
+        try {
+            Random random = new Random(System.currentTimeMillis());
+            boolean scheduled[] = new boolean[splits.size()];
+            Arrays.fill(scheduled, false);
+
+            for (int i = 0; i < splits.size(); i++) {
+                /**
+                 * get the location of all the splits
+                 */
+                String[] loc = splits.get(i).getLocations();
+                if (loc.length > 0) {
+                    for (int j = 0; j < loc.length; j++) {
+                        /**
+                         * get all the IP addresses from the name
+                         */
+                        InetAddress[] allIps = InetAddress.getAllByName(loc[j]);
+                        /**
+                         * iterate over all ips
+                         */
+                        for (InetAddress ip : allIps) {
+                            /**
+                             * if the node controller exists
+                             */
+                            if (ipToNcMapping.get(ip.getHostAddress()) != null) {
+                                /**
+                                 * set the ncs
+                                 */
+                                List<String> dataLocations = ipToNcMapping.get(ip.getHostAddress());
+                                int arrayPos = random.nextInt(dataLocations.size());
+                                String nc = dataLocations.get(arrayPos);
+                                int pos = ncNameToIndex.get(nc);
+                                /**
+                                 * check if the node is already full
+                                 */
+                                if (capacity[pos] < slots) {
+                                    locations[i] = nc;
+                                    capacity[pos]++;
+                                    scheduled[i] = true;
+                                    // stop at the first NC with capacity so the split is
+                                    // assigned exactly once and capacity is not inflated
+                                    break;
+                                }
+                            }
+                        }
+
+                        /**
+                         * break the loop for data-locations if the schedule has already been found
+                         */
+                        if (scheduled[i] == true) {
+                            break;
+                        }
+                    }
+                }
+            }
+
+            /**
+             * find the lowest index of the currently available NCs
+             */
+            int currentAvailableNC = 0;
+            for (int i = 0; i < capacity.length; i++) {
+                if (capacity[i] < slots) {
+                    currentAvailableNC = i;
+                    break;
+                }
+            }
+
+            /**
+             * schedule non-local file reads
+             */
+            for (int i = 0; i < splits.size(); i++) {
+                // if there is no data-local NC choice, choose a random one
+                if (!scheduled[i]) {
+                    locations[i] = NCs[currentAvailableNC];
+                    capacity[currentAvailableNC]++;
+                    scheduled[i] = true;
+
+                    /**
+                     * move the available NC cursor to the next one
+                     */
+                    for (int j = currentAvailableNC; j < capacity.length; j++) {
+                        if (capacity[j] < slots) {
+                            currentAvailableNC = j;
+                            break;
+                        }
+                    }
+                }
+            }
+            return locations;
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    /**
+     * Load the IP-address-to-NC map from the NCNameToNCInfoMap
+     * 
+     * @param ncNameToNcInfos
+     * @throws HyracksException
+     */
+    private void loadIPAddressToNCMap(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
+        try {
+            NCs = new String[ncNameToNcInfos.size()];
+            int i = 0;
+
+            /**
+             * build the IP address to NC map
+             */
+            for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
+                // resolve the NC's raw IP bytes into a host-address string used as the map key
+                String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().getIpAddress())
+                        .getHostAddress();
+                List<String> matchedNCs = ipToNcMapping.get(ipAddr);
+                if (matchedNCs == null) {
+                    matchedNCs = new ArrayList<String>();
+                    ipToNcMapping.put(ipAddr, matchedNCs);
+                }
+                // several NCs may run on the same machine, hence a list per IP
+                matchedNCs.add(entry.getKey());
+                NCs[i] = entry.getKey();
+                i++;
+            }
+
+            /**
+             * set up the NC name to index mapping
+             */
+            for (i = 0; i < NCs.length; i++) {
+                ncNameToIndex.put(NCs[i], i);
+            }
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/dataflow/DataflowTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/dataflow/DataflowTest.java
new file mode 100644
index 0000000..2686077
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/dataflow/DataflowTest.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryComparatorFactory;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextKeyValueParserFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextTupleWriterFactory;
+import edu.uci.ics.hyracks.hdfs.scheduler.Scheduler;
+import edu.uci.ics.hyracks.hdfs.utils.HyracksUtils;
+import edu.uci.ics.hyracks.hdfs.utils.TestUtils;
+
+/**
+ * Test the edu.uci.ics.hyracks.hdfs.dataflow package,
+ * the operators for the Hadoop old API.
+ */
+@SuppressWarnings({ "deprecation" })
+public class DataflowTest extends TestCase {
+
+    private static final String ACTUAL_RESULT_DIR = "actual";
+    private static final String EXPECTED_RESULT_PATH = "src/test/resources/expected";
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+
+    private static final String DATA_PATH = "src/test/resources/data/customer.tbl";
+    private static final String HDFS_INPUT_PATH = "/customer/";
+    private static final String HDFS_OUTPUT_PATH = "/customer_result/";
+
+    private static final String HYRACKS_APP_NAME = "DataflowTest";
+    private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
+    private MiniDFSCluster dfsCluster;
+
+    private JobConf conf = new JobConf();
+    private int numberOfNC = 2;
+
+    @Override
+    public void setUp() throws Exception {
+        cleanupStores();
+        HyracksUtils.init();
+        HyracksUtils.createApp(HYRACKS_APP_NAME);
+        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        startHDFS();
+    }
+
+    private void cleanupStores() throws IOException {
+        FileUtils.forceMkdir(new File("teststore"));
+        FileUtils.forceMkdir(new File("build"));
+        FileUtils.cleanDirectory(new File("teststore"));
+        FileUtils.cleanDirectory(new File("build"));
+    }
+
+    /**
+     * Start the HDFS cluster and setup the data files
+     * 
+     * @throws IOException
+     */
+    private void startHDFS() throws IOException {
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
+        FileSystem dfs = FileSystem.get(conf);
+        Path src = new Path(DATA_PATH);
+        Path dest = new Path(HDFS_INPUT_PATH);
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        dfs.mkdirs(dest);
+        dfs.mkdirs(result);
+        dfs.copyFromLocalFile(src, dest);
+
+        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
+        conf.writeXml(confOutput);
+        confOutput.flush();
+        confOutput.close();
+    }
+
+    /**
+     * Test a job with only HDFS read and writes.
+     * 
+     * @throws Exception
+     */
+    public void testHDFSReadWriteOperators() throws Exception {
+        FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
+        FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
+        conf.setInputFormat(TextInputFormat.class);
+
+        Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        InputSplit[] splits = conf.getInputFormat().getSplits(conf, numberOfNC * 4);
+
+        String[] readSchedule = scheduler.getLocationConstraints(splits);
+        JobSpecification jobSpec = new JobSpecification();
+        RecordDescriptor recordDesc = new RecordDescriptor(
+                new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+
+        String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
+                HyracksUtils.NC2_ID };
+        HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
+                readSchedule, new TextKeyValueParserFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
+
+        ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
+                new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
+
+        HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
+                new TextTupleWriterFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
+
+        jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
+        jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
+                new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+                new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }), sortOperator,
+                0, writeOperator, 0);
+        jobSpec.addRoot(writeOperator);
+
+        IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
+                HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        JobId jobId = client.startJob(HYRACKS_APP_NAME, jobSpec);
+        client.waitForCompletion(jobId);
+
+        Assert.assertEquals(true, checkResults());
+    }
+
+    /**
+     * Check if the results are correct
+     * 
+     * @return true if correct
+     * @throws Exception
+     */
+    private boolean checkResults() throws Exception {
+        FileSystem dfs = FileSystem.get(conf);
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        Path actual = new Path(ACTUAL_RESULT_DIR);
+        dfs.copyToLocalFile(result, actual);
+
+        TestUtils.compareWithResult(new File(EXPECTED_RESULT_PATH + File.separator + "part-0"), new File(
+                ACTUAL_RESULT_DIR + File.separator + "customer_result" + File.separator + "part-0"));
+        return true;
+    }
+
+    /**
+     * cleanup hdfs cluster
+     */
+    private void cleanupHDFS() throws Exception {
+        dfsCluster.shutdown();
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        HyracksUtils.destroyApp(HYRACKS_APP_NAME);
+        HyracksUtils.deinit();
+        cleanupHDFS();
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/scheduler/SchedulerTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/scheduler/SchedulerTest.java
new file mode 100644
index 0000000..4b8a278
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/scheduler/SchedulerTest.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.scheduler;
+
+import java.net.InetAddress;
+import java.util.HashMap;
+import java.util.Map;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.client.NodeStatus;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+
+@SuppressWarnings("deprecation")
+public class SchedulerTest extends TestCase {
+
+    /**
+     * Test the scheduler for the case when the Hyracks cluster is the HDFS cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSimple() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099)));
+
+        InputSplit[] fileSplits = new InputSplit[6];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc5", "nc6" };
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is a larger than the Hyracks cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerLargerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099)));
+
+        InputSplit[] fileSplits = new InputSplit[12];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[8] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
+        fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
+        fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" });
+        fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc3", "nc2", "nc1", "nc4", "nc5", "nc6",
+                "nc6", "nc5" };
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is a larger than the Hyracks cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099)));
+
+        InputSplit[] fileSplits = new InputSplit[12];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[8] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" });
+        fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" });
+        fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc3", "nc2", "nc1", "nc4", "nc5", "nc6",
+                "nc5", "nc6" };
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is a larger than the Hyracks cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFSOdd() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099)));
+
+        InputSplit[] fileSplits = new InputSplit[13];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[8] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" });
+        fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" });
+        fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[12] = new FileSplit(new Path("part-13"), 0, 0, new String[] { "10.0.0.2", "10.0.0.4", "10.0.0.5" });
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc3", "nc2", "nc1", "nc3", "nc4", "nc2",
+                "nc4", "nc5", "nc5" };
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/HyracksUtils.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/HyracksUtils.java
new file mode 100644
index 0000000..d44b75a
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/HyracksUtils.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.utils;
+
+import java.util.EnumSet;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+public class HyracksUtils {
+
+    public static final String NC1_ID = "nc1";
+    public static final String NC2_ID = "nc2";
+
+    public static final int DEFAULT_HYRACKS_CC_PORT = 1099;
+    public static final int TEST_HYRACKS_CC_PORT = 1099;
+    public static final int TEST_HYRACKS_CC_CLIENT_PORT = 2099;
+    public static final String CC_HOST = "localhost";
+
+    public static final int FRAME_SIZE = 65536;
+
+    private static ClusterControllerService cc;
+    private static NodeControllerService nc1;
+    private static NodeControllerService nc2;
+    private static IHyracksClientConnection hcc;
+
+    public static void init() throws Exception {
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clientNetIpAddress = CC_HOST;
+        ccConfig.clusterNetIpAddress = CC_HOST;
+        ccConfig.clusterNetPort = TEST_HYRACKS_CC_PORT;
+        ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_PORT;
+        ccConfig.defaultMaxJobAttempts = 0;
+        ccConfig.jobHistorySize = 0;
+        ccConfig.profileDumpPeriod = -1;
+
+        // cluster controller
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        // two node controllers
+        NCConfig ncConfig1 = new NCConfig();
+        ncConfig1.ccHost = "localhost";
+        ncConfig1.clusterNetIPAddress = "localhost";
+        ncConfig1.ccPort = TEST_HYRACKS_CC_PORT;
+        ncConfig1.dataIPAddress = "127.0.0.1";
+        ncConfig1.nodeId = NC1_ID;
+        nc1 = new NodeControllerService(ncConfig1);
+        nc1.start();
+
+        NCConfig ncConfig2 = new NCConfig();
+        ncConfig2.ccHost = "localhost";
+        ncConfig2.clusterNetIPAddress = "localhost";
+        ncConfig2.ccPort = TEST_HYRACKS_CC_PORT;
+        ncConfig2.dataIPAddress = "127.0.0.1";
+        ncConfig2.nodeId = NC2_ID;
+        nc2 = new NodeControllerService(ncConfig2);
+        nc2.start();
+
+        // hyracks connection
+        hcc = new HyracksConnection(CC_HOST, TEST_HYRACKS_CC_CLIENT_PORT);
+    }
+
+    public static void destroyApp(String hyracksAppName) throws Exception {
+        hcc.destroyApplication(hyracksAppName);
+    }
+
+    public static void createApp(String hyracksAppName) throws Exception {
+        hcc.createApplication(hyracksAppName, null);
+    }
+
+    public static void deinit() throws Exception {
+        nc2.stop();
+        nc1.stop();
+        cc.stop();
+    }
+
+    public static void runJob(JobSpecification spec, String appName) throws Exception {
+        spec.setFrameSize(FRAME_SIZE);
+        JobId jobId = hcc.startJob(appName, spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        hcc.waitForCompletion(jobId);
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/TestUtils.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/TestUtils.java
new file mode 100644
index 0000000..3826688
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/TestUtils.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.utils;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+
/**
 * File-comparison helpers for tests: diff an actual result file against an
 * expected result file, tolerating numeric formatting differences in
 * "key=value" fields.
 */
public class TestUtils {

    // Utility class holding only static members; prevent instantiation.
    private TestUtils() {
    }

    /**
     * Compare the two files line by line, throwing a descriptive Exception at
     * the first mismatching or missing line. Returns normally iff the files
     * are equivalent under {@link #equalStrings(String, String)}.
     * 
     * @param expectedFile the reference result
     * @param actualFile   the produced result
     * @throws Exception on the first difference (message carries line number
     *                   and both lines)
     */
    public static void compareWithResult(File expectedFile, File actualFile) throws Exception {
        BufferedReader readerExpected = new BufferedReader(new FileReader(expectedFile));
        BufferedReader readerActual = new BufferedReader(new FileReader(actualFile));
        String lineExpected, lineActual;
        int num = 1;
        try {
            while ((lineExpected = readerExpected.readLine()) != null) {
                lineActual = readerActual.readLine();
                if (lineActual == null) {
                    throw new Exception("Actual result changed at line " + num + ":\n< " + lineExpected + "\n> ");
                }
                if (!equalStrings(lineExpected, lineActual)) {
                    throw new Exception("Result for changed at line " + num + ":\n< " + lineExpected + "\n> "
                            + lineActual);
                }
                ++num;
            }
            // The actual file must not have extra trailing lines.
            lineActual = readerActual.readLine();
            if (lineActual != null) {
                throw new Exception("Actual result changed at line " + num + ":\n< \n> " + lineActual);
            }
        } finally {
            readerExpected.close();
            readerActual.close();
        }
    }

    /**
     * Compare two result lines field by field (comma-separated). A field
     * containing a '.' is treated as numeric (possibly "key=value") and the
     * values are compared as floats, so formatting differences like "1.5"
     * vs "1.50" still match.
     * <p>
     * Fixes over the original: a differing field count now returns false
     * instead of throwing ArrayIndexOutOfBoundsException; a dotted field
     * without '=' is parsed as a whole (the original's split("=")[1] threw);
     * and an unparsable "numeric" field reports a mismatch instead of
     * propagating NumberFormatException.
     */
    private static boolean equalStrings(String s1, String s2) {
        String[] rowsOne = s1.split("\n");
        String[] rowsTwo = s2.split("\n");

        if (rowsOne.length != rowsTwo.length)
            return false;

        for (int i = 0; i < rowsOne.length; i++) {
            String row1 = rowsOne[i];
            String row2 = rowsTwo[i];

            if (row1.equals(row2))
                continue;

            String[] fields1 = row1.split(",");
            String[] fields2 = row2.split(",");

            // Different numbers of fields can never match; the original code
            // indexed fields2 by fields1's length and crashed here.
            if (fields1.length != fields2.length) {
                return false;
            }

            for (int j = 0; j < fields1.length; j++) {
                if (fields1[j].equals(fields2[j])) {
                    continue;
                } else if (fields1[j].indexOf('.') < 0) {
                    // Non-numeric field that differs: mismatch.
                    return false;
                } else {
                    String value1 = extractValue(fields1[j]);
                    String value2 = extractValue(fields2[j]);
                    float float1;
                    float float2;
                    try {
                        float1 = (float) Double.parseDouble(value1);
                        float2 = (float) Double.parseDouble(value2);
                    } catch (NumberFormatException e) {
                        // Not actually numeric: treat as a plain mismatch.
                        return false;
                    }
                    if (float1 != float2) {
                        return false;
                    }
                }
            }
        }
        return true;
    }

    /**
     * Return the portion of the field after the first '=', or the whole field
     * if it contains no '='.
     */
    private static String extractValue(String field) {
        int eq = field.indexOf('=');
        return eq < 0 ? field : field.substring(eq + 1);
    }

}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/dataflow/DataflowTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/dataflow/DataflowTest.java
new file mode 100644
index 0000000..9f77979
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/dataflow/DataflowTest.java
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.hdfs.MiniDFSClusterFactory;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryComparatorFactory;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextKeyValueParserFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextTupleWriterFactory;
+import edu.uci.ics.hyracks.hdfs.utils.HyracksUtils;
+import edu.uci.ics.hyracks.hdfs.utils.TestUtils;
+import edu.uci.ics.hyracks.hdfs2.scheduler.Scheduler;
+
+/**
+ * Test the edu.uci.ics.hyracks.hdfs2.dataflow package,
+ * the operators for the Hadoop new API.
+ */
+public class DataflowTest extends TestCase {
+
+    // Local directory into which the HDFS job output is copied for comparison.
+    private static final String ACTUAL_RESULT_DIR = "actual";
+    private static final String EXPECTED_RESULT_PATH = "src/test/resources/expected";
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+
+    // TPC-H customer table used as the job input; copied into the mini DFS.
+    private static final String DATA_PATH = "src/test/resources/data/customer.tbl";
+    private static final String HDFS_INPUT_PATH = "/customer/";
+    private static final String HDFS_OUTPUT_PATH = "/customer_result/";
+
+    private static final String HYRACKS_APP_NAME = "DataflowTest";
+    private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
+    private MiniDFSCluster dfsCluster;
+    private MiniDFSClusterFactory dfsClusterFactory = new MiniDFSClusterFactory();
+
+    // Hadoop new-API job configuration shared by setUp, the test, and checkResults.
+    private Job conf;
+    // Number of data nodes started in the mini DFS cluster.
+    private int numberOfNC = 2;
+
+    /**
+     * Initializes the Hyracks mini cluster and application, prepares a clean
+     * local result directory, and starts the mini HDFS cluster with the test
+     * data loaded.
+     */
+    @Override
+    public void setUp() throws Exception {
+        conf = new Job();
+        cleanupStores();
+        HyracksUtils.init();
+        HyracksUtils.createApp(HYRACKS_APP_NAME);
+        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        startHDFS();
+    }
+
+    // Recreates and empties the local scratch directories used by the clusters.
+    private void cleanupStores() throws IOException {
+        FileUtils.forceMkdir(new File("teststore"));
+        FileUtils.forceMkdir(new File("build"));
+        FileUtils.cleanDirectory(new File("teststore"));
+        FileUtils.cleanDirectory(new File("build"));
+    }
+
+    /**
+     * Start the HDFS cluster and setup the data files
+     * 
+     * @throws IOException
+     */
+    private void startHDFS() throws IOException {
+        conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = dfsClusterFactory.getMiniDFSCluster(conf.getConfiguration(), numberOfNC);
+        FileSystem dfs = FileSystem.get(conf.getConfiguration());
+        Path src = new Path(DATA_PATH);
+        Path dest = new Path(HDFS_INPUT_PATH);
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        dfs.mkdirs(dest);
+        dfs.mkdirs(result);
+        dfs.copyFromLocalFile(src, dest);
+
+        // Dump the effective configuration so node controllers / operators can
+        // re-read it from HADOOP_CONF_PATH.
+        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
+        conf.getConfiguration().writeXml(confOutput);
+        confOutput.flush();
+        confOutput.close();
+    }
+
+    /**
+     * Test a job with only HDFS read and writes.
+     * 
+     * @throws Exception
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public void testHDFSReadWriteOperators() throws Exception {
+        FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
+        FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
+        conf.setInputFormatClass(TextInputFormat.class);
+
+        // Schedule each input split onto a node controller, then wire up a
+        // read -> sort -> merge/write pipeline over those locations.
+        Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        InputFormat inputFormat = ReflectionUtils.newInstance(conf.getInputFormatClass(), conf.getConfiguration());
+        List<InputSplit> splits = inputFormat.getSplits(conf);
+
+        String[] readSchedule = scheduler.getLocationConstraints(splits);
+        JobSpecification jobSpec = new JobSpecification();
+        RecordDescriptor recordDesc = new RecordDescriptor(
+                new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+
+        // Two partitions per node controller (NC1, NC2).
+        String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
+                HyracksUtils.NC2_ID };
+        HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
+                readSchedule, new TextKeyValueParserFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
+
+        ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
+                new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
+
+        // Single writer partition so the output is one merged, ordered file.
+        HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
+                new TextTupleWriterFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
+
+        jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
+        jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
+                new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+                new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }), sortOperator,
+                0, writeOperator, 0);
+        jobSpec.addRoot(writeOperator);
+
+        IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
+                HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        JobId jobId = client.startJob(HYRACKS_APP_NAME, jobSpec);
+        client.waitForCompletion(jobId);
+
+        Assert.assertEquals(true, checkResults());
+    }
+
+    /**
+     * Check if the results are correct
+     * 
+     * @return true if correct
+     * @throws Exception
+     */
+    private boolean checkResults() throws Exception {
+        FileSystem dfs = FileSystem.get(conf.getConfiguration());
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        Path actual = new Path(ACTUAL_RESULT_DIR);
+        dfs.copyToLocalFile(result, actual);
+
+        // compareWithResult is expected to throw on any mismatch, so reaching
+        // the return below means the outputs matched — hence the constant true.
+        TestUtils.compareWithResult(new File(EXPECTED_RESULT_PATH + File.separator + "part-0"), new File(
+                ACTUAL_RESULT_DIR + File.separator + "customer_result" + File.separator + "part-0"));
+        return true;
+    }
+
+    /**
+     * cleanup hdfs cluster
+     */
+    private void cleanupHDFS() throws Exception {
+        dfsCluster.shutdown();
+    }
+
+    /**
+     * Tears down the Hyracks application/cluster and then the HDFS cluster.
+     * NOTE(review): if destroyApp or deinit throws, cleanupHDFS() is never
+     * reached and the mini DFS cluster leaks — consider a try/finally chain.
+     */
+    @Override
+    public void tearDown() throws Exception {
+        HyracksUtils.destroyApp(HYRACKS_APP_NAME);
+        HyracksUtils.deinit();
+        cleanupHDFS();
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/scheduler/SchedulerTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/scheduler/SchedulerTest.java
new file mode 100644
index 0000000..ea2af13
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/scheduler/SchedulerTest.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.scheduler;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.client.NodeStatus;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+
+/**
+ * Test case for the new HDFS API scheduler
+ * 
+ */
+public class SchedulerTest extends TestCase {
+
+    /**
+     * Builds the 6-node Hyracks cluster map shared by every test: node
+     * controllers nc1..nc6 at IP addresses 10.0.0.1..10.0.0.6, data port 5099.
+     *
+     * @return map from node-controller name to its info
+     * @throws Exception if an address cannot be resolved
+     */
+    private static Map<String, NodeControllerInfo> buildNodeControllerMap() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        for (int i = 1; i <= 6; i++) {
+            String ncName = "nc" + i;
+            byte[] ipAddress = InetAddress.getByName("10.0.0." + i).getAddress();
+            ncNameToNcInfos.put(ncName, new NodeControllerInfo(ncName, NodeStatus.ALIVE, new NetworkAddress(ipAddress,
+                    5099)));
+        }
+        return ncNameToNcInfos;
+    }
+
+    /**
+     * Runs the scheduler over the given splits and asserts that each split is
+     * assigned to the expected node controller.
+     *
+     * @throws Exception from scheduler construction or scheduling
+     */
+    private static void assertScheduled(Map<String, NodeControllerInfo> ncNameToNcInfos, List<InputSplit> fileSplits,
+            String[] expectedResults) throws Exception {
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            // expected first, actual second, for a meaningful failure message.
+            Assert.assertEquals(expectedResults[i], locationConstraints[i]);
+        }
+    }
+
+    /**
+     * Test the scheduler for the case when the Hyracks cluster is the HDFS cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSimple() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = buildNodeControllerMap();
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc5", "nc6" };
+
+        assertScheduled(ncNameToNcInfos, fileSplits, expectedResults);
+    }
+
+    /**
+     * Test the case where the HDFS cluster is a larger than the Hyracks cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerLargerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = buildNodeControllerMap();
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" }));
+        fileSplits.add(new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc3", "nc2", "nc1", "nc4", "nc5", "nc6",
+                "nc6", "nc5" };
+
+        assertScheduled(ncNameToNcInfos, fileSplits, expectedResults);
+    }
+
+    /**
+     * Test the case where the HDFS cluster is smaller than the Hyracks cluster
+     * (splits are concentrated on a subset of the hosts)
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = buildNodeControllerMap();
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" }));
+        fileSplits.add(new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" }));
+        fileSplits.add(new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc3", "nc2", "nc1", "nc4", "nc5", "nc6",
+                "nc5", "nc6" };
+
+        assertScheduled(ncNameToNcInfos, fileSplits, expectedResults);
+    }
+
+    /**
+     * Test the case where the HDFS cluster is smaller than the Hyracks cluster
+     * and the number of splits is odd
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFSOdd() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = buildNodeControllerMap();
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" }));
+        fileSplits.add(new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" }));
+        fileSplits.add(new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-13"), 0, 0, new String[] { "10.0.0.2", "10.0.0.4", "10.0.0.5" }));
+
+        String[] expectedResults = new String[] { "nc1", "nc3", "nc4", "nc2", "nc3", "nc2", "nc1", "nc3", "nc4", "nc2",
+                "nc4", "nc5", "nc5" };
+
+        assertScheduled(ncNameToNcInfos, fileSplits, expectedResults);
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/data/customer.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl
copy to hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/data/customer.tbl
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/expected/part-0 b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/expected/part-0
new file mode 100755
index 0000000..ce3b00c
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/expected/part-0
@@ -0,0 +1,150 @@
+100|Customer#000000100|fptUABXcmkC5Wx|20|30-749-445-4907|9889.89|FURNITURE|was furiously fluffily quiet deposits. silent, pending requests boost against |
+101|Customer#000000101|sMmL2rNeHDltovSm Y|2|12-514-298-3699|7470.96|MACHINERY| sleep. pending packages detect slyly ironic pack|
+102|Customer#000000102|UAtflJ06 fn9zBfKjInkQZlWtqaA|19|29-324-978-8538|8462.17|BUILDING|ously regular dependencies nag among the furiously express dinos. blithely final|
+103|Customer#000000103|8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a|9|19-216-107-2107|2757.45|BUILDING|furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl|
+104|Customer#000000104|9mcCK L7rt0SwiYtrbO88DiZS7U d7M|10|20-966-284-8065|-588.38|FURNITURE|rate carefully slyly special pla|
+105|Customer#000000105|4iSJe4L SPjg7kJj98Yz3z0B|10|20-793-553-6417|9091.82|MACHINERY|l pains cajole even accounts. quietly final instructi|
+106|Customer#000000106|xGCOEAUjUNG|1|11-751-989-4627|3288.42|MACHINERY|lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. |
+107|Customer#000000107|Zwg64UZ,q7GRqo3zm7P1tZIRshBDz|15|25-336-529-9919|2514.15|AUTOMOBILE|counts cajole slyly. regular requests wake. furiously regular deposits about the blithely final fo|
+108|Customer#000000108|GPoeEvpKo1|5|15-908-619-7526|2259.38|BUILDING|refully ironic deposits sleep. regular, unusual requests wake slyly|
+109|Customer#000000109|OOOkYBgCMzgMQXUmkocoLb56rfrdWp2NE2c|16|26-992-422-8153|-716.10|BUILDING|es. fluffily final dependencies sleep along the blithely even pinto beans. final deposits haggle furiously furiou|
+10|Customer#000000010|6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2|5|15-741-346-9870|2753.54|HOUSEHOLD|es regular deposits haggle. fur|
+110|Customer#000000110|mymPfgphaYXNYtk|10|20-893-536-2069|7462.99|AUTOMOBILE|nto beans cajole around the even, final deposits. quickly bold packages according to the furiously regular dept|
+111|Customer#000000111|CBSbPyOWRorloj2TBvrK9qp9tHBs|22|32-582-283-7528|6505.26|MACHINERY|ly unusual instructions detect fluffily special deposits-- theodolites nag carefully during the ironic dependencies|
+112|Customer#000000112|RcfgG3bO7QeCnfjqJT1|19|29-233-262-8382|2953.35|FURNITURE|rmanently unusual multipliers. blithely ruthless deposits are furiously along the|
+113|Customer#000000113|eaOl5UBXIvdY57rglaIzqvfPD,MYfK|12|22-302-930-4756|2912.00|BUILDING|usly regular theodolites boost furiously doggedly pending instructio|
+114|Customer#000000114|xAt 5f5AlFIU|14|24-805-212-7646|1027.46|FURNITURE|der the carefully express theodolites are after the packages. packages are. bli|
+115|Customer#000000115|0WFt1IXENmUT2BgbsB0ShVKJZt0HCBCbFl0aHc|8|18-971-699-1843|7508.92|HOUSEHOLD|sits haggle above the carefully ironic theodolite|
+116|Customer#000000116|yCuVxIgsZ3,qyK2rloThy3u|16|26-632-309-5792|8403.99|BUILDING|as. quickly final sauternes haggle slyly carefully even packages. brave, ironic pinto beans are above the furious|
+117|Customer#000000117|uNhM,PzsRA3S,5Y Ge5Npuhi|24|34-403-631-3505|3950.83|FURNITURE|affix. instructions are furiously sl|
+118|Customer#000000118|OVnFuHygK9wx3xpg8|18|28-639-943-7051|3582.37|AUTOMOBILE|uick packages alongside of the furiously final deposits haggle above the fluffily even foxes. blithely dogged dep|
+119|Customer#000000119|M1ETOIecuvH8DtM0Y0nryXfW|7|17-697-919-8406|3930.35|FURNITURE|express ideas. blithely ironic foxes thrash. special acco|
+11|Customer#000000011|PkWS 3HlXqwTuzrKg633BEi|23|33-464-151-3439|-272.60|BUILDING|ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans. |
+120|Customer#000000120|zBNna00AEInqyO1|12|22-291-534-1571|363.75|MACHINERY| quickly. slyly ironic requests cajole blithely furiously final dependen|
+121|Customer#000000121|tv nCR2YKupGN73mQudO|17|27-411-990-2959|6428.32|BUILDING|uriously stealthy ideas. carefully final courts use carefully|
+122|Customer#000000122|yp5slqoNd26lAENZW3a67wSfXA6hTF|3|13-702-694-4520|7865.46|HOUSEHOLD| the special packages hinder blithely around the permanent requests. bold depos|
+123|Customer#000000123|YsOnaaER8MkvK5cpf4VSlq|5|15-817-151-1168|5897.83|BUILDING|ependencies. regular, ironic requests are fluffily regu|
+124|Customer#000000124|aTbyVAW5tCd,v09O|18|28-183-750-7809|1842.49|AUTOMOBILE|le fluffily even dependencies. quietly s|
+125|Customer#000000125|,wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y|19|29-261-996-3120|-234.12|FURNITURE|x-ray finally after the packages? regular requests c|
+126|Customer#000000126|ha4EHmbx3kg DYCsP6DFeUOmavtQlHhcfaqr|22|32-755-914-7592|1001.39|HOUSEHOLD|s about the even instructions boost carefully furiously ironic pearls. ruthless, |
+127|Customer#000000127|Xyge4DX2rXKxXyye1Z47LeLVEYMLf4Bfcj|21|31-101-672-2951|9280.71|MACHINERY|ic, unusual theodolites nod silently after the final, ironic instructions: pending r|
+128|Customer#000000128|AmKUMlJf2NRHcKGmKjLS|4|14-280-874-8044|-986.96|HOUSEHOLD|ing packages integrate across the slyly unusual dugouts. blithely silent ideas sublate carefully. blithely expr|
+129|Customer#000000129|q7m7rbMM0BpaCdmxloCgBDRCleXsXkdD8kf|7|17-415-148-7416|9127.27|HOUSEHOLD| unusual deposits boost carefully furiously silent ideas. pending accounts cajole slyly across|
+12|Customer#000000012|9PWKuhzT4Zr1Q|13|23-791-276-1263|3396.49|HOUSEHOLD| to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along|
+130|Customer#000000130|RKPx2OfZy0Vn 8wGWZ7F2EAvmMORl1k8iH|9|19-190-993-9281|5073.58|HOUSEHOLD|ix slowly. express packages along the furiously ironic requests integrate daringly deposits. fur|
+131|Customer#000000131|jyN6lAjb1FtH10rMC,XzlWyCBrg75|11|21-840-210-3572|8595.53|HOUSEHOLD|jole special packages. furiously final dependencies about the furiously speci|
+132|Customer#000000132|QM5YabAsTLp9|4|14-692-150-9717|162.57|HOUSEHOLD|uickly carefully special theodolites. carefully regular requests against the blithely unusual instructions |
+133|Customer#000000133|IMCuXdpIvdkYO92kgDGuyHgojcUs88p|17|27-408-997-8430|2314.67|AUTOMOBILE|t packages. express pinto beans are blithely along the unusual, even theodolites. silent packages use fu|
+134|Customer#000000134|sUiZ78QCkTQPICKpA9OBzkUp2FM|11|21-200-159-5932|4608.90|BUILDING|yly fluffy foxes boost final ideas. b|
+135|Customer#000000135|oZK,oC0 fdEpqUML|19|29-399-293-6241|8732.91|FURNITURE| the slyly final accounts. deposits cajole carefully. carefully sly packag|
+136|Customer#000000136|QoLsJ0v5C1IQbh,DS1|7|17-501-210-4726|-842.39|FURNITURE|ackages sleep ironic, final courts. even requests above the blithely bold requests g|
+137|Customer#000000137|cdW91p92rlAEHgJafqYyxf1Q|16|26-777-409-5654|7838.30|HOUSEHOLD|carefully regular theodolites use. silent dolphins cajo|
+138|Customer#000000138|5uyLAeY7HIGZqtu66Yn08f|5|15-394-860-4589|430.59|MACHINERY|ts doze on the busy ideas. regular|
+139|Customer#000000139|3ElvBwudHKL02732YexGVFVt |9|19-140-352-1403|7897.78|MACHINERY|nstructions. quickly ironic ideas are carefully. bold, |
+13|Customer#000000013|nsXQu0oVjD7PM659uC3SRSp|3|13-761-547-5974|3857.34|BUILDING|ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely|
+140|Customer#000000140|XRqEPiKgcETII,iOLDZp5jA|4|14-273-885-6505|9963.15|MACHINERY|ies detect slyly ironic accounts. slyly ironic theodolites hag|
+141|Customer#000000141|5IW,WROVnikc3l7DwiUDGQNGsLBGOL6Dc0|1|11-936-295-6204|6706.14|FURNITURE|packages nag furiously. carefully unusual accounts snooze according to the fluffily regular pinto beans. slyly spec|
+142|Customer#000000142|AnJ5lxtLjioClr2khl9pb8NLxG2,|9|19-407-425-2584|2209.81|AUTOMOBILE|. even, express theodolites upo|
+143|Customer#000000143|681r22uL452zqk 8By7I9o9enQfx0|16|26-314-406-7725|2186.50|MACHINERY|across the blithely unusual requests haggle theodo|
+144|Customer#000000144|VxYZ3ebhgbltnetaGjNC8qCccjYU05 fePLOno8y|1|11-717-379-4478|6417.31|MACHINERY|ges. slyly regular accounts are slyly. bold, idle reque|
+145|Customer#000000145|kQjHmt2kcec cy3hfMh969u|13|23-562-444-8454|9748.93|HOUSEHOLD|ests? express, express instructions use. blithely fina|
+146|Customer#000000146|GdxkdXG9u7iyI1,,y5tq4ZyrcEy|3|13-835-723-3223|3328.68|FURNITURE|ffily regular dinos are slyly unusual requests. slyly specia|
+147|Customer#000000147|6VvIwbVdmcsMzuu,C84GtBWPaipGfi7DV|18|28-803-187-4335|8071.40|AUTOMOBILE|ress packages above the blithely regular packages sleep fluffily blithely ironic accounts. |
+148|Customer#000000148|BhSPlEWGvIJyT9swk vCWE|11|21-562-498-6636|2135.60|HOUSEHOLD|ing to the carefully ironic requests. carefully regular dependencies about the theodolites wake furious|
+149|Customer#000000149|3byTHCp2mNLPigUrrq|19|29-797-439-6760|8959.65|AUTOMOBILE|al instructions haggle against the slyly bold w|
+14|Customer#000000014|KXkletMlL2JQEA |1|11-845-129-3851|5266.30|FURNITURE|, ironic packages across the unus|
+150|Customer#000000150|zeoGShTjCwGPplOWFkLURrh41O0AZ8dwNEEN4 |18|28-328-564-7630|3849.48|MACHINERY|ole blithely among the furiously pending packages. furiously bold ideas wake fluffily ironic idea|
+15|Customer#000000015|YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn|23|33-687-542-7601|2788.52|HOUSEHOLD| platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf|
+16|Customer#000000016|cYiaeMLZSMAOQ2 d0W,|10|20-781-609-3107|4681.03|FURNITURE|kly silent courts. thinly regular theodolites sleep fluffily after |
+17|Customer#000000017|izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7|2|12-970-682-3487|6.34|AUTOMOBILE|packages wake! blithely even pint|
+18|Customer#000000018|3txGO AiuFux3zT0Z9NYaFRnZt|6|16-155-215-1315|5494.43|BUILDING|s sleep. carefully even instructions nag furiously alongside of t|
+19|Customer#000000019|uc,3bHIx84H,wdrmLOjVsiqXCq2tr|18|28-396-526-5053|8914.71|HOUSEHOLD| nag. furiously careful packages are slyly at the accounts. furiously regular in|
+1|Customer#000000001|IVhzIApeRb ot,c,E|15|25-989-741-2988|711.56|BUILDING|to the even, regular platelets. regular, ironic epitaphs nag e|
+20|Customer#000000020|JrPk8Pqplj4Ne|22|32-957-234-8742|7603.40|FURNITURE|g alongside of the special excuses-- fluffily enticing packages wake |
+21|Customer#000000021|XYmVpr9yAHDEn|8|18-902-614-8344|1428.25|MACHINERY| quickly final accounts integrate blithely furiously u|
+22|Customer#000000022|QI6p41,FNs5k7RZoCCVPUTkUdYpB|3|13-806-545-9701|591.98|MACHINERY|s nod furiously above the furiously ironic ideas. |
+23|Customer#000000023|OdY W13N7Be3OC5MpgfmcYss0Wn6TKT|3|13-312-472-8245|3332.02|HOUSEHOLD|deposits. special deposits cajole slyly. fluffily special deposits about the furiously |
+24|Customer#000000024|HXAFgIAyjxtdqwimt13Y3OZO 4xeLe7U8PqG|13|23-127-851-8031|9255.67|MACHINERY|into beans. fluffily final ideas haggle fluffily|
+25|Customer#000000025|Hp8GyFQgGHFYSilH5tBfe|12|22-603-468-3533|7133.70|FURNITURE|y. accounts sleep ruthlessly according to the regular theodolites. unusual instructions sleep. ironic, final|
+26|Customer#000000026|8ljrc5ZeMl7UciP|22|32-363-455-4837|5182.05|AUTOMOBILE|c requests use furiously ironic requests. slyly ironic dependencies us|
+27|Customer#000000027|IS8GIyxpBrLpMT0u7|3|13-137-193-2709|5679.84|BUILDING| about the carefully ironic pinto beans. accoun|
+28|Customer#000000028|iVyg0daQ,Tha8x2WPWA9m2529m|8|18-774-241-1462|1007.18|FURNITURE| along the regular deposits. furiously final pac|
+29|Customer#000000029|sJ5adtfyAkCK63df2,vF25zyQMVYE34uh|0|10-773-203-7342|7618.27|FURNITURE|its after the carefully final platelets x-ray against |
+2|Customer#000000002|XSTf4,NCwDVaWNe6tEgvwfmRchLXak|13|23-768-687-3665|121.65|AUTOMOBILE|l accounts. blithely ironic theodolites integrate boldly: caref|
+30|Customer#000000030|nJDsELGAavU63Jl0c5NKsKfL8rIJQQkQnYL2QJY|1|11-764-165-5076|9321.01|BUILDING|lithely final requests. furiously unusual account|
+31|Customer#000000031|LUACbO0viaAv6eXOAebryDB xjVst|23|33-197-837-7094|5236.89|HOUSEHOLD|s use among the blithely pending depo|
+32|Customer#000000032|jD2xZzi UmId,DCtNBLXKj9q0Tlp2iQ6ZcO3J|15|25-430-914-2194|3471.53|BUILDING|cial ideas. final, furious requests across the e|
+33|Customer#000000033|qFSlMuLucBmx9xnn5ib2csWUweg D|17|27-375-391-1280|-78.56|AUTOMOBILE|s. slyly regular accounts are furiously. carefully pending requests|
+34|Customer#000000034|Q6G9wZ6dnczmtOx509xgE,M2KV|15|25-344-968-5422|8589.70|HOUSEHOLD|nder against the even, pending accounts. even|
+35|Customer#000000035|TEjWGE4nBzJL2|17|27-566-888-7431|1228.24|HOUSEHOLD|requests. special, express requests nag slyly furiousl|
+36|Customer#000000036|3TvCzjuPzpJ0,DdJ8kW5U|21|31-704-669-5769|4987.27|BUILDING|haggle. enticing, quiet platelets grow quickly bold sheaves. carefully regular acc|
+37|Customer#000000037|7EV4Pwh,3SboctTWt|8|18-385-235-7162|-917.75|FURNITURE|ilent packages are carefully among the deposits. furiousl|
+38|Customer#000000038|a5Ee5e9568R8RLP 2ap7|12|22-306-880-7212|6345.11|HOUSEHOLD|lar excuses. closely even asymptotes cajole blithely excuses. carefully silent pinto beans sleep carefully fin|
+39|Customer#000000039|nnbRg,Pvy33dfkorYE FdeZ60|2|12-387-467-6509|6264.31|AUTOMOBILE|tions. slyly silent excuses slee|
+3|Customer#000000003|MG9kdTD2WBHm|1|11-719-748-3364|7498.12|AUTOMOBILE| deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov|
+40|Customer#000000040|gOnGWAyhSV1ofv|3|13-652-915-8939|1335.30|BUILDING|rges impress after the slyly ironic courts. foxes are. blithely |
+41|Customer#000000041|IM9mzmyoxeBmvNw8lA7G3Ydska2nkZF|10|20-917-711-4011|270.95|HOUSEHOLD|ly regular accounts hang bold, silent packages. unusual foxes haggle slyly above the special, final depo|
+42|Customer#000000042|ziSrvyyBke|5|15-416-330-4175|8727.01|BUILDING|ssly according to the pinto beans: carefully special requests across the even, pending accounts wake special|
+43|Customer#000000043|ouSbjHk8lh5fKX3zGso3ZSIj9Aa3PoaFd|19|29-316-665-2897|9904.28|MACHINERY|ial requests: carefully pending foxes detect quickly. carefully final courts cajole quickly. carefully|
+44|Customer#000000044|Oi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi|16|26-190-260-5375|7315.94|AUTOMOBILE|r requests around the unusual, bold a|
+45|Customer#000000045|4v3OcpFgoOmMG,CbnF,4mdC|9|19-715-298-9917|9983.38|AUTOMOBILE|nto beans haggle slyly alongside of t|
+46|Customer#000000046|eaTXWWm10L9|6|16-357-681-2007|5744.59|AUTOMOBILE|ctions. accounts sleep furiously even requests. regular, regular accounts cajole blithely around the final pa|
+47|Customer#000000047|b0UgocSqEW5 gdVbhNT|2|12-427-271-9466|274.58|BUILDING|ions. express, ironic instructions sleep furiously ironic ideas. furi|
+48|Customer#000000048|0UU iPhBupFvemNB|0|10-508-348-5882|3792.50|BUILDING|re fluffily pending foxes. pending, bold platelets sleep slyly. even platelets cajo|
+49|Customer#000000049|cNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl|10|20-908-631-4424|4573.94|FURNITURE|nusual foxes! fluffily pending packages maintain to the regular |
+4|Customer#000000004|XxVSJsLAGtn|4|14-128-190-5944|2866.83|MACHINERY| requests. final, regular ideas sleep final accou|
+50|Customer#000000050|9SzDYlkzxByyJ1QeTI o|6|16-658-112-3221|4266.13|MACHINERY|ts. furiously ironic accounts cajole furiously slyly ironic dinos.|
+51|Customer#000000051|uR,wEaiTvo4|12|22-344-885-4251|855.87|FURNITURE|eposits. furiously regular requests integrate carefully packages. furious|
+52|Customer#000000052|7 QOqGqqSy9jfV51BC71jcHJSD0|11|21-186-284-5998|5630.28|HOUSEHOLD|ic platelets use evenly even accounts. stealthy theodolites cajole furiou|
+53|Customer#000000053|HnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib|15|25-168-852-5363|4113.64|HOUSEHOLD|ar accounts are. even foxes are blithely. fluffily pending deposits boost|
+54|Customer#000000054|,k4vf 5vECGWFy,hosTE,|4|14-776-370-4745|868.90|AUTOMOBILE|sual, silent accounts. furiously express accounts cajole special deposits. final, final accounts use furi|
+55|Customer#000000055|zIRBR4KNEl HzaiV3a i9n6elrxzDEh8r8pDom|10|20-180-440-8525|4572.11|MACHINERY|ully unusual packages wake bravely bold packages. unusual requests boost deposits! blithely ironic packages ab|
+56|Customer#000000056|BJYZYJQk4yD5B|10|20-895-685-6920|6530.86|FURNITURE|. notornis wake carefully. carefully fluffy requests are furiously even accounts. slyly expre|
+57|Customer#000000057|97XYbsuOPRXPWU|21|31-835-306-1650|4151.93|AUTOMOBILE|ove the carefully special packages. even, unusual deposits sleep slyly pend|
+58|Customer#000000058|g9ap7Dk1Sv9fcXEWjpMYpBZIRUohi T|13|23-244-493-2508|6478.46|HOUSEHOLD|ideas. ironic ideas affix furiously express, final instructions. regular excuses use quickly e|
+59|Customer#000000059|zLOCP0wh92OtBihgspOGl4|1|11-355-584-3112|3458.60|MACHINERY|ously final packages haggle blithely after the express deposits. furiou|
+5|Customer#000000005|KvpyuHCplrB84WgAiGV6sYpZq7Tj|3|13-750-942-6364|794.47|HOUSEHOLD|n accounts will have to unwind. foxes cajole accor|
+60|Customer#000000060|FyodhjwMChsZmUz7Jz0H|12|22-480-575-5866|2741.87|MACHINERY|latelets. blithely unusual courts boost furiously about the packages. blithely final instruct|
+61|Customer#000000061|9kndve4EAJxhg3veF BfXr7AqOsT39o gtqjaYE|17|27-626-559-8599|1536.24|FURNITURE|egular packages shall have to impress along the |
+62|Customer#000000062|upJK2Dnw13,|7|17-361-978-7059|595.61|MACHINERY|kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a|
+63|Customer#000000063|IXRSpVWWZraKII|21|31-952-552-9584|9331.13|AUTOMOBILE|ithely even accounts detect slyly above the fluffily ir|
+64|Customer#000000064|MbCeGY20kaKK3oalJD,OT|3|13-558-731-7204|-646.64|BUILDING|structions after the quietly ironic theodolites cajole be|
+65|Customer#000000065|RGT yzQ0y4l0H90P783LG4U95bXQFDRXbWa1sl,X|23|33-733-623-5267|8795.16|AUTOMOBILE|y final foxes serve carefully. theodolites are carefully. pending i|
+66|Customer#000000066|XbsEqXH1ETbJYYtA1A|22|32-213-373-5094|242.77|HOUSEHOLD|le slyly accounts. carefully silent packages benea|
+67|Customer#000000067|rfG0cOgtr5W8 xILkwp9fpCS8|9|19-403-114-4356|8166.59|MACHINERY|indle furiously final, even theodo|
+68|Customer#000000068|o8AibcCRkXvQFh8hF,7o|12|22-918-832-2411|6853.37|HOUSEHOLD| pending pinto beans impress realms. final dependencies |
+69|Customer#000000069|Ltx17nO9Wwhtdbe9QZVxNgP98V7xW97uvSH1prEw|9|19-225-978-5670|1709.28|HOUSEHOLD|thely final ideas around the quickly final dependencies affix carefully quickly final theodolites. final accounts c|
+6|Customer#000000006|sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn|20|30-114-968-4951|7638.57|AUTOMOBILE|tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious|
+70|Customer#000000070|mFowIuhnHjp2GjCiYYavkW kUwOjIaTCQ|22|32-828-107-2832|4867.52|FURNITURE|fter the special asymptotes. ideas after the unusual frets cajole quickly regular pinto be|
+71|Customer#000000071|TlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B|7|17-710-812-5403|-611.19|HOUSEHOLD|g courts across the regular, final pinto beans are blithely pending ac|
+72|Customer#000000072|putjlmskxE,zs,HqeIA9Wqu7dhgH5BVCwDwHHcf|2|12-759-144-9689|-362.86|FURNITURE|ithely final foxes sleep always quickly bold accounts. final wat|
+73|Customer#000000073|8IhIxreu4Ug6tt5mog4|0|10-473-439-3214|4288.50|BUILDING|usual, unusual packages sleep busily along the furiou|
+74|Customer#000000074|IkJHCA3ZThF7qL7VKcrU nRLl,kylf |4|14-199-862-7209|2764.43|MACHINERY|onic accounts. blithely slow packages would haggle carefully. qui|
+75|Customer#000000075|Dh 6jZ,cwxWLKQfRKkiGrzv6pm|18|28-247-803-9025|6684.10|AUTOMOBILE| instructions cajole even, even deposits. finally bold deposits use above the even pains. slyl|
+76|Customer#000000076|m3sbCvjMOHyaOofH,e UkGPtqc4|0|10-349-718-3044|5745.33|FURNITURE|pecial deposits. ironic ideas boost blithely according to the closely ironic theodolites! furiously final deposits n|
+77|Customer#000000077|4tAE5KdMFGD4byHtXF92vx|17|27-269-357-4674|1738.87|BUILDING|uffily silent requests. carefully ironic asymptotes among the ironic hockey players are carefully bli|
+78|Customer#000000078|HBOta,ZNqpg3U2cSL0kbrftkPwzX|9|19-960-700-9191|7136.97|FURNITURE|ests. blithely bold pinto beans h|
+79|Customer#000000079|n5hH2ftkVRwW8idtD,BmM2|15|25-147-850-4166|5121.28|MACHINERY|es. packages haggle furiously. regular, special requests poach after the quickly express ideas. blithely pending re|
+7|Customer#000000007|TcGe5gaZNgVePxU5kRrvXBfkasDTea|18|28-190-982-9759|9561.95|AUTOMOBILE|ainst the ironic, express theodolites. express, even pinto beans among the exp|
+80|Customer#000000080|K,vtXp8qYB |0|10-267-172-7101|7383.53|FURNITURE|tect among the dependencies. bold accounts engage closely even pinto beans. ca|
+81|Customer#000000081|SH6lPA7JiiNC6dNTrR|20|30-165-277-3269|2023.71|BUILDING|r packages. fluffily ironic requests cajole fluffily. ironically regular theodolit|
+82|Customer#000000082|zhG3EZbap4c992Gj3bK,3Ne,Xn|18|28-159-442-5305|9468.34|AUTOMOBILE|s wake. bravely regular accounts are furiously. regula|
+83|Customer#000000083|HnhTNB5xpnSF20JBH4Ycs6psVnkC3RDf|22|32-817-154-4122|6463.51|BUILDING|ccording to the quickly bold warhorses. final, regular foxes integrate carefully. bold packages nag blithely ev|
+84|Customer#000000084|lpXz6Fwr9945rnbtMc8PlueilS1WmASr CB|11|21-546-818-3802|5174.71|FURNITURE|ly blithe foxes. special asymptotes haggle blithely against the furiously regular depo|
+85|Customer#000000085|siRerlDwiolhYR 8FgksoezycLj|5|15-745-585-8219|3386.64|FURNITURE|ronic ideas use above the slowly pendin|
+86|Customer#000000086|US6EGGHXbTTXPL9SBsxQJsuvy|0|10-677-951-2353|3306.32|HOUSEHOLD|quests. pending dugouts are carefully aroun|
+87|Customer#000000087|hgGhHVSWQl 6jZ6Ev|23|33-869-884-7053|6327.54|FURNITURE|hely ironic requests integrate according to the ironic accounts. slyly regular pla|
+88|Customer#000000088|wtkjBN9eyrFuENSMmMFlJ3e7jE5KXcg|16|26-516-273-2566|8031.44|AUTOMOBILE|s are quickly above the quickly ironic instructions; even requests about the carefully final deposi|
+89|Customer#000000089|dtR, y9JQWUO6FoJExyp8whOU|14|24-394-451-5404|1530.76|FURNITURE|counts are slyly beyond the slyly final accounts. quickly final ideas wake. r|
+8|Customer#000000008|I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5|17|27-147-574-9335|6819.74|BUILDING|among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide|
+90|Customer#000000090|QxCzH7VxxYUWwfL7|16|26-603-491-1238|7354.23|BUILDING|sly across the furiously even |
+91|Customer#000000091|S8OMYFrpHwoNHaGBeuS6E 6zhHGZiprw1b7 q|8|18-239-400-3677|4643.14|AUTOMOBILE|onic accounts. fluffily silent pinto beans boost blithely according to the fluffily exp|
+92|Customer#000000092|obP PULk2LH LqNF,K9hcbNqnLAkJVsl5xqSrY,|2|12-446-416-8471|1182.91|MACHINERY|. pinto beans hang slyly final deposits. ac|
+93|Customer#000000093|EHXBr2QGdh|7|17-359-388-5266|2182.52|MACHINERY|press deposits. carefully regular platelets r|
+94|Customer#000000094|IfVNIN9KtkScJ9dUjK3Pg5gY1aFeaXewwf|9|19-953-499-8833|5500.11|HOUSEHOLD|latelets across the bold, final requests sleep according to the fluffily bold accounts. unusual deposits amon|
+95|Customer#000000095|EU0xvmWvOmUUn5J,2z85DQyG7QCJ9Xq7|15|25-923-255-2929|5327.38|MACHINERY|ithely. ruthlessly final requests wake slyly alongside of the furiously silent pinto beans. even the|
+96|Customer#000000096|vWLOrmXhRR|8|18-422-845-1202|6323.92|AUTOMOBILE|press requests believe furiously. carefully final instructions snooze carefully. |
+97|Customer#000000097|OApyejbhJG,0Iw3j rd1M|17|27-588-919-5638|2164.48|AUTOMOBILE|haggle slyly. bold, special ideas are blithely above the thinly bold theo|
+98|Customer#000000098|7yiheXNSpuEAwbswDW|12|22-885-845-6889|-551.37|BUILDING|ages. furiously pending accounts are quickly carefully final foxes: busily pe|
+99|Customer#000000099|szsrOiPtCHVS97Lt|15|25-515-237-9232|4088.65|HOUSEHOLD|cajole slyly about the regular theodolites! furiously bold requests nag along the pending, regular packages. somas|
+9|Customer#000000009|xKiAFTjUsCuxfeleNqefumTrjS|8|18-338-906-3675|8324.07|FURNITURE|r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl|
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/core-site.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/core-site.xml
new file mode 100644
index 0000000..47dfac5
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/core-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://127.0.0.1:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml
new file mode 100644
index 0000000..8d29b1d
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>1</value>
+</property>
+
+<property>
+	<name>dfs.block.size</name>
+	<value>65536</value>
+</property>
+
+</configuration>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/log4j.properties b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/mapred-site.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/mapred-site.xml
new file mode 100644
index 0000000..39b6505
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/mapred-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+     <name>mapred.tasktracker.map.tasks.maximum</name>
+     <value>20</value>
+  </property>
+   <property>
+      <name>mapred.tasktracker.reduce.tasks.maximum</name>
+      <value>20</value>
+   </property>
+   <property>
+      <name>mapred.max.split.size</name>
+      <value>2048</value>
+   </property>
+
+</configuration>
diff --git a/hyracks/hyracks-hdfs/pom.xml b/hyracks/hyracks-hdfs/pom.xml
new file mode 100644
index 0000000..5ed76e9
--- /dev/null
+++ b/hyracks/hyracks-hdfs/pom.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-hdfs</artifactId>
+  <packaging>pom</packaging>
+  <name>hyracks-hdfs</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hyracks-hdfs-0.20.2</module>
+    <module>hyracks-hdfs-0.23.1</module>
+    <module>hyracks-hdfs-core</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-ipc/pom.xml b/hyracks/hyracks-ipc/pom.xml
new file mode 100644
index 0000000..a5e3662
--- /dev/null
+++ b/hyracks/hyracks-ipc/pom.xml
@@ -0,0 +1,32 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-ipc</artifactId>
+  <name>hyracks-ipc</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>junit</groupId>
+  	<artifactId>junit</artifactId>
+  	<version>4.8.1</version>
+  	<scope>test</scope>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCHandle.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCHandle.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCHandle.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCHandle.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCI.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCI.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCI.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IIPCI.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPCPerformanceCounters.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPCPerformanceCounters.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPCPerformanceCounters.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPCPerformanceCounters.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPayloadSerializerDeserializer.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPayloadSerializerDeserializer.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPayloadSerializerDeserializer.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IPayloadSerializerDeserializer.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IResponseCallback.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IResponseCallback.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IResponseCallback.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/IResponseCallback.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/RPCInterface.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/RPCInterface.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/RPCInterface.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/api/RPCInterface.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/exceptions/IPCException.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/exceptions/IPCException.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/exceptions/IPCException.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/exceptions/IPCException.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/HandleState.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/HandleState.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/HandleState.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/HandleState.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCConnectionManager.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCConnectionManager.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCConnectionManager.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCConnectionManager.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCHandle.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCHandle.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCHandle.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCHandle.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCSystem.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCSystem.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCSystem.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/IPCSystem.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
diff --git a/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/Message.java b/hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/Message.java
similarity index 100%
rename from hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/Message.java
rename to hyracks/hyracks-ipc/src/main/java/edu/uci/ics/hyracks/ipc/impl/Message.java
diff --git a/hyracks-ipc/src/test/java/edu/uci/ics/hyracks/ipc/tests/IPCTest.java b/hyracks/hyracks-ipc/src/test/java/edu/uci/ics/hyracks/ipc/tests/IPCTest.java
similarity index 100%
rename from hyracks-ipc/src/test/java/edu/uci/ics/hyracks/ipc/tests/IPCTest.java
rename to hyracks/hyracks-ipc/src/test/java/edu/uci/ics/hyracks/ipc/tests/IPCTest.java
diff --git a/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
new file mode 100644
index 0000000..e95f7f0
--- /dev/null
+++ b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
@@ -0,0 +1,26 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
+  <packaging>maven-plugin</packaging>
+  <name>Hyracks VirtualCluster Maven Plugin</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-maven-plugins</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksCLIMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksCLIMojo.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksCLIMojo.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksCLIMojo.java
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksMojo.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksMojo.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksMojo.java
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksServerMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksServerMojo.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksServerMojo.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/AbstractHyracksServerMojo.java
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksAppDeploymentMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksAppDeploymentMojo.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksAppDeploymentMojo.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksAppDeploymentMojo.java
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksCCStartMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksCCStartMojo.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksCCStartMojo.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksCCStartMojo.java
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksServiceRegistry.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksServiceRegistry.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksServiceRegistry.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksServiceRegistry.java
diff --git a/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksStopServicesMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksStopServicesMojo.java
similarity index 100%
rename from hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksStopServicesMojo.java
rename to hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksStopServicesMojo.java
diff --git a/hyracks/hyracks-maven-plugins/pom.xml b/hyracks/hyracks-maven-plugins/pom.xml
new file mode 100644
index 0000000..d9223ba
--- /dev/null
+++ b/hyracks/hyracks-maven-plugins/pom.xml
@@ -0,0 +1,26 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-maven-plugins</artifactId>
+  <packaging>pom</packaging>
+  <name>hyracks-maven-plugins</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>org.apache.maven</groupId>
+  		<artifactId>maven-plugin-api</artifactId>
+  		<version>2.2.1</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+
+  <modules>
+    <module>hyracks-virtualcluster-maven-plugin</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-net/pom.xml b/hyracks/hyracks-net/pom.xml
new file mode 100644
index 0000000..5eb88b5
--- /dev/null
+++ b/hyracks/hyracks-net/pom.xml
@@ -0,0 +1,32 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-net</artifactId>
+  <name>hyracks-net</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>junit</groupId>
+  	<artifactId>junit</artifactId>
+  	<version>4.8.1</version>
+  	<scope>test</scope>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/IBufferAcceptor.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/IBufferAcceptor.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/IBufferAcceptor.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/IBufferAcceptor.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/ICloseableBufferAcceptor.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/ICloseableBufferAcceptor.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/ICloseableBufferAcceptor.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/buffers/ICloseableBufferAcceptor.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/exceptions/NetException.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/exceptions/NetException.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/exceptions/NetException.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/exceptions/NetException.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelControlBlock.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelControlBlock.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelControlBlock.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelControlBlock.java
diff --git a/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java
new file mode 100644
index 0000000..8fa99be
--- /dev/null
+++ b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.net.protocols.muxdemux;
+
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.net.exceptions.NetException;
+
+public class ChannelSet { // Per-connection bookkeeping for multiplexed channels: id allocation plus pending-event bitmaps. All mutable state is guarded by synchronizing on mConn.
+    private static final Logger LOGGER = Logger.getLogger(ChannelSet.class.getName());
+
+    private static final int INITIAL_SIZE = 16; // initial ccbArray capacity; expand() doubles it until the needed index fits
+
+    private final MultiplexedConnection mConn; // owning connection; doubles as the monitor for every method below
+
+    private ChannelControlBlock[] ccbArray; // indexed by channel id; a null slot is free
+
+    private final BitSet allocationBitmap; // set bit = channel id currently allocated in ccbArray
+
+    private final BitSet pendingChannelWriteBitmap; // maintained by markPendingWrite/unmarkPendingWrite; cleared wholesale in freeChannel
+
+    private final BitSet pendingChannelCreditsBitmap; // maintained by addPendingCredits/unmarkPendingCredits
+
+    private final BitSet pendingChannelSynBitmap; // set by initiateChannelSyn; cleared in freeChannel
+
+    private final BitSet pendingEOSAckBitmap; // set by markEOSAck; cleared in freeChannel
+
+    private int openChannelCount; // number of allocated slots; ++ in createChannel, -- in freeChannel
+
+    private final IEventCounter pendingWriteEventsCounter; // bumped whenever any pending bit transitions clear->set, decremented on set->clear
+
+    ChannelSet(MultiplexedConnection mConn, IEventCounter pendingWriteEventsCounter) { // Starts empty: all bitmaps clear, zero open channels.
+        this.mConn = mConn;
+        ccbArray = new ChannelControlBlock[INITIAL_SIZE];
+        allocationBitmap = new BitSet();
+        pendingChannelWriteBitmap = new BitSet();
+        pendingChannelCreditsBitmap = new BitSet();
+        pendingChannelSynBitmap = new BitSet();
+        pendingEOSAckBitmap = new BitSet();
+        this.pendingWriteEventsCounter = pendingWriteEventsCounter;
+        openChannelCount = 0;
+    }
+
+    ChannelControlBlock allocateChannel() throws NetException { // Allocates the lowest free channel id, reclaiming completely closed channels first.
+        synchronized (mConn) {
+       	    cleanupClosedChannels(); // NOTE(review): stray tab in the indent, and cleanup is repeated in the fallback below -- one of the two calls looks redundant
+            int idx = allocationBitmap.nextClearBit(0); // BitSet.nextClearBit never returns a negative value; the < 0 checks below are defensive
+            if (idx < 0 || idx >= ccbArray.length) {
+                cleanupClosedChannels();
+                idx = allocationBitmap.nextClearBit(0);
+                if (idx < 0 || idx == ccbArray.length) {
+                    idx = ccbArray.length; // nothing was reclaimed: append past the end; createChannel() grows the array
+                }
+            }
+            return createChannel(idx);
+        }
+    }
+
+    private void cleanupClosedChannels() { // Frees every slot whose channel reports completelyClosed(); only called under the mConn lock.
+        for (int i = 0; i < ccbArray.length; ++i) {
+            ChannelControlBlock ccb = ccbArray[i];
+            if (ccb != null) {
+                if (ccb.completelyClosed()) {
+                    if (LOGGER.isLoggable(Level.FINE)) {
+                        LOGGER.fine("Cleaning free channel: " + ccb);
+                    }
+                    freeChannel(ccb);
+                }
+            }
+        }
+    }
+
+    ChannelControlBlock registerChannel(int channelId) throws NetException { // Creates a channel at an externally supplied id (presumably chosen by the remote peer -- confirm with callers).
+        synchronized (mConn) {
+            return createChannel(channelId);
+        }
+    }
+
+    private void freeChannel(ChannelControlBlock channel) { // Releases the slot and clears every pending bit for this id; only called from code holding the mConn lock.
+        int idx = channel.getChannelId();
+        ccbArray[idx] = null;
+        allocationBitmap.clear(idx);
+        pendingChannelWriteBitmap.clear(idx); // NOTE(review): bits are cleared here without decrementing pendingWriteEventsCounter -- verify the counter cannot drift when a channel closes with bits still set
+        pendingChannelCreditsBitmap.clear(idx);
+        pendingChannelSynBitmap.clear(idx);
+        pendingEOSAckBitmap.clear(idx);
+        --openChannelCount;
+    }
+
+    ChannelControlBlock getCCB(int channelId) { // Unsynchronized slot read; returns null for a free id.
+        return ccbArray[channelId];
+    }
+
+    BitSet getPendingChannelWriteBitmap() {
+        return pendingChannelWriteBitmap;
+    }
+
+    BitSet getPendingChannelCreditsBitmap() {
+        return pendingChannelCreditsBitmap;
+    }
+
+    BitSet getPendingChannelSynBitmap() {
+        return pendingChannelSynBitmap;
+    }
+
+    BitSet getPendingEOSAckBitmap() {
+        return pendingEOSAckBitmap;
+    }
+
+    int getOpenChannelCount() {
+        return openChannelCount;
+    }
+
+    void initiateChannelSyn(int channelId) { // Marks channelId as needing a SYN; asserts the bit was not already set.
+        synchronized (mConn) {
+            assert !pendingChannelSynBitmap.get(channelId);
+            pendingChannelSynBitmap.set(channelId);
+            pendingWriteEventsCounter.increment();
+        }
+    }
+
+    void addPendingCredits(int channelId, int delta) { // Adds read credits to a live channel; no-op for delta <= 0, freed slots, or channels past remote EOS.
+        if (delta <= 0) {
+            return;
+        }
+        synchronized (mConn) {
+            ChannelControlBlock ccb = ccbArray[channelId];
+            if (ccb != null) {
+                if (ccb.getRemoteEOS()) {
+                    return;
+                }
+                int oldCredits = ccb.getReadCredits();
+                ccb.setReadCredits(oldCredits + delta);
+                if (oldCredits == 0) { // only the 0 -> positive transition schedules a credit update, so the pending bit is set at most once
+                    assert !pendingChannelCreditsBitmap.get(channelId);
+                    pendingChannelCreditsBitmap.set(channelId);
+                    pendingWriteEventsCounter.increment();
+                }
+            }
+        }
+    }
+
+    void unmarkPendingCredits(int channelId) { // Idempotent: decrements the counter only if the bit was actually set.
+        synchronized (mConn) {
+            if (pendingChannelCreditsBitmap.get(channelId)) {
+                pendingChannelCreditsBitmap.clear(channelId);
+                pendingWriteEventsCounter.decrement();
+            }
+        }
+    }
+
+    void markPendingWrite(int channelId) { // Asserts the bit was clear -- double-marking would double-count the event.
+        synchronized (mConn) {
+            assert !pendingChannelWriteBitmap.get(channelId);
+            pendingChannelWriteBitmap.set(channelId);
+            pendingWriteEventsCounter.increment();
+        }
+    }
+
+    void unmarkPendingWrite(int channelId) { // Asserts the bit was set -- must pair with a prior markPendingWrite.
+        synchronized (mConn) {
+            assert pendingChannelWriteBitmap.get(channelId);
+            pendingChannelWriteBitmap.clear(channelId);
+            pendingWriteEventsCounter.decrement();
+        }
+    }
+
+    void markEOSAck(int channelId) { // Idempotent, unlike markPendingWrite: the counter is bumped only when the bit was clear.
+        synchronized (mConn) {
+            if (!pendingEOSAckBitmap.get(channelId)) {
+                pendingEOSAckBitmap.set(channelId);
+                pendingWriteEventsCounter.increment();
+            }
+        }
+    }
+
+    void notifyIOError() { // Reports error code -1 to every channel not yet at remote EOS, then queues its EOS ack and drops pending credits.
+        synchronized (mConn) {
+            for (int i = 0; i < ccbArray.length; ++i) {
+                ChannelControlBlock ccb = ccbArray[i];
+                if (ccb != null && !ccb.getRemoteEOS()) {
+                    ccb.reportRemoteError(-1);
+                    markEOSAck(i); // safe to call while holding mConn -- the lock is reentrant
+                    unmarkPendingCredits(i);
+                }
+            }
+        }
+    }
+
+    private ChannelControlBlock createChannel(int idx) throws NetException { // Installs a fresh ChannelControlBlock at idx, growing the array and evicting a completely closed occupant.
+        if (idx > MuxDemuxCommand.MAX_CHANNEL_ID) {
+            throw new NetException("Channel Id > " + MuxDemuxCommand.MAX_CHANNEL_ID + " being opened");
+        }
+        if (idx >= ccbArray.length) {
+            expand(idx);
+        }
+        if (ccbArray[idx] != null) {
+            assert ccbArray[idx].completelyClosed() : ccbArray[idx].toString(); // with -ea a live occupant fails here
+            if (ccbArray[idx].completelyClosed()) { // NOTE(review): without -ea, a still-open occupant is silently overwritten below -- verify this cannot happen
+                if (LOGGER.isLoggable(Level.FINE)) {
+                    LOGGER.fine("Cleaning free channel: " + ccbArray[idx]);
+                }
+                freeChannel(ccbArray[idx]);
+            }
+        }
+        assert idx < ccbArray.length;
+        assert !allocationBitmap.get(idx);
+        ChannelControlBlock channel = new ChannelControlBlock(this, idx);
+        ccbArray[idx] = channel;
+        allocationBitmap.set(idx);
+        ++openChannelCount;
+        return channel;
+    }
+
+    private void expand(int idx) { // Doubles capacity until idx fits; existing entries are preserved by Arrays.copyOf.
+        while (idx >= ccbArray.length) {
+            ccbArray = Arrays.copyOf(ccbArray, ccbArray.length * 2);
+        }
+    }
+}
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelOpenListener.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelOpenListener.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelOpenListener.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelOpenListener.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelReadInterface.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelReadInterface.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelReadInterface.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelReadInterface.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelWriteInterface.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelWriteInterface.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelWriteInterface.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IChannelWriteInterface.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IEventCounter.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IEventCounter.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IEventCounter.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/IEventCounter.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MultiplexedConnection.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxCommand.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxCommand.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxCommand.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxCommand.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxPerformanceCounters.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxPerformanceCounters.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxPerformanceCounters.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemuxPerformanceCounters.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionEventListener.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionEventListener.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionEventListener.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionEventListener.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionListener.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionListener.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionListener.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/ITCPConnectionListener.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPConnection.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPConnection.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPConnection.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPConnection.java
diff --git a/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java
similarity index 100%
rename from hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java
rename to hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java
diff --git a/hyracks-net/src/test/java/edu/uci/ics/hyracks/net/tests/NetTest.java b/hyracks/hyracks-net/src/test/java/edu/uci/ics/hyracks/net/tests/NetTest.java
similarity index 100%
rename from hyracks-net/src/test/java/edu/uci/ics/hyracks/net/tests/NetTest.java
rename to hyracks/hyracks-net/src/test/java/edu/uci/ics/hyracks/net/tests/NetTest.java
diff --git a/hyracks-server/docs/README b/hyracks/hyracks-server/docs/README
similarity index 100%
rename from hyracks-server/docs/README
rename to hyracks/hyracks-server/docs/README
diff --git a/hyracks/hyracks-server/pom.xml b/hyracks/hyracks-server/pom.xml
new file mode 100644
index 0000000..f514820
--- /dev/null
+++ b/hyracks/hyracks-server/pom.xml
@@ -0,0 +1,88 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-server</artifactId>
+  <name>hyracks-server</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
+                  <name>hyrackscc</name>
+                </program>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
+                  <name>hyracksnc</name>
+                </program>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.server.drivers.VirtualClusterDriver</mainClass>
+                  <name>hyracks-virtual-cluster</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-cc</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-server/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-server/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-server/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-server/src/main/assembly/binary-assembly.xml
diff --git a/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/drivers/VirtualClusterDriver.java b/hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/drivers/VirtualClusterDriver.java
similarity index 100%
rename from hyracks-server/src/main/java/edu/uci/ics/hyracks/server/drivers/VirtualClusterDriver.java
rename to hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/drivers/VirtualClusterDriver.java
diff --git a/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksCCProcess.java b/hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksCCProcess.java
similarity index 100%
rename from hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksCCProcess.java
rename to hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksCCProcess.java
diff --git a/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksNCProcess.java b/hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksNCProcess.java
similarity index 100%
rename from hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksNCProcess.java
rename to hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksNCProcess.java
diff --git a/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksServerProcess.java b/hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksServerProcess.java
similarity index 100%
rename from hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksServerProcess.java
rename to hyracks/hyracks-server/src/main/java/edu/uci/ics/hyracks/server/process/HyracksServerProcess.java
diff --git a/hyracks/hyracks-storage-am-btree/pom.xml b/hyracks/hyracks-storage-am-btree/pom.xml
new file mode 100644
index 0000000..ccb3b41
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/pom.xml
@@ -0,0 +1,64 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-btree</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-am-btree</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+        <dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>  	  		
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
new file mode 100644
index 0000000..7a61d09
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public interface IBTreeFrame extends ITreeIndexFrame {
+	public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException;
+	public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException;	
+	public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException;
+	public void insertSorted(ITupleReference tuple);
+    public boolean getSmFlag();
+    public void setSmFlag(boolean smFlag);
+    public void setMultiComparator(MultiComparator cmp);
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
new file mode 100644
index 0000000..23fdcf5
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.api;
+
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+
+public interface IBTreeInteriorFrame extends IBTreeFrame {
+    public int getChildPageId(RangePredicate pred);
+
+    public int getLeftmostChildPageId();
+
+    public int getRightmostChildPageId();
+
+    public void setRightmostChildPageId(int pageId);
+
+    public void deleteGreatest();
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
new file mode 100644
index 0000000..74bf2b0
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.api;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public interface IBTreeLeafFrame extends IBTreeFrame {
+    public void setNextLeaf(int nextPage);
+
+    public int getNextLeaf();
+
+    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
+            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) throws HyracksDataException;
+    
+    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public ITupleReference getUpsertBeforeTuple(ITupleReference tuple, int targetTupleIndex) throws TreeIndexException;
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
new file mode 100644
index 0000000..0636968
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+// a slot consists of two fields:
+// first field is 1 byte, it indicates the slot number of a prefix tuple
+// we call the first field prefixSlotOff
+// second field is 3 bytes, it points to the start offset of a tuple
+// we call the second field tupleOff
+
+// we distinguish between two slot types:
+// prefix slots that point to prefix tuples, 
+// a frame is assumed to have a field numPrefixTuples
+// tuple slots that point to data tuples
+// a frame is assumed to have a field numTuples
+// a tuple slot contains a tuple pointer and a pointer to a prefix slot (prefix slot number) 
+
+// INSERT procedure
+// a tuple insertion may use an existing prefix tuple 
+// a tuple insertion may never create a new prefix tuple
+// modifying the prefix slots would be extremely expensive because: 
+// potentially all tuples slots would have to change their prefix slot pointers
+// all prefixes are recomputed during a reorg or compaction
+
+public interface IPrefixSlotManager extends ISlotManager {
+    // TODO: Clean up interface after extending ISlotManager.
+	
+    public int decodeFirstSlotField(int slot);
+
+    public int decodeSecondSlotField(int slot);
+
+    public int encodeSlotFields(int firstField, int secondField);
+
+    public int findSlot(ITupleReference searchKey, ITreeIndexTupleReference frameTuple,
+            ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
+            FindTupleNoExactMatchPolicy matchPolicy);
+
+    public int insertSlot(int slot, int tupleOff);
+
+    // returns prefix slot number, returns TUPLE_UNCOMPRESSED if none found
+    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple);
+
+    public int getTupleSlotStartOff();
+
+    public int getTupleSlotEndOff();
+
+    public int getPrefixSlotStartOff();
+
+    public int getPrefixSlotEndOff();
+
+    public int getTupleSlotOff(int tupleIndex);
+
+    public int getPrefixSlotOff(int tupleIndex);
+
+    public int getSlotSize();
+
+    public void setSlot(int offset, int value);
+
+    // functions for testing
+    public void setPrefixSlot(int tupleIndex, int slot);
+    
+    public void setMultiComparator(MultiComparator cmp);
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
new file mode 100644
index 0000000..f78b6e4
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
@@ -0,0 +1,518 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.compressors;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+
+public class FieldPrefixCompressor implements ITreeIndexFrameCompressor {
+
+    // minimum ratio of uncompressed tuples to total tuples to consider
+    // re-compression
+    private float ratioThreshold;
+
+    // minimum number of tuples matching field prefixes to consider compressing
+    // them
+    private int occurrenceThreshold;
+
+    private ITypeTraits[] typeTraits;
+
+    public FieldPrefixCompressor(ITypeTraits[] typeTraits, float ratioThreshold, int occurrenceThreshold) {
+        this.typeTraits = typeTraits;
+        this.ratioThreshold = ratioThreshold;
+        this.occurrenceThreshold = occurrenceThreshold;
+    }
+
+    @Override
+    public boolean compress(ITreeIndexFrame indexFrame, MultiComparator cmp) throws Exception {
+        BTreeFieldPrefixNSMLeafFrame frame = (BTreeFieldPrefixNSMLeafFrame)indexFrame;
+    	int tupleCount = frame.getTupleCount();
+        if (tupleCount <= 0) {
+            frame.setPrefixTupleCount(0);
+            frame.setFreeSpaceOff(frame.getOrigFreeSpaceOff());
+            frame.setTotalFreeSpace(frame.getOrigTotalFreeSpace());
+            return false;
+        }
+
+        if (cmp.getKeyFieldCount() == 1) {
+            return false;
+        }
+        
+        int uncompressedTupleCount = frame.getUncompressedTupleCount();
+        float ratio = (float) uncompressedTupleCount / (float) tupleCount;
+        if (ratio < ratioThreshold)
+            return false;
+
+        IBinaryComparator[] cmps = cmp.getComparators();
+        int fieldCount = typeTraits.length;
+
+        ByteBuffer buf = frame.getBuffer();
+        byte[] pageArray = buf.array();
+        IPrefixSlotManager slotManager = frame.slotManager;
+
+        // perform analysis pass
+        ArrayList<KeyPartition> keyPartitions = getKeyPartitions(frame, cmp, occurrenceThreshold);
+        if (keyPartitions.size() == 0)
+            return false;
+
+        // for each keyPartition, determine the best prefix length for
+        // compression, and count how many prefix tuples we would need in total
+        int totalSlotsNeeded = 0;
+        int totalPrefixBytes = 0;
+        for (KeyPartition kp : keyPartitions) {
+
+            for (int j = 0; j < kp.pmi.length; j++) {
+                int benefitMinusCost = kp.pmi[j].spaceBenefit - kp.pmi[j].spaceCost;
+                if (benefitMinusCost > kp.maxBenefitMinusCost) {
+                    kp.maxBenefitMinusCost = benefitMinusCost;
+                    kp.maxPmiIndex = j;
+                }
+            }
+
+            // ignore keyPartitions with no benefit and don't count bytes and
+            // slots needed
+            if (kp.maxBenefitMinusCost <= 0)
+                continue;
+
+            totalPrefixBytes += kp.pmi[kp.maxPmiIndex].prefixBytes;
+            totalSlotsNeeded += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
+        }
+
+        // System.out.println("TOTAL SLOTS NEEDED: " + totalSlotsNeeded);
+
+        // we use a greedy heuristic to solve this "knapsack"-like problem
+        // (every keyPartition has a space savings and a number of slots
+        // required, but the number of slots are constrained by
+        // MAX_PREFIX_SLOTS)
+        // we sort the keyPartitions by maxBenefitMinusCost / prefixSlotsNeeded
+        // and later choose the top MAX_PREFIX_SLOTS
+        int[] newPrefixSlots;
+        if (totalSlotsNeeded > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
+            // order keyPartitions by the heuristic function
+            SortByHeuristic heuristicComparator = new SortByHeuristic();
+            Collections.sort(keyPartitions, heuristicComparator);
+            int slotsUsed = 0;
+            int numberKeyPartitions = -1;
+            for (int i = 0; i < keyPartitions.size(); i++) {
+                KeyPartition kp = keyPartitions.get(i);
+                slotsUsed += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
+                if (slotsUsed > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
+                    numberKeyPartitions = i + 1;
+                    slotsUsed -= kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
+                    break;
+                }
+            }
+            newPrefixSlots = new int[slotsUsed];
+
+            // remove irrelevant keyPartitions and adjust total prefix bytes
+            while (keyPartitions.size() >= numberKeyPartitions) {
+                int lastIndex = keyPartitions.size() - 1;
+                KeyPartition kp = keyPartitions.get(lastIndex);
+                if (kp.maxBenefitMinusCost > 0)
+                    totalPrefixBytes -= kp.pmi[kp.maxPmiIndex].prefixBytes;
+                keyPartitions.remove(lastIndex);
+            }
+
+            // re-order keyPartitions by prefix (corresponding to original
+            // order)
+            SortByOriginalRank originalRankComparator = new SortByOriginalRank();
+            Collections.sort(keyPartitions, originalRankComparator);
+        } else {
+            newPrefixSlots = new int[totalSlotsNeeded];
+        }
+
+        int[] newTupleSlots = new int[tupleCount];
+
+        // WARNING: our hope is that compression is infrequent
+        // here we allocate a big chunk of memory to temporarily hold the new,
+        // re-compressed tuples
+        // in general it is very hard to avoid this step
+        int prefixFreeSpace = frame.getOrigFreeSpaceOff();
+        int tupleFreeSpace = prefixFreeSpace + totalPrefixBytes;
+        byte[] buffer = new byte[buf.capacity()];
+        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
+
+        // perform compression, and reorg
+        // we assume that the keyPartitions are sorted by the prefixes (i.e., in
+        // the logical target order)
+        int kpIndex = 0;
+        int tupleIndex = 0;
+        int prefixTupleIndex = 0;
+        uncompressedTupleCount = 0;
+
+        TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+        FieldPrefixTupleReference tupleToWrite = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+        tupleToWrite.setFieldCount(fieldCount);
+
+        while (tupleIndex < tupleCount) {
+            if (kpIndex < keyPartitions.size()) {
+
+                // beginning of keyPartition found, compress entire keyPartition
+                if (tupleIndex == keyPartitions.get(kpIndex).firstTupleIndex) {
+
+                    // number of fields we decided to use for compression of
+                    // this keyPartition
+                    int fieldCountToCompress = keyPartitions.get(kpIndex).maxPmiIndex + 1;
+                    int segmentStart = keyPartitions.get(kpIndex).firstTupleIndex;
+                    int tuplesInSegment = 1;
+
+                    // System.out.println("PROCESSING KEYPARTITION: " + kpIndex
+                    // + " RANGE: " + keyPartitions.get(kpIndex).firstRecSlotNum
+                    // + " " + keyPartitions.get(kpIndex).lastRecSlotNum +
+                    // " FIELDSTOCOMPRESS: " + fieldCountToCompress);
+
+                    FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(tupleWriter
+                            .createTupleReference());
+                    prevTuple.setFieldCount(fieldCount);
+
+                    FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+                    tuple.setFieldCount(fieldCount);
+
+                    for (int i = tupleIndex + 1; i <= keyPartitions.get(kpIndex).lastTupleIndex; i++) {
+                        prevTuple.resetByTupleIndex(frame, i - 1);
+                        tuple.resetByTupleIndex(frame, i);
+
+                        // check if tuples match in fieldCountToCompress of their
+                        // first fields
+                        int prefixFieldsMatch = 0;
+                        for (int j = 0; j < fieldCountToCompress; j++) {
+                            if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j),
+                                    pageArray, tuple.getFieldStart(j), tuple.getFieldLength(j)) == 0)
+                                prefixFieldsMatch++;
+                            else
+                                break;
+                        }
+
+                        // the two tuples must match in exactly the number of
+                        // fields we decided to compress for this keyPartition
+                        int processSegments = 0;
+                        if (prefixFieldsMatch == fieldCountToCompress)
+                            tuplesInSegment++;
+                        else
+                            processSegments++;
+
+                        if (i == keyPartitions.get(kpIndex).lastTupleIndex)
+                            processSegments++;
+
+                        for (int r = 0; r < processSegments; r++) {
+                            // compress current segment and then start new
+                            // segment
+                            if (tuplesInSegment < occurrenceThreshold || fieldCountToCompress <= 0) {
+                                // segment does not have at least
+                                // occurrenceThreshold tuples, so write tuples
+                                // uncompressed
+                                for (int j = 0; j < tuplesInSegment; j++) {
+                                    int slotNum = segmentStart + j;
+                                    tupleToWrite.resetByTupleIndex(frame, slotNum);
+                                    newTupleSlots[tupleCount - 1 - slotNum] = slotManager.encodeSlotFields(
+                                            FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+                                    tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
+                                }
+                                uncompressedTupleCount += tuplesInSegment;
+                            } else {
+                                // segment has enough tuples, compress segment
+                                // extract prefix, write prefix tuple to buffer,
+                                // and set prefix slot
+                                newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] = slotManager
+                                        .encodeSlotFields(fieldCountToCompress, prefixFreeSpace);
+                                // int tmp = freeSpace;
+                                // prevRec.reset();
+                                // System.out.println("SOURCE CONTENTS: " +
+                                // buf.getInt(prevRec.getFieldOff()) + " " +
+                                // buf.getInt(prevRec.getFieldOff()+4));
+                                prefixFreeSpace += tupleWriter.writeTupleFields(prevTuple, 0, fieldCountToCompress,
+                                        byteBuffer.array(), prefixFreeSpace);
+                                // System.out.println("WRITING PREFIX RECORD " +
+                                // prefixSlotNum + " AT " + tmp + " " +
+                                // freeSpace);
+                                // System.out.print("CONTENTS: ");
+                                // for(int x = 0; x < fieldCountToCompress; x++)
+                                // System.out.print(buf.getInt(tmp + x*4) +
+                                // " ");
+                                // System.out.println();
+
+                                // truncate tuples, write them to buffer, and
+                                // set tuple slots
+                                for (int j = 0; j < tuplesInSegment; j++) {
+                                    int currTupleIndex = segmentStart + j;
+                                    tupleToWrite.resetByTupleIndex(frame, currTupleIndex);
+                                    newTupleSlots[tupleCount - 1 - currTupleIndex] = slotManager.encodeSlotFields(
+                                            prefixTupleIndex, tupleFreeSpace);
+                                    tupleFreeSpace += tupleWriter.writeTupleFields(tupleToWrite, fieldCountToCompress,
+                                            fieldCount - fieldCountToCompress, byteBuffer.array(), tupleFreeSpace);
+                                }
+
+                                prefixTupleIndex++;
+                            }
+
+                            // begin new segment
+                            segmentStart = i;
+                            tuplesInSegment = 1;
+                        }
+                    }
+
+                    tupleIndex = keyPartitions.get(kpIndex).lastTupleIndex;
+                    kpIndex++;
+                } else {
+                    // just write the tuple uncompressed
+                    tupleToWrite.resetByTupleIndex(frame, tupleIndex);
+                    newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
+                            FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+                    tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
+                    uncompressedTupleCount++;
+                }
+            } else {
+                // just write the tuple uncompressed
+                tupleToWrite.resetByTupleIndex(frame, tupleIndex);
+                newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
+                        FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+                tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
+                uncompressedTupleCount++;
+            }
+            tupleIndex++;
+        }
+
+        // sanity check to see if we have written exactly as many prefix bytes
+        // as computed before
+        if (prefixFreeSpace != frame.getOrigFreeSpaceOff() + totalPrefixBytes) {
+            throw new Exception("ERROR: Number of prefix bytes written don't match computed number");
+        }
+
+        // in some rare instances our procedure could even increase the space
+        // requirement which is very dangerous
+        // this can happen due to the greedy solution of the knapsack-like
+        // problem
+        // therefore, we check if the new space exceeds the page size to avoid
+        // the only danger of an increasing space
+        int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize() + newPrefixSlots.length
+                * slotManager.getSlotSize();
+        if (totalSpace > buf.capacity())
+            return false; // just leave the page as is
+
+        // copy new tuple and new slots into original page
+        int freeSpaceAfterInit = frame.getOrigFreeSpaceOff();
+        System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace
+                        - freeSpaceAfterInit);
+
+        // copy prefix slots
+        int slotOffRunner = buf.capacity() - slotManager.getSlotSize();
+        for (int i = 0; i < newPrefixSlots.length; i++) {
+            buf.putInt(slotOffRunner, newPrefixSlots[newPrefixSlots.length - 1 - i]);
+            slotOffRunner -= slotManager.getSlotSize();
+        }
+
+        // copy tuple slots
+        for (int i = 0; i < newTupleSlots.length; i++) {
+            buf.putInt(slotOffRunner, newTupleSlots[newTupleSlots.length - 1 - i]);
+            slotOffRunner -= slotManager.getSlotSize();
+        }
+
+        // int originalFreeSpaceOff = frame.getOrigFreeSpaceOff();
+        // System.out.println("ORIGINALFREESPACE: " + originalFreeSpaceOff);
+        // System.out.println("RECSPACE BEF: " + (frame.getFreeSpaceOff() -
+        // originalFreeSpaceOff));
+        // System.out.println("RECSPACE AFT: " + (recordFreeSpace -
+        // originalFreeSpaceOff));
+        // System.out.println("PREFIXSLOTS BEF: " +
+        // frame.getNumPrefixRecords());
+        // System.out.println("PREFIXSLOTS AFT: " + newPrefixSlots.length);
+        //        
+        // System.out.println("FREESPACE BEF: " + frame.getFreeSpaceOff());
+        // System.out.println("FREESPACE AFT: " + recordFreeSpace);
+        // System.out.println("PREFIXES: " + newPrefixSlots.length + " / " +
+        // FieldPrefixSlotManager.MAX_PREFIX_SLOTS);
+        // System.out.println("RECORDS: " + newRecordSlots.length);
+
+        // update space fields, TODO: we need to update more fields
+        frame.setFreeSpaceOff(tupleFreeSpace);
+        frame.setPrefixTupleCount(newPrefixSlots.length);
+        frame.setUncompressedTupleCount(uncompressedTupleCount);
+        int totalFreeSpace = buf.capacity() - tupleFreeSpace
+                - ((newTupleSlots.length + newPrefixSlots.length) * slotManager.getSlotSize());
+        frame.setTotalFreeSpace(totalFreeSpace);
+
+        return true;
+    }
+
+    /**
+     * Analysis pass over the page's tuples to determine the costs and benefits
+     * of the different compression options.
+     *
+     * A "key partition" is a maximal range of consecutive tuples that share an
+     * identical first key field. For each key partition one prefix length
+     * (number of fields) is later chosen, and all tuples in that partition are
+     * compressed against prefixes of that length; the prefix length may differ
+     * between partitions.
+     *
+     * @param frame
+     *            leaf frame whose tuples are analyzed
+     * @param cmp
+     *            comparators for the key fields
+     * @param occurrenceThreshold
+     *            minimum number of tuples that must share a common prefix for
+     *            compressing that prefix to be considered worthwhile
+     * @return key partitions containing at least occurrenceThreshold tuples
+     */
+    private ArrayList<KeyPartition> getKeyPartitions(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp,
+            int occurrenceThreshold) {
+        IBinaryComparator[] cmps = cmp.getComparators();
+        int fieldCount = typeTraits.length;
+
+        int maxCmps = cmps.length - 1;
+        ByteBuffer buf = frame.getBuffer();
+        byte[] pageArray = buf.array();
+        IPrefixSlotManager slotManager = frame.slotManager;
+
+        ArrayList<KeyPartition> keyPartitions = new ArrayList<KeyPartition>();
+        KeyPartition kp = new KeyPartition(maxCmps);
+        keyPartitions.add(kp);
+
+        TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+
+        FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+        prevTuple.setFieldCount(fieldCount);
+
+        FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+        tuple.setFieldCount(fieldCount);
+
+        kp.firstTupleIndex = 0;
+        int tupleCount = frame.getTupleCount();
+        for (int i = 1; i < tupleCount; i++) {
+            prevTuple.resetByTupleIndex(frame, i - 1);
+            tuple.resetByTupleIndex(frame, i);
+
+            int prefixFieldsMatch = 0;
+            for (int j = 0; j < maxCmps; j++) {
+                // BUG FIX: the length of the current tuple's field must come
+                // from 'tuple', not from 'prevTuple'; the original code passed
+                // prevTuple.getFieldLength(j) for both sides, so fields of
+                // different lengths could erroneously compare as equal.
+                if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j), pageArray,
+                        tuple.getFieldStart(j), tuple.getFieldLength(j)) == 0) {
+                    prefixFieldsMatch++;
+                    kp.pmi[j].matches++;
+
+                    int prefixBytes = tupleWriter.bytesRequired(tuple, 0, prefixFieldsMatch);
+                    int spaceBenefit = tupleWriter.bytesRequired(tuple)
+                            - tupleWriter.bytesRequired(tuple, prefixFieldsMatch, tuple.getFieldCount()
+                                    - prefixFieldsMatch);
+
+                    if (kp.pmi[j].matches == occurrenceThreshold) {
+                        // if we compress this prefix, we pay the cost of
+                        // storing it once, plus the size of one prefix slot
+                        kp.pmi[j].prefixBytes += prefixBytes;
+                        kp.pmi[j].spaceCost += prefixBytes + slotManager.getSlotSize();
+                        kp.pmi[j].prefixSlotsNeeded++;
+                        kp.pmi[j].spaceBenefit += occurrenceThreshold * spaceBenefit;
+                    } else if (kp.pmi[j].matches > occurrenceThreshold) {
+                        // beyond the occurrence threshold, every additional
+                        // tuple with a matching prefix increases the benefit
+                        kp.pmi[j].spaceBenefit += spaceBenefit;
+                    }
+                } else {
+                    kp.pmi[j].matches = 1;
+                    break;
+                }
+            }
+
+            // not even the first field matched, so a new key partition begins
+            if (maxCmps > 0 && prefixFieldsMatch == 0) {
+                kp.lastTupleIndex = i - 1;
+
+                // drop key partitions that don't have enough tuples
+                if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
+                    keyPartitions.remove(keyPartitions.size() - 1);
+
+                kp = new KeyPartition(maxCmps);
+                keyPartitions.add(kp);
+                kp.firstTupleIndex = i;
+            }
+        }
+        kp.lastTupleIndex = tupleCount - 1;
+        // drop the trailing key partition if it doesn't have enough tuples
+        if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
+            keyPartitions.remove(keyPartitions.size() - 1);
+
+        return keyPartitions;
+    }
+
+    /**
+     * Cost/benefit bookkeeping for one candidate prefix length within a key
+     * partition (filled in by getKeyPartitions).
+     */
+    private class PrefixMatchInfo {
+        // length of the current run of tuples whose prefix matches at this
+        // field count (starts at 1 for the first tuple; reset when the run breaks)
+        public int matches = 1;
+        // bytes spent storing the shared prefixes plus their slots
+        public int spaceCost = 0;
+        // bytes saved by truncating the tuples that share a prefix
+        public int spaceBenefit = 0;
+        // number of prefix slots this option requires
+        public int prefixSlotsNeeded = 0;
+        // total size in bytes of the prefixes themselves
+        public int prefixBytes = 0;
+    }
+
+    /**
+     * A maximal run of consecutive tuples on the page whose first key field is
+     * identical. Compression options are evaluated per key partition.
+     */
+    private class KeyPartition {
+        // index of the first tuple belonging to this partition
+        public int firstTupleIndex;
+        // index of the last tuple belonging to this partition (inclusive)
+        public int lastTupleIndex;
+        // per-prefix-length statistics; pmi[j] describes compressing on the
+        // first j+1 fields
+        public PrefixMatchInfo[] pmi;
+
+        // best benefit-minus-cost over this partition's prefix options
+        public int maxBenefitMinusCost = 0;
+        // index into pmi of the best option; -1 means no beneficial option
+        public int maxPmiIndex = -1;
+
+        // number of fields used for compression for this kp of current page
+
+        public KeyPartition(int numKeyFields) {
+            pmi = new PrefixMatchInfo[numKeyFields];
+            for (int i = 0; i < numKeyFields; i++) {
+                pmi[i] = new PrefixMatchInfo();
+            }
+        }
+    }
+
+    /**
+     * Orders key partitions by decreasing compression benefit per prefix slot.
+     * Partitions with no beneficial prefix option (maxPmiIndex < 0) sort last.
+     */
+    private class SortByHeuristic implements Comparator<KeyPartition> {
+        @Override
+        public int compare(KeyPartition a, KeyPartition b) {
+            boolean aHasBenefit = a.maxPmiIndex >= 0;
+            boolean bHasBenefit = b.maxPmiIndex >= 0;
+            if (!aHasBenefit) {
+                // both lack a benefit: treat as equal; otherwise a sorts after b
+                return bHasBenefit ? 1 : 0;
+            }
+            if (!bHasBenefit) {
+                return -1;
+            }
+
+            // both have a non-negative maxPmiIndex, i.e. a non-zero benefit;
+            // rank by benefit per prefix slot, descending
+            float aVal = (float) a.maxBenefitMinusCost / (float) a.pmi[a.maxPmiIndex].prefixSlotsNeeded;
+            float bVal = (float) b.maxBenefitMinusCost / (float) b.pmi[b.maxPmiIndex].prefixSlotsNeeded;
+            if (aVal < bVal) {
+                return 1;
+            }
+            if (aVal > bVal) {
+                return -1;
+            }
+            return 0;
+        }
+    }
+
+    /**
+     * Orders key partitions by their original position on the page (ascending
+     * first tuple index), restoring page order after heuristic ranking.
+     */
+    private class SortByOriginalRank implements Comparator<KeyPartition> {
+        @Override
+        public int compare(KeyPartition a, KeyPartition b) {
+            // Integer.compare avoids the overflow hazard of index subtraction.
+            return Integer.compare(a.firstTupleIndex, b.firstTupleIndex);
+        }
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelper.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelper.java
new file mode 100644
index 0000000..699915d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelper.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+
+/**
+ * Tree-index dataflow helper that instantiates a BTree (with REGULAR_NSM leaf
+ * frames) for the operator's partition.
+ */
+public class BTreeDataflowHelper extends TreeIndexDataflowHelper {
+    public BTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition) {
+        super(opDesc, ctx, partition);
+    }
+
+    @Override
+    public ITreeIndex createIndexInstance() throws HyracksDataException {
+        // Rewrap BTree-specific failures so callers only see HyracksDataException.
+        try {
+            return BTreeUtils.createBTree(
+                    opDesc.getStorageManager().getBufferCache(ctx),
+                    opDesc.getOpCallbackProvider().getOperationCallback(),
+                    treeOpDesc.getTreeIndexTypeTraits(),
+                    treeOpDesc.getTreeIndexComparatorFactories(),
+                    BTreeLeafFrameType.REGULAR_NSM);
+        } catch (BTreeException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelperFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelperFactory.java
similarity index 100%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelperFactory.java
rename to hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDataflowHelperFactory.java
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
new file mode 100644
index 0000000..f403312
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor for a BTree range search. Selected fields of each input
+ * tuple form the low and high search keys; the push runtime executes one
+ * search per input tuple.
+ */
+public class BTreeSearchOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    // fields in input tuple to be used as low keys
+    protected int[] lowKeyFields;
+    // fields in input tuple to be used as high keys
+    protected int[] highKeyFields;
+    // whether the low end of the search range is inclusive
+    protected boolean lowKeyInclusive;
+    // whether the high end of the search range is inclusive
+    protected boolean highKeyInclusive;
+
+    /**
+     * @param lowKeyFields input-tuple fields forming the low key (may be null/empty)
+     * @param highKeyFields input-tuple fields forming the high key (may be null/empty)
+     * @param lowKeyInclusive whether the low end of the range is inclusive
+     * @param highKeyInclusive whether the high end of the range is inclusive
+     */
+    public BTreeSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory dataflowHelperFactory,
+            boolean retainInput, IOperationCallbackProvider opCallbackProvider) {
+        super(spec, 1, 1, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, dataflowHelperFactory, null, retainInput, opCallbackProvider);
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+    }
+
+    /** Creates the pushable that performs the range search for one partition. */
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new BTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, lowKeyFields,
+                highKeyFields, lowKeyInclusive, highKeyInclusive);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
new file mode 100644
index 0000000..b284847
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexSearchOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Push runtime for the BTree search operator: builds a RangePredicate from the
+ * configured low/high key fields of each input tuple and runs the search.
+ */
+public class BTreeSearchOperatorNodePushable extends TreeIndexSearchOperatorNodePushable {
+    protected PermutingFrameTupleReference lowKey;
+    protected PermutingFrameTupleReference highKey;
+    protected boolean lowKeyInclusive;
+    protected boolean highKeyInclusive;
+    protected MultiComparator lowKeySearchCmp;
+    protected MultiComparator highKeySearchCmp;
+
+    public BTreeSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition, IRecordDescriptorProvider recordDescProvider, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive) {
+        super(opDesc, ctx, partition, recordDescProvider);
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+        lowKey = createKeyTuple(lowKeyFields);
+        highKey = createKeyTuple(highKeyFields);
+    }
+
+    // Builds a permuting tuple over the given key fields, or returns null when
+    // no fields are specified.
+    private static PermutingFrameTupleReference createKeyTuple(int[] keyFields) {
+        if (keyFields == null || keyFields.length == 0) {
+            return null;
+        }
+        PermutingFrameTupleReference keyTuple = new PermutingFrameTupleReference();
+        keyTuple.setFieldPermutation(keyFields);
+        return keyTuple;
+    }
+
+    /** Re-points the low/high key tuples at the current input tuple. */
+    @Override
+    protected void resetSearchPredicate(int tupleIndex) {
+        if (lowKey != null) {
+            lowKey.reset(accessor, tupleIndex);
+        }
+        if (highKey != null) {
+            highKey.reset(accessor, tupleIndex);
+        }
+    }
+
+    /** Builds the range predicate and the comparators for both range ends. */
+    @Override
+    protected ISearchPredicate createSearchPredicate() {
+        lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), lowKey);
+        highKeySearchCmp = BTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), highKey);
+        return new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp, highKeySearchCmp);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorDescriptor.java
new file mode 100644
index 0000000..1fc5cb8
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorDescriptor.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleUpdaterFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Variant of {@link BTreeSearchOperatorDescriptor} whose push runtime applies
+ * a tuple updater to every tuple returned by the search, enabling
+ * search-and-update in a single pass.
+ */
+public class BTreeUpdateSearchOperatorDescriptor extends BTreeSearchOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    // creates the updater applied to each matched tuple by the push runtime
+    private final ITupleUpdaterFactory tupleUpdaterFactory;
+
+    public BTreeUpdateSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory dataflowHelperFactory,
+            boolean retainInput, IOperationCallbackProvider opCallbackProvider, ITupleUpdaterFactory tupleUpdaterFactory) {
+        super(spec, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits, comparatorFactories,
+                lowKeyFields, highKeyFields, lowKeyInclusive, highKeyInclusive, dataflowHelperFactory,
+                retainInput, opCallbackProvider);
+        this.tupleUpdaterFactory = tupleUpdaterFactory;
+    }
+
+    /** Creates the pushable that performs the search-and-update for one partition. */
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new BTreeUpdateSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, lowKeyFields,
+                highKeyFields, lowKeyInclusive, highKeyInclusive, tupleUpdaterFactory.createTupleUpdater());
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java
new file mode 100644
index 0000000..1141639
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleUpdater;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+
+/**
+ * Push runtime for {@link BTreeUpdateSearchOperatorDescriptor}: performs the
+ * range search, lets the tuple updater modify each visited tuple, and emits
+ * the tuples to the output frame.
+ */
+public class BTreeUpdateSearchOperatorNodePushable extends BTreeSearchOperatorNodePushable {
+    // applied to every tuple returned by the search cursor
+    private final ITupleUpdater tupleUpdater;
+
+    public BTreeUpdateSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition, IRecordDescriptorProvider recordDescProvider, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, ITupleUpdater tupleUpdater) {
+        super(opDesc, ctx, partition, recordDescProvider, lowKeyFields, highKeyFields, lowKeyInclusive,
+                highKeyInclusive);
+        this.tupleUpdater = tupleUpdater;
+    }
+
+    @Override
+    protected ITreeIndexCursor createCursor() {
+        // NOTE(review): the 'true' flag presumably requests exclusive latching
+        // so tuples can be updated in place — confirm against
+        // BTreeRangeSearchCursor's constructor.
+        return new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, true);
+    }
+
+    /**
+     * Drains the cursor: for each match, optionally copies the input tuple's
+     * fields first (when retainInput is set), applies the updater to the
+     * matched tuple, then appends the tuple's fields to the output frame.
+     */
+    @Override
+    protected void writeSearchResults(int tupleIndex) throws Exception {
+        while (cursor.hasNext()) {
+            tb.reset();
+            cursor.next();
+            if (retainInput) {
+            	frameTuple.reset(accessor, tupleIndex);
+                for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+                	dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+                    tb.addFieldEndOffset();
+                }
+            }
+            ITupleReference tuple = cursor.getTuple();
+            tupleUpdater.updateTuple(tuple);
+            for (int i = 0; i < tuple.getFieldCount(); i++) {
+                dos.write(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+                tb.addFieldEndOffset();
+            }
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                // output frame is full: flush it and retry once on the empty
+                // frame; a second failure means the tuple cannot fit at all
+                FrameUtils.flushFrame(writeBuffer, writer);
+                appender.reset(writeBuffer, true);
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    throw new IllegalStateException();
+                }
+            }
+        }
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java
new file mode 100644
index 0000000..d6d945f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.exceptions;
+
+/**
+ * Signals a duplicate-key condition during a BTree operation.
+ */
+public class BTreeDuplicateKeyException extends BTreeException {
+    private static final long serialVersionUID = 1L;
+    
+    public BTreeDuplicateKeyException(Exception e) {
+        super(e);
+    }
+    
+    public BTreeDuplicateKeyException(String message) {
+        super(message);
+    }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeException.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeException.java
similarity index 100%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeException.java
rename to hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeException.java
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java
new file mode 100644
index 0000000..81a0e79
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.exceptions;
+
+/**
+ * Thrown when an update or delete targets a key that is not present in a
+ * B-tree leaf node.
+ */
+public class BTreeNonExistentKeyException extends BTreeException {
+
+    private static final long serialVersionUID = 1L;
+
+    public BTreeNonExistentKeyException(String message) {
+        super(message);
+    }
+
+    public BTreeNonExistentKeyException(Exception e) {
+        super(e);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
new file mode 100644
index 0000000..73b22d8
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.exceptions;
+
+/**
+ * Signals that an update operation could not be performed on this B-tree
+ * (presumably because the index does not support updates -- confirm against
+ * the throwing sites).
+ */
+public class BTreeNotUpdateableException extends BTreeException {
+
+    private static final long serialVersionUID = 1L;
+
+    public BTreeNotUpdateableException(String message) {
+        super(message);
+    }
+
+    public BTreeNotUpdateableException(Exception e) {
+        super(e);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
new file mode 100644
index 0000000..fb2e833
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
@@ -0,0 +1,735 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.compressors.FieldPrefixCompressor;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixPrefixTupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+// WARNING: only works when tupleWriter is an instance of TypeAwareTupleWriter
+
+public class BTreeFieldPrefixNSMLeafFrame implements IBTreeLeafFrame {
+
+    // Page header layout: byte offset of each metadata field within the page
+    // buffer. Each offset is the previous offset plus that field's width; the
+    // trailing "// N" comment is the resulting absolute offset.
+    protected static final int pageLsnOff = 0; // 0
+    protected static final int tupleCountOff = pageLsnOff + 8; // 8
+    protected static final int freeSpaceOff = tupleCountOff + 4; // 12
+    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
+    protected static final int levelOff = totalFreeSpaceOff + 4; // 20
+    protected static final int smFlagOff = levelOff + 1; // 21
+    protected static final int uncompressedTupleCountOff = smFlagOff + 1; // 22
+    protected static final int prefixTupleCountOff = uncompressedTupleCountOff + 4; // 26
+
+    protected static final int nextLeafOff = prefixTupleCountOff + 4; // 30
+
+    // Backing page and its buffer; bound via setPage().
+    protected ICachedPage page = null;
+    protected ByteBuffer buf = null;
+
+    public final ITreeIndexFrameCompressor compressor;
+    // TODO: Should be protected, but will trigger some refactoring.
+    public final IPrefixSlotManager slotManager;
+
+    private final ITreeIndexTupleWriter tupleWriter;
+    private MultiComparator cmp;
+    
+    // Reusable references for reading tuples and prefix tuples off the page.
+    private final FieldPrefixTupleReference frameTuple;
+    private final FieldPrefixPrefixTupleReference framePrefixTuple;
+
+    /**
+     * Creates a field-prefix-compressed NSM leaf frame.
+     *
+     * @param tupleWriter writer used to (de)serialize tuples; must be a
+     *            TypeAwareTupleWriter, since its type traits are required to
+     *            build the prefix tuple reference and the compressor (see the
+     *            cast below and the class-level WARNING).
+     */
+    public BTreeFieldPrefixNSMLeafFrame(ITupleWriterPlaceholderRemoved tupleWriter) {
+        this.tupleWriter = tupleWriter;
+        this.frameTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+        ITypeTraits[] typeTraits = ((TypeAwareTupleWriter) tupleWriter).getTypeTraits();
+        this.framePrefixTuple = new FieldPrefixPrefixTupleReference(typeTraits);
+        this.slotManager = new FieldPrefixSlotManager();
+        // The 0.001f and 2 arguments are passed through to the compressor;
+        // their exact semantics are defined by FieldPrefixCompressor.
+        this.compressor = new FieldPrefixCompressor(typeTraits, 0.001f, 2);
+    }
+
+    @Override
+    public void setPage(ICachedPage page) {
+        // Bind this frame to a page and point the slot manager at it.
+        this.page = page;
+        this.buf = page.getBuffer();
+        slotManager.setFrame(this);
+    }
+
+    @Override
+    public ByteBuffer getBuffer() {
+        return page.getBuffer();
+    }
+
+    @Override
+    public ICachedPage getPage() {
+        return page;
+    }
+
+    @Override
+    public boolean compress() throws HyracksDataException {
+        // Delegate to the field-prefix compressor; wrap any failure in a
+        // HyracksDataException for the caller.
+        try {
+            return compressor.compress(this, cmp);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    // assumptions:
+    // 1. prefix tuples are stored contiguously
+    // 2. prefix tuples are located before tuples (physically on the page)
+    // 3. prefix tuples are sorted (last prefix tuple is at highest offset)
+    // this procedure will not move prefix tuples
+    /**
+     * Defragments the page: copies all tuple suffixes to be contiguous right
+     * after the last prefix tuple and rewrites the slots accordingly, then
+     * recomputes the free-space bookkeeping. Prefix tuples are not moved.
+     *
+     * @return always false (no page split is triggered by compaction)
+     */
+    @Override
+    public boolean compact() {
+        resetSpaceParams();
+
+        int tupleCount = buf.getInt(tupleCountOff);
+
+        // Determine the start of the target free space (depends on the
+        // assumptions stated above): tuple data is packed immediately after
+        // the end of the last (highest-offset) prefix tuple.
+        // NOTE: a dead "debug" loop that computed an unused max end-offset
+        // over all prefix tuples was removed here.
+        int freeSpace = buf.getInt(freeSpaceOff);
+        int prefixTupleCount = buf.getInt(prefixTupleCountOff);
+        if (prefixTupleCount > 0) {
+            framePrefixTuple.resetByTupleIndex(this, prefixTupleCount - 1);
+            freeSpace = framePrefixTuple.getFieldStart(framePrefixTuple.getFieldCount() - 1)
+                    + framePrefixTuple.getFieldLength(framePrefixTuple.getFieldCount() - 1);
+        }
+
+        // Sort tuple slots by physical tuple offset so tuples can be copied
+        // left-to-right without clobbering not-yet-moved data.
+        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+        sortedTupleOffs.ensureCapacity(tupleCount);
+        for (int i = 0; i < tupleCount; i++) {
+            int tupleSlotOff = slotManager.getTupleSlotOff(i);
+            int tupleSlot = buf.getInt(tupleSlotOff);
+            int tupleOff = slotManager.decodeSecondSlotField(tupleSlot);
+            sortedTupleOffs.add(new SlotOffTupleOff(i, tupleSlotOff, tupleOff));
+        }
+        Collections.sort(sortedTupleOffs);
+
+        for (int i = 0; i < sortedTupleOffs.size(); i++) {
+            int tupleOff = sortedTupleOffs.get(i).tupleOff;
+            int tupleSlot = buf.getInt(sortedTupleOffs.get(i).slotOff);
+            int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
+
+            frameTuple.resetByTupleIndex(this, sortedTupleOffs.get(i).tupleIndex);
+            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+            int tupleLength = tupleEndOff - tupleOff;
+            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
+
+            // Re-encode the slot to point at the tuple's new offset, keeping
+            // its prefix association unchanged.
+            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
+            freeSpace += tupleLength;
+        }
+
+        buf.putInt(freeSpaceOff, freeSpace);
+        int totalFreeSpace = buf.capacity() - buf.getInt(freeSpaceOff)
+                - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
+        buf.putInt(totalFreeSpaceOff, totalFreeSpace);
+
+        return false;
+    }
+
+    /**
+     * Deletes the tuple addressed by the given encoded slot: the slot entry is
+     * overwritten by shifting the slot area, and the space bookkeeping is
+     * updated. The tuple bytes themselves stay on the page until compact().
+     */
+    @Override
+    public void delete(ITupleReference tuple, int slot) {
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
+        int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
+
+        // perform deletion (we just do a memcpy to overwrite the slot)
+        int slotEndOff = slotManager.getTupleSlotEndOff();
+        int length = tupleSlotOff - slotEndOff;
+        System.arraycopy(buf.array(), slotEndOff, buf.array(), slotEndOff + slotManager.getSlotSize(), length);
+
+        // maintain space information, get size of tuple suffix (suffix
+        // could be entire tuple)
+        int tupleSize = 0;
+        int suffixFieldStart = 0;
+        if (prefixSlotNum == FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+            suffixFieldStart = 0;
+            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) - 1);
+        } else {
+            // Compressed tuple: its suffix starts after the prefix's fields.
+            int prefixSlot = buf.getInt(slotManager.getPrefixSlotOff(prefixSlotNum));
+            suffixFieldStart = slotManager.decodeFirstSlotField(prefixSlot);
+        }
+
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount()
+                - suffixFieldStart);
+
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+    }
+
+    /**
+     * Checks whether the given tuple fits on this page: first uncompressed in
+     * the contiguous free area, then after compaction, then compressed against
+     * a matching prefix.
+     */
+    @Override
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
+        int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
+                - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
+
+        int bytesRequired = tupleWriter.bytesRequired(tuple);
+
+        // See if the tuple would fit uncompressed.
+        if (bytesRequired + slotManager.getSlotSize() <= freeContiguous)
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+
+        // See if tuple would fit into remaining space after compaction.
+        if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+
+        // See if the tuple matches a prefix and will fit after truncating the prefix.
+        int prefixSlotNum = slotManager.findPrefix(tuple, framePrefixTuple);
+        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+            int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
+            int prefixSlot = buf.getInt(prefixSlotOff);
+            int numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
+
+            int compressedSize = tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount()
+                    - numPrefixFields);
+            if (compressedSize + slotManager.getSlotSize() <= freeContiguous)
+                return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        }
+
+        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+    }
+
+    /**
+     * Inserts the tuple at the slot position found by findInsertTupleIndex().
+     * If the slot carries a prefix association, only the suffix fields are
+     * written; otherwise the full tuple is written uncompressed.
+     */
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        int slot = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+        int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
+        int numPrefixFields = 0;
+        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+            // Tuple shares a prefix: skip the prefix fields when writing.
+            int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
+            int prefixSlot = buf.getInt(prefixSlotOff);
+            numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
+        } else {
+            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
+        }
+
+        int freeSpace = buf.getInt(freeSpaceOff);
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields,
+                tuple.getFieldCount() - numPrefixFields, buf.array(), freeSpace);
+
+        // Update tuple count and both free-space trackers.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    }
+    
+    /**
+     * Checks whether the tuple at oldTupleIndex can be replaced by newTuple:
+     * in place, in contiguous free space, or after compaction.
+     */
+    @Override
+    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex) {
+        int tupleIndex = slotManager.decodeSecondSlotField(oldTupleIndex);
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        
+        int oldTupleBytes = 0;
+        int newTupleBytes = 0;
+        
+        int numPrefixFields = frameTuple.getNumPrefixFields();
+        int fieldCount = frameTuple.getFieldCount();
+        if (numPrefixFields != 0) {
+            // Check the space requirements for updating the suffix of the original tuple.            
+            oldTupleBytes = frameTuple.getSuffixTupleSize();
+            newTupleBytes = tupleWriter.bytesRequired(newTuple, numPrefixFields, fieldCount - numPrefixFields); 
+        } else {
+            // The original tuple is uncompressed.
+            oldTupleBytes = frameTuple.getTupleSize();
+            newTupleBytes = tupleWriter.bytesRequired(newTuple);
+        }
+        
+        int additionalBytesRequired = newTupleBytes - oldTupleBytes;
+        // Enough space for an in-place update?
+        if (additionalBytesRequired <= 0) {
+            return FrameOpSpaceStatus.SUFFICIENT_INPLACE_SPACE;
+        }
+        
+        int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
+                - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
+        
+        // Enough space if we delete the old tuple and insert the new one without compaction? 
+        if (newTupleBytes <= freeContiguous) {
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        }
+        // Enough space if we delete the old tuple and compact?
+        if (additionalBytesRequired <= buf.getInt(totalFreeSpaceOff)) {
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+        }
+        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+    }
+
+    /**
+     * Replaces the suffix of the tuple at oldTupleIndex with newTuple's
+     * corresponding fields, either overwriting in place or writing to the end
+     * of the free space and re-pointing the slot (keeping the same prefix
+     * association). Space bookkeeping is adjusted by the size delta.
+     */
+    @Override
+    public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) {
+        int tupleIndex = slotManager.decodeSecondSlotField(oldTupleIndex);
+        int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
+        int tupleSlot = buf.getInt(tupleSlotOff);
+        int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
+        int suffixTupleStartOff = slotManager.decodeSecondSlotField(tupleSlot);                
+        
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        int fieldCount = frameTuple.getFieldCount();
+        int numPrefixFields = frameTuple.getNumPrefixFields();
+        int oldTupleBytes = frameTuple.getSuffixTupleSize();
+        int bytesWritten = 0;        
+        
+        if (inPlace) {
+            // Overwrite the old tuple suffix in place.
+            bytesWritten = tupleWriter.writeTupleFields(newTuple, numPrefixFields, fieldCount - numPrefixFields, buf.array(), suffixTupleStartOff);
+        } else {
+            // Insert the new tuple suffix at the end of the free space, and change the slot value (effectively "deleting" the old tuple).
+            int newSuffixTupleStartOff = buf.getInt(freeSpaceOff);
+            bytesWritten = tupleWriter.writeTupleFields(newTuple, numPrefixFields, fieldCount - numPrefixFields, buf.array(), newSuffixTupleStartOff);
+            // Update slot value using the same prefix slot num.
+            slotManager.setSlot(tupleSlotOff, slotManager.encodeSlotFields(prefixSlotNum, newSuffixTupleStartOff));
+            // Update contiguous free space pointer.
+            buf.putInt(freeSpaceOff, newSuffixTupleStartOff + bytesWritten);
+        }
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + oldTupleBytes - bytesWritten);
+    }
+    
+    // Resets both free-space trackers to their empty-page values.
+    protected void resetSpaceParams() {
+        buf.putInt(freeSpaceOff, getOrigFreeSpaceOff());
+        buf.putInt(totalFreeSpaceOff, getOrigTotalFreeSpace());
+    }
+
+    /**
+     * Initializes an empty page header at the given level; -1 marks the
+     * absence of a next leaf.
+     */
+    @Override
+    public void initBuffer(byte level) {
+        buf.putLong(pageLsnOff, 0);
+        // during creation
+        buf.putInt(tupleCountOff, 0);
+        resetSpaceParams();
+        buf.putInt(uncompressedTupleCountOff, 0);
+        buf.putInt(prefixTupleCountOff, 0);
+        buf.put(levelOff, level);
+        buf.put(smFlagOff, (byte) 0);
+        buf.putInt(nextLeafOff, -1);
+    }
+
+    public void setTotalFreeSpace(int totalFreeSpace) {
+        buf.putInt(totalFreeSpaceOff, totalFreeSpace);
+    }
+
+    // Total free space on a freshly initialized page: everything after the
+    // fixed-size header (nextLeafOff is the header's last 4-byte field).
+    public int getOrigTotalFreeSpace() {
+        return buf.capacity() - (nextLeafOff + 4);
+    }
+
+    // Finds the slot for inserting the tuple; rejects exact key matches.
+    @Override
+    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    	int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // Error indicator is set if there is an exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+        }
+        return slot;
+    }
+    
+    // Finds the slot for an upsert; uses INCLUSIVE search so an exact match
+    // is allowed (it becomes an update, see getUpsertBeforeTuple()).
+    @Override
+    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // Error indicator is set if there is an exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+        }
+        return slot;
+    }
+    
+    /**
+     * Returns the existing tuple that an upsert at targetTupleIndex would
+     * replace, or null if the upsert is really an insert.
+     */
+    @Override
+    public ITupleReference getUpsertBeforeTuple(ITupleReference tuple, int targetTupleIndex) throws TreeIndexException {
+        int tupleIndex = slotManager.decodeSecondSlotField(targetTupleIndex);
+        // Examine the tuple index to determine whether it is valid or not.
+        if (tupleIndex != slotManager.getGreatestKeyIndicator()) {
+            // We need to check the key to determine whether it's an insert or an update.
+            frameTuple.resetByTupleIndex(this, tupleIndex);
+            if (cmp.compare(tuple, frameTuple) == 0) {
+                // The keys match, it's an update.
+                return frameTuple;
+            }
+        }
+        // Either the tuple index is a special indicator, or the keys don't match.
+        // In those cases, we are definitely dealing with an insert.
+        return null;
+    }
+    
+    // Finds the slot of the tuple to update; requires an exact key match.
+    @Override
+    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
+        }    
+        return slot;
+    }
+    
+    // Finds the slot of the tuple to delete; requires an exact key match.
+    @Override
+    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+        }    
+        return slot;
+    }
+    
+    /** Renders the header field offsets, one "name: offset" line each, for debugging. */
+    @Override
+    public String printHeader() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("pageLsnOff:                ").append(pageLsnOff).append("\n");
+        sb.append("tupleCountOff:             ").append(tupleCountOff).append("\n");
+        sb.append("freeSpaceOff:              ").append(freeSpaceOff).append("\n");
+        sb.append("totalFreeSpaceOff:         ").append(totalFreeSpaceOff).append("\n");
+        sb.append("levelOff:                  ").append(levelOff).append("\n");
+        sb.append("smFlagOff:                 ").append(smFlagOff).append("\n");
+        sb.append("uncompressedTupleCountOff: ").append(uncompressedTupleCountOff).append("\n");
+        sb.append("prefixTupleCountOff:       ").append(prefixTupleCountOff).append("\n");
+        sb.append("nextLeafOff:               ").append(nextLeafOff).append("\n");
+        return sb.toString();
+    }
+
+    @Override
+    public int getTupleCount() {
+        return buf.getInt(tupleCountOff);
+    }
+
+    // NOTE(review): always returns null -- callers apparently must use the
+    // public slotManager field instead. Confirm whether this is intentional.
+    public ISlotManager getSlotManager() {
+        return null;
+    }
+
+    @Override
+    public int getTupleOffset(int slotNum) {
+        // A slot encodes (prefixSlotNum, tupleOff); return the offset part.
+        int tupleSlotOff = slotManager.getTupleSlotOff(slotNum);
+        int tupleSlot = buf.getInt(tupleSlotOff);
+        return slotManager.decodeSecondSlotField(tupleSlot);
+    }
+
+    // Simple accessors for the page-header fields defined at the top of the class.
+    @Override
+    public long getPageLsn() {
+        return buf.getLong(pageLsnOff);
+    }
+
+    @Override
+    public void setPageLsn(long pageLsn) {
+        buf.putLong(pageLsnOff, pageLsn);
+    }
+
+    @Override
+    public int getTotalFreeSpace() {
+        return buf.getInt(totalFreeSpaceOff);
+    }
+
+    // A level of 0 marks a leaf page; any positive level is an interior page.
+    @Override
+    public boolean isLeaf() {
+        return buf.get(levelOff) == 0;
+    }
+
+    @Override
+    public boolean isInterior() {
+        return buf.get(levelOff) > 0;
+    }
+
+    @Override
+    public byte getLevel() {
+        return buf.get(levelOff);
+    }
+
+    @Override
+    public void setLevel(byte level) {
+        buf.put(levelOff, level);
+    }
+
+    // The structure-modification flag is stored as a single byte (0/1).
+    @Override
+    public boolean getSmFlag() {
+        return buf.get(smFlagOff) != 0;
+    }
+
+    @Override
+    public void setSmFlag(boolean smFlag) {
+        if (smFlag)
+            buf.put(smFlagOff, (byte) 1);
+        else
+            buf.put(smFlagOff, (byte) 0);
+    }
+
+    public int getPrefixTupleCount() {
+        return buf.getInt(prefixTupleCountOff);
+    }
+
+    public void setPrefixTupleCount(int prefixTupleCount) {
+        buf.putInt(prefixTupleCountOff, prefixTupleCount);
+    }
+
+    /**
+     * Appends the tuple at the end of the page (slot index encodes the
+     * greatest-key indicator), compressing against the last prefix tuple when
+     * the tuple's leading fields match it. Presumably callers supply tuples
+     * in sorted order -- confirm against bulk-load call sites.
+     */
+    @Override
+    public void insertSorted(ITupleReference tuple) {
+        int freeSpace = buf.getInt(freeSpaceOff);
+        int fieldsToTruncate = 0;
+
+        // check if tuple matches last prefix tuple
+        if (buf.getInt(prefixTupleCountOff) > 0) {
+            framePrefixTuple.resetByTupleIndex(this, buf.getInt(prefixTupleCountOff) - 1);
+            if (cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount()) == 0) {
+                fieldsToTruncate = framePrefixTuple.getFieldCount();
+            }
+        }
+
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate, tuple.getFieldCount()
+                - fieldsToTruncate, buf.array(), freeSpace);
+
+        // insert slot
+        int prefixSlotNum = FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
+        if (fieldsToTruncate > 0)
+            prefixSlotNum = buf.getInt(prefixTupleCountOff) - 1;
+        else
+            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
+        int insSlot = slotManager.encodeSlotFields(prefixSlotNum, FieldPrefixSlotManager.GREATEST_KEY_INDICATOR);
+        slotManager.insertSlot(insSlot, freeSpace);
+
+        // update page metadata
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    }
+
+    /**
+     * Splits this full leaf: roughly half of the tuples (and their prefix
+     * tuples) move to rightFrame, the new tuple is inserted into whichever
+     * side it belongs to, and splitKey receives the highest key remaining on
+     * the left page.
+     */
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey)
+    		throws TreeIndexException {
+
+        BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame)rightFrame;
+
+        ByteBuffer right = rf.getBuffer();
+        int tupleCount = getTupleCount();
+        int prefixTupleCount = getPrefixTupleCount();
+
+        // Find split point, and determine into which frame the new tuple should be inserted into.
+        int tuplesToLeft;
+        int midSlotNum = tupleCount / 2;
+        ITreeIndexFrame targetFrame = null;
+        frameTuple.resetByTupleIndex(this, midSlotNum);
+        int comparison = cmp.compare(tuple, frameTuple);
+        if (comparison >= 0) {
+            tuplesToLeft = midSlotNum + (tupleCount % 2);
+            targetFrame = rf;
+        } else {
+            tuplesToLeft = midSlotNum;
+            targetFrame = this;
+        }
+        int tuplesToRight = tupleCount - tuplesToLeft;
+
+        // copy entire page
+        System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
+
+        // determine how many slots go on left and right page
+        int prefixesToLeft = prefixTupleCount;
+        for (int i = tuplesToLeft; i < tupleCount; i++) {
+            int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
+            int tupleSlot = right.getInt(tupleSlotOff);
+            int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
+            if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+                prefixesToLeft = prefixSlotNum;
+                break;
+            }
+        }
+
+        // if we are splitting in the middle of a prefix both pages need to have
+        // the prefix slot and tuple
+        int boundaryTupleSlotOff = rf.slotManager.getTupleSlotOff(tuplesToLeft - 1);
+        int boundaryTupleSlot = buf.getInt(boundaryTupleSlotOff);
+        int boundaryPrefixSlotNum = rf.slotManager.decodeFirstSlotField(boundaryTupleSlot);
+        int prefixesToRight = prefixTupleCount - prefixesToLeft;
+        if (boundaryPrefixSlotNum == prefixesToLeft
+                && boundaryPrefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+            prefixesToLeft++; // tuples on both pages share one prefix
+        }
+
+        // move prefix tuples on right page to beginning of page and adjust
+        // prefix slots
+        if (prefixesToRight > 0 && prefixesToLeft > 0 && prefixTupleCount > 1) {
+
+            int freeSpace = rf.getOrigFreeSpaceOff();
+            int lastPrefixSlotNum = -1;
+
+            for (int i = tuplesToLeft; i < tupleCount; i++) {
+                int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
+                int tupleSlot = right.getInt(tupleSlotOff);
+                int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
+                if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+                    framePrefixTuple.resetByTupleIndex(this, prefixSlotNum);
+
+                    // Write each distinct prefix tuple once (slots are visited
+                    // in order, so repeats of the same prefix are contiguous).
+                    int bytesWritten = 0;
+                    if (lastPrefixSlotNum != prefixSlotNum) {
+                        bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right.array(), freeSpace);
+                        int newPrefixSlot = rf.slotManager
+                                .encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
+                        int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
+                        right.putInt(prefixSlotOff, newPrefixSlot);
+                        lastPrefixSlotNum = prefixSlotNum;
+                    }
+
+                    // Renumber the tuple's prefix reference for the right page.
+                    int tupleOff = rf.slotManager.decodeSecondSlotField(tupleSlot);
+                    int newTupleSlot = rf.slotManager.encodeSlotFields(prefixSlotNum
+                            - (prefixTupleCount - prefixesToRight), tupleOff);
+                    right.putInt(tupleSlotOff, newTupleSlot);
+                    freeSpace += bytesWritten;
+                }
+            }
+        }
+
+        // move the modified prefix slots on the right page
+        int prefixSrc = rf.slotManager.getPrefixSlotEndOff();
+        int prefixDest = rf.slotManager.getPrefixSlotEndOff() + (prefixTupleCount - prefixesToRight)
+                * rf.slotManager.getSlotSize();
+        int prefixLength = rf.slotManager.getSlotSize() * prefixesToRight;
+        System.arraycopy(right.array(), prefixSrc, right.array(), prefixDest, prefixLength);
+
+        // on right page we need to copy rightmost tuple slots to left
+        int src = rf.slotManager.getTupleSlotEndOff();
+        int dest = rf.slotManager.getTupleSlotEndOff() + tuplesToLeft * rf.slotManager.getSlotSize()
+                + (prefixTupleCount - prefixesToRight) * rf.slotManager.getSlotSize();
+        int length = rf.slotManager.getSlotSize() * tuplesToRight;
+        System.arraycopy(right.array(), src, right.array(), dest, length);
+
+        right.putInt(tupleCountOff, tuplesToRight);
+        right.putInt(prefixTupleCountOff, prefixesToRight);
+
+        // on left page move slots to reflect possibly removed prefixes
+        src = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize();
+        dest = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize()
+                + (prefixTupleCount - prefixesToLeft) * slotManager.getSlotSize();
+        length = slotManager.getSlotSize() * tuplesToLeft;
+        System.arraycopy(buf.array(), src, buf.array(), dest, length);
+
+        buf.putInt(tupleCountOff, tuplesToLeft);
+        buf.putInt(prefixTupleCountOff, prefixesToLeft);
+
+        // compact both pages
+        compact();
+        rightFrame.compact();
+
+        // insert last key
+        int targetTupleIndex = ((IBTreeLeafFrame)targetFrame).findInsertTupleIndex(tuple);
+        targetFrame.insert(tuple, targetTupleIndex);
+
+        // set split key to be highest value in left page
+        frameTuple.resetByTupleIndex(this, getTupleCount() - 1);
+
+        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+        splitKey.initData(splitKeySize);
+        tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer().array(), 0);
+        splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
+    }
+
    @Override
    public int getFreeSpaceOff() {
        // Returns the page offset of the first byte of contiguous free space,
        // as recorded in the page header.
        return buf.getInt(freeSpaceOff);
    }
+
    // Free-space offset of a freshly initialized (empty) frame: the header ends
    // immediately after the 4-byte next-leaf page pointer stored at nextLeafOff.
    public int getOrigFreeSpaceOff() {
        return nextLeafOff + 4;
    }
+
    @Override
    public void setFreeSpaceOff(int freeSpace) {
        // Records the offset of the first free contiguous byte in the page header.
        buf.putInt(freeSpaceOff, freeSpace);
    }
+
    @Override
    public void setNextLeaf(int page) {
        // Stores the page id of the next leaf in the sibling chain.
        buf.putInt(nextLeafOff, page);
    }
+
    @Override
    public int getNextLeaf() {
        // Returns the page id of the next leaf in the sibling chain.
        return buf.getInt(nextLeafOff);
    }
+
    // Number of tuples on this page that are NOT associated with a prefix
    // (i.e., stored uncompressed), as recorded in the page header.
    public int getUncompressedTupleCount() {
        return buf.getInt(uncompressedTupleCountOff);
    }
+
    // Updates the header count of tuples stored without prefix compression.
    public void setUncompressedTupleCount(int uncompressedTupleCount) {
        buf.putInt(uncompressedTupleCountOff, uncompressedTupleCount);
    }
+
    @Override
    public int getSlotSize() {
        // Size in bytes of one slot entry, delegated to the slot manager.
        return slotManager.getSlotSize();
    }
+
    // Exposes the tuple writer used to (de)serialize tuples on this frame.
    public ITreeIndexTupleWriter getTupleWriter() {
        return tupleWriter;
    }
+
    @Override
    public ITreeIndexTupleReference createTupleReference() {
        // Wrap the writer's tuple reference so callers can transparently read
        // prefix-compressed tuples on this frame.
        return new FieldPrefixTupleReference(tupleWriter.createTupleReference());
    }
+
+    @Override
+    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
+            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
+        int slot = slotManager.findSlot(searchKey, pageTuple, framePrefixTuple, cmp, ftm, ftp);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // TODO: Revisit this one. Maybe there is a cleaner way to solve this in the RangeSearchCursor.
+        if (tupleIndex == FieldPrefixSlotManager.GREATEST_KEY_INDICATOR || tupleIndex == FieldPrefixSlotManager.ERROR_INDICATOR)
+            return -1;
+        else
+            return tupleIndex;
+    }
+
    @Override
    public int getPageHeaderSize() {
        // NOTE(review): this returns the offset of the next-leaf field, which
        // excludes the 4-byte next-leaf pointer itself, whereas
        // getOrigFreeSpaceOff() treats the header as ending at nextLeafOff + 4.
        // Looks intentional for callers that only need the fixed prefix, but
        // confirm the intended header boundary.
        return nextLeafOff;
    }
+
+	@Override
+	public void setMultiComparator(MultiComparator cmp) {
+		this.cmp = cmp;
+		this.slotManager.setMultiComparator(cmp);
+	}
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
new file mode 100644
index 0000000..5defb27
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+
+public class BTreeFieldPrefixNSMLeafFrameFactory implements ITreeIndexFrameFactory {
+
+    private static final long serialVersionUID = 1L;
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+    
+    public BTreeFieldPrefixNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
+        this.tupleWriterFactory = tupleWriterFactory;
+    }
+
+    @Override
+    public IBTreeLeafFrame createFrame() {
+        return new BTreeFieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
+    }
+
+    @Override
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+       return tupleWriterFactory;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java
new file mode 100644
index 0000000..6ff44be
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java
@@ -0,0 +1,6 @@
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
/**
 * Selects the on-page layout used for B-tree leaf frames.
 */
public enum BTreeLeafFrameType {
    // Plain NSM (slotted-page) leaves; tuples stored uncompressed.
    REGULAR_NSM,
    // NSM leaves with field-prefix compression (shared key prefixes factored out).
    FIELD_PREFIX_COMPRESSED_NSM
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
new file mode 100644
index 0000000..d2cb2c6
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
@@ -0,0 +1,410 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
+
+public class BTreeNSMInteriorFrame extends TreeIndexNSMFrame implements IBTreeInteriorFrame {
+
+    private static final int rightLeafOff = smFlagOff + 1;
+    private static final int childPtrSize = 4;
+
+    private final ITreeIndexTupleReference cmpFrameTuple;
+    private MultiComparator cmp;
+
+    public BTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter) {
+        super(tupleWriter, new OrderedSlotManager());
+        cmpFrameTuple = tupleWriter.createTupleReference();
+    }
+
+    @Override
+    public void initBuffer(byte level) {
+        super.initBuffer(level);
+        buf.putInt(rightLeafOff, -1);
+    }
+
+    @Override
+    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+    }
+    
+    @Override
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
+        // Tuple bytes + child pointer + slot.
+        int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize + slotManager.getSlotSize();
+        if (bytesRequired <= getFreeContiguousSpace()) {
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        }
+        if (bytesRequired <= getTotalFreeSpace()) {
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+        }
+        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+    }
+
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        int slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+        int freeSpace = buf.getInt(freeSpaceOff);
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, tuple.getFieldCount(), buf.array(), freeSpace);
+        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple), buf.array(),
+                freeSpace + bytesWritten, childPtrSize);
+        int tupleSize = bytesWritten + childPtrSize;
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+        // Did we insert into the rightmost slot?
+        if (slotOff == slotManager.getSlotEndOff()) {
+            System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple) + childPtrSize,
+                    buf.array(), rightLeafOff, childPtrSize);
+        } else {
+            // If slotOff has a right (slot-)neighbor then update its child
+            // pointer.
+            // The only time when this is NOT the case, is when this is the
+            // very first tuple (or when the splitkey goes into the rightmost
+            // slot but that
+            // case is handled in the if above).
+            if (buf.getInt(tupleCountOff) > 1) {
+                int rightNeighborOff = slotOff - slotManager.getSlotSize();
+                frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(rightNeighborOff));
+                System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + childPtrSize, buf.array(),
+                        getLeftChildPageOff(frameTuple), childPtrSize);
+            }
+        }
+    }
+    
+    @Override
+    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+    }
+
+    @Override
+    public void delete(ITupleReference tuple, int tupleIndex) {
+        int slotOff = slotManager.getSlotOff(tupleIndex);
+        int tupleOff;
+        int keySize;
+        if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
+            tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            keySize = frameTuple.getTupleSize();
+            // Copy new rightmost pointer.
+            System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
+        } else {
+            tupleOff = slotManager.getTupleOff(slotOff);
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            keySize = frameTuple.getTupleSize();
+            // Perform deletion (we just do a memcpy to overwrite the slot).
+            int slotStartOff = slotManager.getSlotEndOff();
+            int length = slotOff - slotStartOff;
+            System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+        }
+        // Maintain space information.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff,
+                buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
+    }
+    
+    @Override
+    public void deleteGreatest() {
+        int slotOff = slotManager.getSlotEndOff();
+        int tupleOff = slotManager.getTupleOff(slotOff);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int keySize = tupleWriter.bytesRequired(frameTuple);
+        System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
+        // Maintain space information.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff,
+                buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
+        int freeSpace = buf.getInt(freeSpaceOff);
+        if (freeSpace == tupleOff + keySize + childPtrSize) {
+            buf.putInt(freeSpace, freeSpace - (keySize + childPtrSize));
+        }
+    }
+    
+    @Override
+    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference tuple, int oldTupleIndex) {
+        throw new UnsupportedOperationException("Cannot update tuples in interior node.");
+    }
+    
+    @Override
+    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        throw new UnsupportedOperationException("Cannot update tuples in interior node.");
+    }
+
+    @Override
+    public void insertSorted(ITupleReference tuple) {
+        int freeSpace = buf.getInt(freeSpaceOff);
+        slotManager.insertSlot(slotManager.getGreatestKeyIndicator(), freeSpace);
+        int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
+        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple), buf.array(),
+                freeSpace + bytesWritten, childPtrSize);
+        int tupleSize = bytesWritten + childPtrSize;
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+        System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + childPtrSize, buf.array(), rightLeafOff,
+                childPtrSize);
+    }
+
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
+        ByteBuffer right = rightFrame.getBuffer();
+        int tupleCount = getTupleCount();
+        
+        // Find split point, and determine into which frame the new tuple should be inserted into.
+        int tuplesToLeft = (tupleCount / 2) + (tupleCount % 2);
+        ITreeIndexFrame targetFrame = null;
+        frameTuple.resetByTupleIndex(this, tuplesToLeft - 1);
+        if (cmp.compare(tuple, frameTuple) <= 0) {
+            targetFrame = this;
+        } else {
+            targetFrame = rightFrame;
+        }
+        int tuplesToRight = tupleCount - tuplesToLeft;
+
+        // Copy entire page.
+        System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
+
+        // On the right page we need to copy rightmost slots to left.
+        int src = rightFrame.getSlotManager().getSlotEndOff();
+        int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
+                * rightFrame.getSlotManager().getSlotSize();
+        int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
+        System.arraycopy(right.array(), src, right.array(), dest, length);
+        right.putInt(tupleCountOff, tuplesToRight);
+
+        // On the left page, remove the highest key and make its child pointer
+        // the rightmost child pointer.
+        buf.putInt(tupleCountOff, tuplesToLeft);
+
+        // Copy the split key to be inserted.
+        // We must do so because setting the new split key will overwrite the
+        // old split key, and we cannot insert the existing split key at this point.
+        ISplitKey savedSplitKey = splitKey.duplicate(tupleWriter.createTupleReference());
+
+        // Set split key to be highest value in left page.
+        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+        splitKey.initData(splitKeySize);
+        tupleWriter.writeTuple(frameTuple, splitKey.getBuffer(), 0);
+        splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
+
+        int deleteTupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+        frameTuple.resetByTupleOffset(buf, deleteTupleOff);
+        buf.putInt(rightLeafOff, buf.getInt(getLeftChildPageOff(frameTuple)));
+        buf.putInt(tupleCountOff, tuplesToLeft - 1);
+
+        // Compact both pages.
+        rightFrame.compact();
+        compact();
+
+        // Insert the saved split key.
+        int targetTupleIndex = ((BTreeNSMInteriorFrame) targetFrame)
+                .findInsertTupleIndex(savedSplitKey.getTuple());
+        targetFrame.insert(savedSplitKey.getTuple(), targetTupleIndex);
+    }
+
+    @Override
+    public boolean compact() {
+        resetSpaceParams();
+        int tupleCount = buf.getInt(tupleCountOff);
+        int freeSpace = buf.getInt(freeSpaceOff);
+        // Sort the slots by the tuple offset they point to.
+        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+        sortedTupleOffs.ensureCapacity(tupleCount);
+        for (int i = 0; i < tupleCount; i++) {
+            int slotOff = slotManager.getSlotOff(i);
+            int tupleOff = slotManager.getTupleOff(slotOff);
+            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+        }
+        Collections.sort(sortedTupleOffs);
+        // Iterate over the sorted slots, and move their corresponding tuples to
+        // the left, reclaiming free space.
+        for (int i = 0; i < sortedTupleOffs.size(); i++) {
+            int tupleOff = sortedTupleOffs.get(i).tupleOff;
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+            int tupleLength = tupleEndOff - tupleOff + childPtrSize;
+            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
+            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+            freeSpace += tupleLength;
+        }
+        // Update contiguous free space pointer and total free space indicator.
+        buf.putInt(freeSpaceOff, freeSpace);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+        return false;
+    }
+
+    @Override
+    public int getChildPageId(RangePredicate pred) {
+        // Trivial case where there is only a child pointer (and no key).
+        if (buf.getInt(tupleCountOff) == 0) {
+            return buf.getInt(rightLeafOff);
+        }
+        // Trivial cases where no low key or high key was given (e.g.
+        // during an index scan).
+        ITupleReference tuple = null;
+        FindTupleMode fsm = null;
+        // The target comparator may be on a prefix of the BTree key fields.
+        MultiComparator targetCmp = pred.getLowKeyComparator();;
+        tuple = pred.getLowKey();
+        if (tuple == null) {
+            return getLeftmostChildPageId();
+        }
+        if (pred.isLowKeyInclusive()) {
+            fsm = FindTupleMode.INCLUSIVE;
+        } else {
+            fsm = FindTupleMode.EXCLUSIVE;
+        }
+        // Search for a matching key.
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int slotOff = slotManager.getSlotOff(tupleIndex);
+        // Follow the rightmost (greatest) child pointer.
+        if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
+            return buf.getInt(rightLeafOff);
+        }
+        // Deal with prefix searches.
+        // slotManager.findTupleIndex() will return an arbitrary tuple matching
+        // the given field prefix (according to the target comparator).
+        // To make sure we traverse the right path, we must find the
+        // leftmost or rightmost tuple that matches the prefix.
+        int origTupleOff = slotManager.getTupleOff(slotOff);
+        cmpFrameTuple.resetByTupleOffset(buf, origTupleOff);
+        int cmpTupleOff = origTupleOff;
+        // The answer set begins with the lowest key matching the prefix.
+        // We must follow the child pointer of the lowest (leftmost) key
+        // matching the given prefix.
+        int maxSlotOff = buf.capacity();
+        slotOff += slotManager.getSlotSize();
+        while (slotOff < maxSlotOff) {
+            cmpTupleOff = slotManager.getTupleOff(slotOff);
+            frameTuple.resetByTupleOffset(buf, cmpTupleOff);
+            if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0) {
+                break;
+            }
+            slotOff += slotManager.getSlotSize();
+        }
+        slotOff -= slotManager.getSlotSize();
+        frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
+        int childPageOff = getLeftChildPageOff(frameTuple);
+        return buf.getInt(childPageOff);
+    }
+
+    @Override
+    protected void resetSpaceParams() {
+        buf.putInt(freeSpaceOff, rightLeafOff + childPtrSize);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - (rightLeafOff + childPtrSize));
+    }
+
+    @Override
+    public int getLeftmostChildPageId() {
+        int tupleOff = slotManager.getTupleOff(slotManager.getSlotStartOff());
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int childPageOff = getLeftChildPageOff(frameTuple);
+        return buf.getInt(childPageOff);
+    }
+
+    @Override
+    public int getRightmostChildPageId() {
+        return buf.getInt(rightLeafOff);
+    }
+
+    @Override
+    public void setRightmostChildPageId(int pageId) {
+        buf.putInt(rightLeafOff, pageId);
+    }
+
+    @Override
+    public int getPageHeaderSize() {
+        return rightLeafOff;
+    }
+
+    private int getLeftChildPageOff(ITupleReference tuple) {
+        return tuple.getFieldStart(tuple.getFieldCount() - 1) + tuple.getFieldLength(tuple.getFieldCount() - 1);
+    }
+
+    @Override
+    public boolean getSmFlag() {
+        return buf.get(smFlagOff) != 0;
+    }
+
+    @Override
+    public void setSmFlag(boolean smFlag) {
+        if (smFlag) {
+            buf.put(smFlagOff, (byte) 1);
+        } else {
+            buf.put(smFlagOff, (byte) 0);
+        }
+    }
+
+    @Override
+    public void setMultiComparator(MultiComparator cmp) {
+        this.cmp = cmp;
+        cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+    }
+    
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        ITreeIndexTupleReference tuple = tupleWriter.createTupleReference();
+        tuple.setFieldCount(cmp.getKeyFieldCount());
+        return tuple;
+    }
+    
+    // For debugging.
+    public ArrayList<Integer> getChildren(MultiComparator cmp) {
+        ArrayList<Integer> ret = new ArrayList<Integer>();
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        int tupleCount = buf.getInt(tupleCountOff);
+        for (int i = 0; i < tupleCount; i++) {
+            int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            int intVal = IntegerSerializerDeserializer.getInt(
+                    buf.array(),
+                    frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                            + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+            ret.add(intVal);
+        }
+        if (!isLeaf()) {
+            int rightLeaf = buf.getInt(rightLeafOff);
+            if (rightLeaf > 0)
+                ret.add(buf.getInt(rightLeafOff));
+        }
+        return ret;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
new file mode 100644
index 0000000..8618df8
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+
/**
 * Factory for NSM (slotted-page) B-tree interior frames. Serializable so it
 * can be shipped inside Hyracks job specifications; only the tuple-writer
 * factory is carried as state.
 */
public class BTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {

    private static final long serialVersionUID = 1L;
    private final ITreeIndexTupleWriterFactory tupleWriterFactory;

    public BTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
        this.tupleWriterFactory = tupleWriterFactory;
    }

    /** Creates a new interior frame with a fresh tuple writer per frame. */
    @Override
    public IBTreeInteriorFrame createFrame() {
        return new BTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter());
    }

    @Override
    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
        return tupleWriterFactory;
    }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
new file mode 100644
index 0000000..4b7f44b
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * BTree leaf frame using the slotted-page (NSM) layout. In addition to the header
+ * fields maintained by {@link TreeIndexNSMFrame}, each leaf stores a 4-byte pointer
+ * to the next leaf page (-1 if there is none), which supports forward range scans
+ * across sibling leaves.
+ */
+public class BTreeNSMLeafFrame extends TreeIndexNSMFrame implements IBTreeLeafFrame {
+    // Offset of the 4-byte next-leaf page id, placed directly after the smFlag byte.
+    protected static final int nextLeafOff = smFlagOff + 1;
+    // Key comparator; must be installed via setMultiComparator() before key operations.
+    private MultiComparator cmp;
+
+    public BTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter) {
+        super(tupleWriter, new OrderedSlotManager());
+    }
+
+    @Override
+    public void initBuffer(byte level) {
+        super.initBuffer(level);
+        // A fresh leaf has no right sibling.
+        buf.putInt(nextLeafOff, -1);
+    }
+
+    @Override
+    public void setNextLeaf(int page) {
+        buf.putInt(nextLeafOff, page);
+    }
+
+    @Override
+    public int getNextLeaf() {
+        return buf.getInt(nextLeafOff);
+    }
+
+    /**
+     * Returns the slot index at which {@code tuple} must be inserted.
+     *
+     * @throws BTreeDuplicateKeyException if an entry with an equal key already exists
+     */
+    @Override
+    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        // Error indicator is set if there is an exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+        }
+        return tupleIndex;
+    }
+
+    /**
+     * Returns the slot index of the existing entry matching {@code tuple}'s key.
+     *
+     * @throws BTreeNonExistentKeyException if no entry with an equal key exists
+     */
+    @Override
+    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
+        }
+        return tupleIndex;
+    }
+
+    /**
+     * Returns the candidate slot index for an upsert; the caller decides whether
+     * it is an insert or an update (see {@link #getUpsertBeforeTuple}).
+     */
+    @Override
+    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        // Just return the found tupleIndex. The caller will make the final decision whether to insert or update.
+        return tupleIndex;
+    }
+
+    /**
+     * Given a target index from {@link #findUpsertTupleIndex}, returns the existing
+     * tuple being replaced (update case) or null (insert case).
+     */
+    @Override
+    public ITupleReference getUpsertBeforeTuple(ITupleReference tuple, int targetTupleIndex) throws TreeIndexException {
+        // Examine the tuple index to determine whether it is valid or not.
+        if (targetTupleIndex != slotManager.getGreatestKeyIndicator()) {
+            // We need to check the key to determine whether it's an insert or an update.
+            frameTuple.resetByTupleIndex(this, targetTupleIndex);
+            if (cmp.compare(tuple, frameTuple) == 0) {
+                // The keys match, it's an update.
+                return frameTuple;
+            }
+        }
+        // Either the tuple index is a special indicator, or the keys don't match.
+        // In those cases, we are definitely dealing with an insert.
+        return null;
+    }
+
+    /**
+     * Returns the slot index of the entry to delete.
+     *
+     * @throws BTreeNonExistentKeyException if no entry with an equal key exists
+     */
+    @Override
+    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+        }
+        return tupleIndex;
+    }
+
+    /**
+     * Physically inserts {@code tuple} at {@code tupleIndex}: appends the tuple data
+     * at the free-space pointer, inserts a slot, and updates the space bookkeeping.
+     * The caller must have verified sufficient contiguous space.
+     */
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        int freeSpace = buf.getInt(freeSpaceOff);
+        slotManager.insertSlot(tupleIndex, freeSpace);
+        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), freeSpace);
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    }
+
+    @Override
+    public void insertSorted(ITupleReference tuple) {
+        // Bulk-load path: keys arrive in order, so always append at the end.
+        insert(tuple, slotManager.getGreatestKeyIndicator());
+    }
+
+    /**
+     * Splits this (full) leaf into this frame and {@code rightFrame}, inserts the
+     * new {@code tuple} into the appropriate half, and sets {@code splitKey} to the
+     * highest key remaining on the left page (to be propagated to the parent).
+     */
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
+        ByteBuffer right = rightFrame.getBuffer();
+        int tupleCount = getTupleCount();
+
+        // Find split point, and determine into which frame the new tuple should be inserted into.
+        int tuplesToLeft;
+        int mid = tupleCount / 2;
+        ITreeIndexFrame targetFrame = null;
+        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff() + slotManager.getSlotSize() * mid);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        if (cmp.compare(tuple, frameTuple) >= 0) {
+            tuplesToLeft = mid + (tupleCount % 2);
+            targetFrame = rightFrame;
+        } else {
+            tuplesToLeft = mid;
+            targetFrame = this;
+        }
+        int tuplesToRight = tupleCount - tuplesToLeft;
+
+        // Copy entire page.
+        System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
+
+        // On the right page we need to copy rightmost slots to the left.
+        int src = rightFrame.getSlotManager().getSlotEndOff();
+        int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
+                * rightFrame.getSlotManager().getSlotSize();
+        int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
+        System.arraycopy(right.array(), src, right.array(), dest, length);
+        right.putInt(tupleCountOff, tuplesToRight);
+
+        // On left page only change the tupleCount indicator.
+        buf.putInt(tupleCountOff, tuplesToLeft);
+
+        // Compact both pages to reclaim the space of the tuples that moved away.
+        rightFrame.compact();
+        compact();
+
+        // Insert the new tuple.
+        int targetTupleIndex = ((BTreeNSMLeafFrame) targetFrame).findInsertTupleIndex(tuple);
+        targetFrame.insert(tuple, targetTupleIndex);
+
+        // Set the split key to be highest key in the left page.
+        tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+        splitKey.initData(splitKeySize);
+        tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer().array(), 0);
+        splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
+    }
+
+    @Override
+    protected void resetSpaceParams() {
+        // Tuple data begins right after the header, i.e. after the 4-byte next-leaf pointer.
+        buf.putInt(freeSpaceOff, nextLeafOff + 4);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - (nextLeafOff + 4));
+    }
+
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        return tupleWriter.createTupleReference();
+    }
+
+    @Override
+    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
+            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
+        return slotManager.findTupleIndex(searchKey, pageTuple, cmp, ftm, ftp);
+    }
+
+    @Override
+    public int getPageHeaderSize() {
+        // The header includes the 4-byte next-leaf pointer stored at nextLeafOff:
+        // resetSpaceParams() starts the tuple data area at nextLeafOff + 4, so
+        // returning nextLeafOff alone would understate the header by 4 bytes and
+        // let callers assume tuple data may overlap the pointer.
+        return nextLeafOff + 4;
+    }
+
+    @Override
+    public boolean getSmFlag() {
+        return buf.get(smFlagOff) != 0;
+    }
+
+    @Override
+    public void setSmFlag(boolean smFlag) {
+        if (smFlag) {
+            buf.put(smFlagOff, (byte) 1);
+        } else {
+            buf.put(smFlagOff, (byte) 0);
+        }
+    }
+
+    @Override
+    public void setMultiComparator(MultiComparator cmp) {
+        this.cmp = cmp;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
new file mode 100644
index 0000000..9508df5
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+
+/**
+ * Factory for {@link BTreeNSMLeafFrame} instances. Every frame produced gets its
+ * own tuple writer, created from this factory's tuple writer factory.
+ */
+public class BTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    /** Writer factory handed to every leaf frame created by this factory. */
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+
+    public BTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
+        this.tupleWriterFactory = tupleWriterFactory;
+    }
+
+    @Override
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+        return tupleWriterFactory;
+    }
+
+    @Override
+    public IBTreeLeafFrame createFrame() {
+        // A fresh tuple writer per frame keeps frames independent of one another.
+        return new BTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter());
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
new file mode 100644
index 0000000..5f507f5
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.frames.AbstractSlotManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Slot manager that keeps the slot array sorted on key order, enabling binary
+ * search over the tuples of a frame. Special return values come from
+ * {@link AbstractSlotManager}: GREATEST_KEY_INDICATOR ("no qualifying tuple /
+ * position after the last tuple") and ERROR_INDICATOR (mode-dependent failure,
+ * e.g. duplicate on EXCLUSIVE_ERROR_IF_EXISTS or miss on EXACT).
+ */
+public class OrderedSlotManager extends AbstractSlotManager {
+
+    /**
+     * Binary-searches the frame for {@code searchKey}.
+     *
+     * @param mode        EXACT, INCLUSIVE, EXCLUSIVE, or EXCLUSIVE_ERROR_IF_EXISTS
+     * @param matchPolicy on no exact match, whether to report the next-higher or
+     *                    next-lower key's index
+     * @return a tuple index, or GREATEST_KEY_INDICATOR / ERROR_INDICATOR (see class doc)
+     */
+    @Override
+    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
+            FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
+        if (frame.getTupleCount() <= 0) {
+            return GREATEST_KEY_INDICATOR;
+        }
+
+        int mid;
+        int begin = 0;
+        int end = frame.getTupleCount() - 1;
+
+        while (begin <= end) {
+            // Overflow-safe midpoint: (begin + end) / 2 can overflow for large indices.
+            mid = (begin + end) >>> 1;
+            frameTuple.resetByTupleIndex(frame, mid);
+
+            int cmp = multiCmp.compare(searchKey, frameTuple);
+            if (cmp < 0) {
+                end = mid - 1;
+            } else if (cmp > 0) {
+                begin = mid + 1;
+            } else {
+                // Exact match found; the outcome depends on the requested mode.
+                if (mode == FindTupleMode.EXCLUSIVE) {
+                    // Skip past the match in the direction dictated by the policy.
+                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
+                        begin = mid + 1;
+                    } else {
+                        end = mid - 1;
+                    }
+                } else {
+                    if (mode == FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS) {
+                        return ERROR_INDICATOR;
+                    } else {
+                        return mid;
+                    }
+                }
+            }
+        }
+
+        // No exact match (or EXCLUSIVE skipped all matches).
+        if (mode == FindTupleMode.EXACT) {
+            return ERROR_INDICATOR;
+        }
+
+        if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
+            if (begin > frame.getTupleCount() - 1) {
+                return GREATEST_KEY_INDICATOR;
+            }
+            frameTuple.resetByTupleIndex(frame, begin);
+            if (multiCmp.compare(searchKey, frameTuple) < 0) {
+                return begin;
+            } else {
+                return GREATEST_KEY_INDICATOR;
+            }
+        } else {
+            if (end < 0) {
+                return GREATEST_KEY_INDICATOR;
+            }
+            frameTuple.resetByTupleIndex(frame, end);
+            if (multiCmp.compare(searchKey, frameTuple) > 0) {
+                return end;
+            } else {
+                return GREATEST_KEY_INDICATOR;
+            }
+        }
+    }
+
+    /**
+     * Inserts a slot pointing at {@code tupleOff} at position {@code tupleIndex},
+     * shifting existing slots to make room. GREATEST_KEY_INDICATOR appends at the end.
+     *
+     * @return the byte offset of the slot that was written
+     */
+    @Override
+    public int insertSlot(int tupleIndex, int tupleOff) {
+        int slotOff = getSlotOff(tupleIndex);
+        if (tupleIndex == GREATEST_KEY_INDICATOR) {
+            // Append: the slot array grows downward, so the new slot goes below the last one.
+            slotOff = getSlotEndOff() - slotSize;
+            setSlot(slotOff, tupleOff);
+            return slotOff;
+        } else {
+            // Shift the slots from the insertion point to the end down by one slot.
+            int slotEndOff = getSlotEndOff();
+            int length = (slotOff - slotEndOff) + slotSize;
+            System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize,
+                    length);
+            setSlot(slotOff, tupleOff);
+            return slotOff;
+        }
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
new file mode 100644
index 0000000..89e8870
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -0,0 +1,1081 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import java.util.ArrayList;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class BTree implements ITreeIndex {
+
+    public static final float DEFAULT_FILL_FACTOR = 0.7f;
+
+    private final static long RESTART_OP = Long.MIN_VALUE;
+    private final static int MAX_RESTARTS = 10;
+    private final static int rootPage = 1;
+        
+    private final IFreePageManager freePageManager;
+    private final IBufferCache bufferCache;    
+    private final IOperationCallback opCallback;
+    private final ITreeIndexFrameFactory interiorFrameFactory;
+    private final ITreeIndexFrameFactory leafFrameFactory;
+    private final int fieldCount;
+    private final IBinaryComparatorFactory[] cmpFactories;
+    private final ReadWriteLock treeLatch;
+    private int fileId;
+
+    /**
+     * Constructs a BTree over the given buffer cache. Frames for interior and leaf
+     * pages come from the supplied factories; page allocation is delegated to the
+     * free page manager. The tree latch is created in fair mode.
+     */
+    public BTree(IBufferCache bufferCache, IOperationCallback opCallback, int fieldCount, IBinaryComparatorFactory[] cmpFactories, IFreePageManager freePageManager,
+            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory) {
+        this.bufferCache = bufferCache;
+        this.opCallback = opCallback;
+        this.fieldCount = fieldCount;
+        this.cmpFactories = cmpFactories;
+        this.interiorFrameFactory = interiorFrameFactory;
+        this.leafFrameFactory = leafFrameFactory;        
+        this.freePageManager = freePageManager;
+        this.treeLatch = new ReentrantReadWriteLock(true);
+    }
+
+    /**
+     * Creates a new (empty) tree in the given file: initializes the free-page
+     * metadata and writes an empty leaf as the root. Holds the tree write latch
+     * so no other operation can see a half-initialized tree.
+     */
+    @Override
+    public void create(int fileId) throws HyracksDataException {
+        treeLatch.writeLock().lock();
+        try {
+            ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+            ITreeIndexMetaDataFrame metaFrame = freePageManager.getMetaDataFrameFactory().createFrame();
+            this.fileId = fileId;
+            freePageManager.open(fileId);
+            // The root lives at a fixed page id (rootPage == 1) for the tree's lifetime.
+            freePageManager.init(metaFrame, rootPage);
+            // A brand-new tree's root is an empty leaf at level 0.
+            initRoot(leafFrame, true);
+        } finally {
+            treeLatch.writeLock().unlock();
+        }
+    }
+
+    /** Opens an existing tree in the given file; only records the id and opens the free page manager. */
+    @Override
+    public void open(int fileId) {    	
+    	this.fileId = fileId;
+    	freePageManager.open(fileId);
+    }
+
+    /** Closes the tree; -1 marks the file id as invalid until the next open(). */
+    @Override
+    public void close() {
+        fileId = -1;
+        freePageManager.close();
+    }
+
+    /**
+     * Opens a cursor that scans all pages in physical (disk) order, from the root
+     * page to the maximum allocated page.
+     * NOTE(review): on success the first page's read latch and pin are NOT released
+     * here — presumably ownership transfers to the cursor, which releases them as it
+     * advances/closes; confirm against TreeDiskOrderScanCursor.
+     */
+    private void diskOrderScan(ITreeIndexCursor icursor, BTreeOpContext ctx) throws HyracksDataException {
+        TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+        ctx.reset();
+        RangePredicate diskOrderScanPred = new RangePredicate(null, null, true, true, ctx.cmp, ctx.cmp);
+        int currentPageId = rootPage;
+        int maxPageId = freePageManager.getMaxPage(ctx.metaFrame);
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+        page.acquireReadLatch();
+        try {
+            cursor.setBufferCache(bufferCache);
+            cursor.setFileId(fileId);
+            cursor.setCurrentPageId(currentPageId);
+            cursor.setMaxPageId(maxPageId);
+            ctx.cursorInitialState.setPage(page);
+            cursor.open(ctx.cursorInitialState, diskOrderScanPred);
+        } catch (Exception e) {
+            // Only release on failure; a successfully opened cursor owns the latch/pin.
+            page.releaseReadLatch();
+            bufferCache.unpin(page);
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Descends the tree and opens {@code cursor} for the given range predicate.
+     * Missing low/high comparators in the predicate are filled in with the tree's
+     * own comparator (a simple full-key scan). The descent may be restarted if a
+     * concurrent structure modification is detected (signaled by the RESTART_OP
+     * sentinel on the pageLsns stack), up to MAX_RESTARTS times.
+     */
+    private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, BTreeOpContext ctx)
+            throws TreeIndexException, HyracksDataException {
+        ctx.reset();
+        ctx.pred = (RangePredicate) searchPred;
+        ctx.cursor = cursor;
+        // simple index scan
+        if (ctx.pred.getLowKeyComparator() == null) {
+            ctx.pred.setLowKeyComparator(ctx.cmp);
+        }
+        if (ctx.pred.getHighKeyComparator() == null) {
+            ctx.pred.setHighKeyComparator(ctx.cmp);
+        }
+        // we use this loop to deal with possibly multiple operation restarts
+        // due to ongoing structure modifications during the descent
+        boolean repeatOp = true;
+        while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+            performOp(rootPage, null, true, ctx);
+            // if we reach this stage then we need to restart from the (possibly
+            // new) root
+            if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
+                ctx.pageLsns.removeLast(); // pop the restart op indicator
+                continue;
+            }
+            repeatOp = false;
+        }
+        cursor.setBufferCache(bufferCache);
+        cursor.setFileId(fileId);
+    }
+
+    /**
+     * Clears the structure-modification (SM) flag on every page recorded in
+     * ctx.smPages, then releases the tree write latch that was acquired when the
+     * structure modification began (see performLeafSplit/createNewRoot).
+     * NOTE(review): the latch is only released when smPages is non-empty — it is
+     * presumed to be held exactly when pages were recorded; confirm against performOp.
+     */
+    private void unsetSmPages(BTreeOpContext ctx) throws HyracksDataException {
+        // Remember the frame's current page so we can restore it afterwards.
+        ICachedPage originalPage = ctx.interiorFrame.getPage();
+        for (int i = 0; i < ctx.smPages.size(); i++) {
+            int pageId = ctx.smPages.get(i);
+            ICachedPage smPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+            smPage.acquireWriteLatch();
+            try {
+                ctx.interiorFrame.setPage(smPage);
+                ctx.interiorFrame.setSmFlag(false);
+            } finally {
+                smPage.releaseWriteLatch();
+                bufferCache.unpin(smPage);
+            }
+        }
+        if (ctx.smPages.size() > 0) {
+            treeLatch.writeLock().unlock();
+            ctx.smPages.clear();
+        }
+        ctx.interiorFrame.setPage(originalPage);
+    }
+
+    /**
+     * (Re)initializes the fixed root page as an empty level-0 leaf.
+     *
+     * @param firstInit true if the page is being pinned for the first time (new page)
+     */
+    private void initRoot(ITreeIndexFrame leafFrame, boolean firstInit) throws HyracksDataException {
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), firstInit);
+        rootNode.acquireWriteLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            leafFrame.initBuffer((byte) 0);
+        } finally {
+            rootNode.releaseWriteLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+    
+    /**
+     * Grows the tree by one level after a split propagated to the root. The root
+     * must stay at its fixed page id, so the old root's contents are copied into a
+     * freshly allocated page (the new left child), and the root page is then
+     * re-initialized as an interior node one level higher containing the split key.
+     */
+    private void createNewRoot(BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        // Make sure the root is always in the same page.
+        ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()),
+                false);
+        leftNode.acquireWriteLatch();
+        try {
+            int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+            ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
+            newLeftNode.acquireWriteLatch();
+            try {
+                // Copy left child to new left child.
+                System.arraycopy(leftNode.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0, newLeftNode
+                        .getBuffer().capacity());
+                ctx.interiorFrame.setPage(newLeftNode);
+                // The copy inherited the old root's SM flag; clear it on the new child.
+                ctx.interiorFrame.setSmFlag(false);
+                // Initialize new root (leftNode becomes new root).
+                ctx.interiorFrame.setPage(leftNode);
+                ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
+                // Will be cleared later in unsetSmPages.
+                ctx.interiorFrame.setSmFlag(true);
+                // The split key's left pointer now refers to the relocated child.
+                ctx.splitKey.setLeftPage(newLeftId);
+                int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(ctx.splitKey.getTuple());
+                ctx.interiorFrame.insert(ctx.splitKey.getTuple(), targetTupleIndex);
+            } finally {
+                newLeftNode.releaseWriteLatch();
+                bufferCache.unpin(newLeftNode);
+            }
+        } finally {
+            leftNode.releaseWriteLatch();
+            bufferCache.unpin(leftNode);
+        }
+    }
+    
+    /**
+     * Common driver for insert/update/upsert/delete: sets up an exact-match range
+     * predicate for the tuple's key, descends via performOp, restarts if a
+     * concurrent structure modification was detected (RESTART_OP sentinel), grows
+     * a new root if a split key propagated all the way up, and finally clears any
+     * SM flags set along the way.
+     */
+    private void insertUpdateOrDelete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        ctx.reset();
+        ctx.pred.setLowKeyComparator(ctx.cmp);
+        ctx.pred.setHighKeyComparator(ctx.cmp);
+        ctx.pred.setLowKey(tuple, true);
+        ctx.pred.setHighKey(tuple, true);
+        ctx.splitKey.reset();
+        ctx.splitKey.getTuple().setFieldCount(ctx.cmp.getKeyFieldCount());
+        // We use this loop to deal with possibly multiple operation restarts
+        // due to ongoing structure modifications during the descent.
+        boolean repeatOp = true;
+        while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+            performOp(rootPage, null, true, ctx);
+            // Do we need to restart from the (possibly new) root?
+            if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
+                ctx.pageLsns.removeLast(); // pop the restart op indicator
+                continue;
+            }
+            // Split key propagated?
+            if (ctx.splitKey.getBuffer() != null) {
+                // Insert or update op. Create a new root.
+                createNewRoot(ctx);
+            }
+            unsetSmPages(ctx);
+            repeatOp = false;
+        }
+    }
+    
+    /** Inserts a tuple; delegates to the common insert/update/delete driver. */
+    private void insert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        insertUpdateOrDelete(tuple, ctx);
+    }
+    
+    /** Upserts a tuple (insert or in-place update); delegates to the common driver. */
+    private void upsert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        insertUpdateOrDelete(tuple, ctx);
+    }
+
+    /**
+     * Updates the non-key fields of an existing tuple.
+     *
+     * @throws BTreeNotUpdateableException if the tuple consists solely of key fields,
+     *         in which case there is nothing updatable in place
+     */
+    private void update(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        // This call only allows updating of non-key fields.
+        // Updating a tuple's key necessitates deleting the old entry, and inserting the new entry.
+        // The user of the BTree is responsible for dealing with non-key updates (i.e., doing a delete + insert). 
+        if (fieldCount == ctx.cmp.getKeyFieldCount()) {
+            throw new BTreeNotUpdateableException("Cannot perform updates when the entire tuple forms the key.");
+        }
+        insertUpdateOrDelete(tuple, ctx);
+    }
+    
+    /** Deletes the tuple with a matching key; delegates to the common driver. */
+    private void delete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        insertUpdateOrDelete(tuple, ctx);
+    }
+    
+    /**
+     * Inserts {@code tuple} into the leaf currently set on ctx.leafFrame, at
+     * {@code targetTupleIndex}. Depending on available space this may insert
+     * directly, compact the page first, compress it, or split the leaf.
+     *
+     * @return true if the whole operation must be restarted (the tree latch could
+     *         not be acquired during a split), false otherwise
+     */
+    private boolean insertLeaf(ITupleReference tuple, int targetTupleIndex, int pageId, BTreeOpContext ctx) throws Exception {
+        boolean restartOp = false;
+        FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
+        switch (spaceStatus) {
+            case SUFFICIENT_CONTIGUOUS_SPACE: {
+                // Room at the end of the data area; insert directly.
+                ctx.leafFrame.insert(tuple, targetTupleIndex);
+                ctx.splitKey.reset();
+                break;
+            }
+            case SUFFICIENT_SPACE: {
+                // Enough total space, but fragmented: compact first.
+                boolean slotsChanged = ctx.leafFrame.compact();
+                if (slotsChanged) {
+                    targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
+                }
+                ctx.leafFrame.insert(tuple, targetTupleIndex);
+                ctx.splitKey.reset();
+                break;
+            }
+            case INSUFFICIENT_SPACE: {            	
+                // Try compressing the page first and see if there is space available.
+                boolean reCompressed = ctx.leafFrame.compress();
+                if (reCompressed) {
+                    // Compression could have changed the target tuple index, find it again.
+                    targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
+                    spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
+                }
+                if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
+                    ctx.leafFrame.insert(tuple, targetTupleIndex);
+                    ctx.splitKey.reset();
+                } else {
+                	restartOp = performLeafSplit(pageId, tuple, ctx);
+                }
+                break;
+            }
+        }        
+        return restartOp;
+    }
+    
+    /**
+     * Splits the leaf on ctx.leafFrame into itself and a newly allocated right
+     * page, inserting {@code tuple} into the appropriate half and recording the
+     * split key in ctx.splitKey for propagation to the parent. Both pages are
+     * marked with the SM flag (cleared later in unsetSmPages) and linked into the
+     * leaf chain.
+     *
+     * @return true if the tree latch could not be acquired and the caller must
+     *         restart the operation from one level above; false on success
+     */
+    private boolean performLeafSplit(int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {    	
+        // We must never hold a latch on a page while waiting to obtain the tree
+        // latch, because it this could lead to a latch-deadlock.
+        // If we can't get the tree latch, we return, release our page latches,
+        // and restart the operation from one level above.
+        // Lock is released in unsetSmPages(), after sm has fully completed.
+        if (!treeLatch.writeLock().tryLock()) {
+            return true;
+        }
+        int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+        ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
+                true);
+        rightNode.acquireWriteLatch();
+        try {
+            IBTreeLeafFrame rightFrame = ctx.createLeafFrame();
+            rightFrame.setPage(rightNode);
+            rightFrame.initBuffer((byte) 0);
+            rightFrame.setMultiComparator(ctx.cmp);
+            ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
+
+            // Record both halves so unsetSmPages() can clear their SM flags later.
+            ctx.smPages.add(pageId);
+            ctx.smPages.add(rightPageId);
+            ctx.leafFrame.setSmFlag(true);
+            rightFrame.setSmFlag(true);
+
+            // Splice the new right page into the leaf chain.
+            rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
+            ctx.leafFrame.setNextLeaf(rightPageId);
+
+            // TODO: we just use increasing numbers as pageLsn,
+            // we
+            // should tie this together with the LogManager and
+            // TransactionManager
+            rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
+            ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+
+            ctx.splitKey.setPages(pageId, rightPageId);
+        } catch (Exception e) {
+            // On failure the structure modification never completes, so release the
+            // tree latch here (the success path releases it in unsetSmPages()).
+            treeLatch.writeLock().unlock();
+            throw e;
+        } finally {
+            rightNode.releaseWriteLatch();
+            bufferCache.unpin(rightNode);
+        }
+        return false;
+    }
+    
    /**
     * Replaces the tuple at oldTupleIndex with the given new tuple, escalating
     * through progressively more expensive strategies depending on available
     * space: in-place update, update into contiguous free space,
     * delete+compact+insert, delete+compress+insert, and finally a leaf split.
     *
     * @param tuple         the new tuple value
     * @param oldTupleIndex slot index of the tuple being replaced
     * @param pageId        page id of the leaf (write-latched by the caller)
     * @param ctx           per-operation context
     * @return true if the operation must be restarted from one level above
     */
    private boolean updateLeaf(ITupleReference tuple, int oldTupleIndex, int pageId, BTreeOpContext ctx) throws Exception {
        FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceUpdate(tuple, oldTupleIndex);
        boolean restartOp = false;
        switch (spaceStatus) {
            case SUFFICIENT_INPLACE_SPACE: {
                // New value fits in the old tuple's existing space.
                ctx.leafFrame.update(tuple, oldTupleIndex, true);
                ctx.splitKey.reset();
                break;
            }
            case SUFFICIENT_CONTIGUOUS_SPACE: {
                // Fits in the page's contiguous free space (not in place).
                ctx.leafFrame.update(tuple, oldTupleIndex, false);
                ctx.splitKey.reset();
                break;
            }                
            case SUFFICIENT_SPACE: {
                // Delete the old tuple, compact the frame, and insert the new tuple.
                ctx.leafFrame.delete(tuple, oldTupleIndex);
                ctx.leafFrame.compact();
                int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
                ctx.leafFrame.insert(tuple, targetTupleIndex);
                ctx.splitKey.reset();
                break;
            }                
            case INSUFFICIENT_SPACE: {
                // Delete the old tuple, and try compressing the page to make space available.
                ctx.leafFrame.delete(tuple, oldTupleIndex);
                ctx.leafFrame.compress();
                // We need to insert the new tuple, so check if there is space.
                spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);                
                if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
                    int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
                    ctx.leafFrame.insert(tuple, targetTupleIndex);
                    ctx.splitKey.reset();
                } else {
                    // Still no room: split the leaf (may request an op restart).
                    restartOp = performLeafSplit(pageId, tuple, ctx);
                }
                break;
            }
        }
        return restartOp;
    }
+
+    private boolean upsertLeaf(ITupleReference tuple, int targetTupleIndex, int pageId, BTreeOpContext ctx) throws Exception {
+        boolean restartOp = false;
+        ITupleReference beforeTuple = ctx.leafFrame.getUpsertBeforeTuple(tuple, targetTupleIndex);
+        if (beforeTuple == null) {
+            opCallback.pre(null);
+            restartOp = insertLeaf(tuple, targetTupleIndex, pageId, ctx);
+        } else {
+            opCallback.pre(beforeTuple);
+            restartOp = updateLeaf(tuple, targetTupleIndex, pageId, ctx);
+        }
+        opCallback.post(tuple);
+        return restartOp;
+    }
+    
    /**
     * Inserts a (propagated split) tuple into an interior page, splitting the
     * interior page itself when it is full. A split fills ctx.splitKey so the
     * caller can continue propagating upwards; a successful plain insert
     * resets ctx.splitKey to stop propagation.
     *
     * @param node   the interior page (write-latched by the caller)
     * @param pageId page id of the interior page
     * @param tuple  the separator tuple to insert
     * @param ctx    per-operation context
     */
    private void insertInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
            throws Exception {
        ctx.interiorFrame.setPage(node);
        int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(tuple);
        FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple);
        switch (spaceStatus) {
            case INSUFFICIENT_SPACE: {
                // Page is full: allocate a right sibling and split.
                int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
                ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
                rightNode.acquireWriteLatch();
                try {
                    IBTreeFrame rightFrame = ctx.createInteriorFrame();
                    rightFrame.setPage(rightNode);
                    // The sibling lives at the same level as the page being split.
                    rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
                    rightFrame.setMultiComparator(ctx.cmp);
                    // instead of creating a new split key, use the existing
                    // splitKey
                    ctx.interiorFrame.split(rightFrame, ctx.splitKey.getTuple(), ctx.splitKey);
                    // Record both pages as part of the ongoing structure modification.
                    ctx.smPages.add(pageId);
                    ctx.smPages.add(rightPageId);
                    ctx.interiorFrame.setSmFlag(true);
                    rightFrame.setSmFlag(true);
                    // TODO: we just use increasing numbers as pageLsn, we
                    // should tie this together with the LogManager and
                    // TransactionManager
                    rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
                    ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1);

                    ctx.splitKey.setPages(pageId, rightPageId);
                } finally {
                    rightNode.releaseWriteLatch();
                    bufferCache.unpin(rightNode);
                }
                break;
            }                

            case SUFFICIENT_CONTIGUOUS_SPACE: {
                // Plain insert; no further propagation needed.
                ctx.interiorFrame.insert(tuple, targetTupleIndex);
                ctx.splitKey.reset();
                break;
            }

            case SUFFICIENT_SPACE: {
                // Enough total space but fragmented: compact first.
                boolean slotsChanged = ctx.interiorFrame.compact();
                if (slotsChanged) {
                    // Compaction moved slots; recompute the insert position.
                    targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(tuple);
                }
                ctx.interiorFrame.insert(tuple, targetTupleIndex);
                ctx.splitKey.reset();
                break;
            }
        }
    }
+
+    private boolean deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+        // Simply delete the tuple, and don't do any rebalancing.
+        // This means that there could be underflow, even an empty page that is
+        // pointed to by an interior node.
+        if (ctx.leafFrame.getTupleCount() == 0) {
+            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+        }
+        int tupleIndex = ctx.leafFrame.findDeleteTupleIndex(tuple);
+        ctx.leafFrame.delete(tuple, tupleIndex);
+        return false;
+    }
+
+    private final boolean acquireLatch(ICachedPage node, BTreeOpContext ctx, boolean isLeaf) {
+        if (!isLeaf || (ctx.op == IndexOp.SEARCH && !ctx.cursor.exclusiveLatchNodes())) {
+            node.acquireReadLatch();
+            return true;
+        } else {
+            node.acquireWriteLatch();
+            return false;
+        }
+    }
+
+    private boolean isConsistent(int pageId, BTreeOpContext ctx) throws Exception {
+        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+        node.acquireReadLatch();
+        ctx.interiorFrame.setPage(node);
+        boolean isConsistent = false;
+        try {
+            isConsistent = ctx.pageLsns.getLast() == ctx.interiorFrame.getPageLsn();
+        } finally {
+            node.releaseReadLatch();
+            bufferCache.unpin(node);
+        }
+        return isConsistent;
+    }
+
    /**
     * Core recursive descent for all tree operations (insert, upsert, update,
     * delete, search). Uses latch coupling (child latched before the parent is
     * released) and a trail of pageLsns in ctx to detect and recover from
     * concurrent structure modifications (SMs): when a descent encounters a
     * page with its SM flag set, or a restarted child reports RESTART_OP, the
     * recursion unwinds to the lowest consistent ancestor and retries.
     *
     * @param parentIsReadLatched whether the parent latch being coupled is a read latch
     */
    private void performOp(int pageId, ICachedPage parent, boolean parentIsReadLatched, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
        ctx.interiorFrame.setPage(node);
        
        // this check performs an unprotected read in the page
        // the following could happen: TODO fill out
        boolean unsafeIsLeaf = ctx.interiorFrame.isLeaf();
        boolean isReadLatched = acquireLatch(node, ctx, unsafeIsLeaf);
        boolean smFlag = ctx.interiorFrame.getSmFlag();
        // re-check leafness after latching
        boolean isLeaf = ctx.interiorFrame.isLeaf();

        // remember trail of pageLsns, to unwind recursion in case of an ongoing
        // structure modification
        ctx.pageLsns.add(ctx.interiorFrame.getPageLsn());
        try {
            // Latch coupling: unlatch parent.
            if (parent != null) {
                if (parentIsReadLatched) {
                	parent.releaseReadLatch();
                } else {
                	parent.releaseWriteLatch();
                }
                bufferCache.unpin(parent);
            }
            if (!isLeaf || smFlag) {
                if (!smFlag) {
                    // We use this loop to deal with possibly multiple operation
                    // restarts due to ongoing structure modifications during
                    // the descent.
                    boolean repeatOp = true;
                    while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
                        int childPageId = ctx.interiorFrame.getChildPageId(ctx.pred);
                        // Recurse; this page (node) is the parent to be uncoupled.
                        performOp(childPageId, node, isReadLatched, ctx);

                        if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
                            // Pop the restart op indicator.
                            ctx.pageLsns.removeLast();                            
                            if (isConsistent(pageId, ctx)) {
                                // Pin and latch page again, since it was unpinned and unlatched in call to performOp (passed as parent).
                                node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
                                node.acquireReadLatch();
                                ctx.interiorFrame.setPage(node);
                                isReadLatched = true;
                                // Descend the tree again.                                
                                continue;
                            } else {
                                // Pop pageLsn of this page (version seen by this op during descent).
                                ctx.pageLsns.removeLast(); 
                                // This node is not consistent set the restart indicator for upper level.
                                ctx.pageLsns.add(RESTART_OP);
                                break;
                            }
                        }
                        
                        // The child completed; handle any propagated split.
                        switch (ctx.op) {
                            case INSERT:
                            case UPSERT:
                            case UPDATE: {
                                // Is there a propagated split key?
                                if (ctx.splitKey.getBuffer() != null) {
                                    ICachedPage interiorNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
                                    interiorNode.acquireWriteLatch();
                                    try {
                                        // Insert or update op. Both can cause split keys to propagate upwards.                                            
                                        insertInterior(interiorNode, pageId, ctx.splitKey.getTuple(), ctx);
                                    } finally {
                                    	interiorNode.releaseWriteLatch();
                                        bufferCache.unpin(interiorNode);
                                    }
                                } else {
                                    // No split to propagate: the SM (if any) is complete;
                                    // clear SM flags and release the tree latch.
                                    unsetSmPages(ctx);
                                }
                                break;
                            }
                            
                            case DELETE: {
                                if (ctx.splitKey.getBuffer() != null) {
                                    throw new BTreeException("Split key was propagated during delete. Delete allows empty leaf pages.");
                                }
                                break;
                            }
                                
                            default: {
                                // Do nothing for Search and DiskOrderScan.
                                break;
                            }
                        }
                        // Operation completed.
                        repeatOp = false;
                    } // end while
                } else { // smFlag
                    // An SM is in progress on this page: back off completely.
                    ctx.opRestarts++;
                    if (isReadLatched) {
                    	node.releaseReadLatch();
                    } else {
                    	node.releaseWriteLatch();
                    }
                    bufferCache.unpin(node);

                    // TODO: this should be an instant duration lock, how to do
                    // this in java?
                    // instead we just immediately release the lock. this is
                    // inefficient but still correct and will not cause
                    // latch-deadlock
                    treeLatch.writeLock().lock();
                    treeLatch.writeLock().unlock();
                    
                    // unwind recursion and restart operation, find lowest page
                    // with a pageLsn as seen by this operation during descent
                    ctx.pageLsns.removeLast(); // pop current page lsn
                    // put special value on the stack to inform caller of
                    // restart
                    ctx.pageLsns.add(RESTART_OP);
                }
            } else { // isLeaf and !smFlag
                // We may have to restart an op to avoid latch deadlock.
            	boolean restartOp = false;
            	ctx.leafFrame.setPage(node);
            	switch (ctx.op) {
                    case INSERT: {                        
                        int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(ctx.pred.getLowKey());
                        restartOp = insertLeaf(ctx.pred.getLowKey(), targetTupleIndex, pageId, ctx);
                        break;
                    }
                    case UPSERT: {
                        int targetTupleIndex = ctx.leafFrame.findUpsertTupleIndex(ctx.pred.getLowKey());
                        restartOp = upsertLeaf(ctx.pred.getLowKey(), targetTupleIndex, pageId, ctx);
                        break;
                    }
                    case UPDATE: {
                        int oldTupleIndex = ctx.leafFrame.findUpdateTupleIndex(ctx.pred.getLowKey());
                    	restartOp = updateLeaf(ctx.pred.getLowKey(), oldTupleIndex, pageId, ctx);
                        break;
                    }
                    case DELETE: {
                    	restartOp = deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
                        break;
                    }
                    case SEARCH: {
                        // The cursor takes over the latched/pinned page; it is
                        // deliberately NOT released below.
                        ctx.cursorInitialState.setPage(node);
                        ctx.cursor.open(ctx.cursorInitialState, ctx.pred);
                        break;
                    }
                }
            	if (ctx.op != IndexOp.SEARCH) {
            	    node.releaseWriteLatch();
                    bufferCache.unpin(node);
            	}
            	if (restartOp) {
            		ctx.pageLsns.removeLast();
                    ctx.pageLsns.add(RESTART_OP);
            	}
            }
        } catch (TreeIndexException e) {
            // Release our latch exactly once across the unwinding recursion.
        	if (!ctx.exceptionHandled) {
        		if (node != null) {
        			if (isReadLatched) {
        				node.releaseReadLatch();
        			} else {
        				node.releaseWriteLatch();
        			}
        			bufferCache.unpin(node);
        			ctx.exceptionHandled = true;
        		}
            }
            throw e;
        } catch (Exception e) {
            // NOTE(review): printStackTrace() here bypasses any logging
            // framework; consider proper logging — TODO confirm intent.
        	e.printStackTrace();
        	if (node != null) {
        		if (isReadLatched) {
    				node.releaseReadLatch();
    			} else {
    				node.releaseWriteLatch();
    			}
        		bufferCache.unpin(node);
        	}
            // Wrap unexpected failures so callers only see tree exceptions.
            BTreeException wrappedException = new BTreeException(e);
            ctx.exceptionHandled = true;
            throw wrappedException;
        }
    }
+
    /**
     * Per-bulk-load state: one NodeFrontier (the rightmost, still-open page)
     * per tree level, plus fill-factor-derived byte budgets for leaf and
     * interior pages. The constructor pins and write-latches the first leaf
     * frontier page; endBulkLoad() is responsible for releasing all frontiers.
     */
    public class BulkLoadContext implements IIndexBulkLoadContext {
        public final MultiComparator cmp;
        // Size of one slot entry in a leaf frame; used for space accounting.
        public final int slotSize;
        // Maximum bytes to fill per leaf / interior page (capacity * fillFactor).
        public final int leafMaxBytes;
        public final int interiorMaxBytes;
        public final BTreeSplitKey splitKey;
        // we maintain a frontier of nodes for each level
        private final ArrayList<NodeFrontier> nodeFrontiers = new ArrayList<NodeFrontier>();
        private final IBTreeLeafFrame leafFrame;
        private final IBTreeInteriorFrame interiorFrame;
        private final ITreeIndexMetaDataFrame metaFrame;
        private final ITreeIndexTupleWriter tupleWriter;        
        
        public BulkLoadContext(float fillFactor, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
                ITreeIndexMetaDataFrame metaFrame, IBinaryComparatorFactory[] cmpFactories) throws HyracksDataException {
            this.cmp = MultiComparator.create(cmpFactories);
            
        	leafFrame.setMultiComparator(cmp);
        	interiorFrame.setMultiComparator(cmp);
        	
            splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
            tupleWriter = leafFrame.getTupleWriter();

            // Allocate, pin, and write-latch the first leaf page; it stays
            // latched until the bulk load finishes or moves past it.
            NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
            leafFrontier.pageId = freePageManager.getFreePage(metaFrame);
            leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
                    true);
            leafFrontier.page.acquireWriteLatch();

            // The interior frame is set on the leaf page here only to measure
            // its buffer capacity for interiorMaxBytes; the leaf frame is
            // (re)initialized on the same page right after.
            interiorFrame.setPage(leafFrontier.page);
            interiorFrame.initBuffer((byte) 0);
            interiorMaxBytes = (int) ((float) interiorFrame.getBuffer().capacity() * fillFactor);

            leafFrame.setPage(leafFrontier.page);
            leafFrame.initBuffer((byte) 0);
            leafMaxBytes = (int) ((float) leafFrame.getBuffer().capacity() * fillFactor);

            slotSize = leafFrame.getSlotSize();

            this.leafFrame = leafFrame;
            this.interiorFrame = interiorFrame;
            this.metaFrame = metaFrame;

            // Level 0 frontier: the leaf currently being filled.
            nodeFrontiers.add(leafFrontier);
        }

        // Opens a new frontier page one level above the current highest level.
        private void addLevel() throws HyracksDataException {
            NodeFrontier frontier = new NodeFrontier(tupleWriter.createTupleReference());
            frontier.pageId = freePageManager.getFreePage(metaFrame);
            frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), true);
            frontier.page.acquireWriteLatch();
            frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());
            interiorFrame.setPage(frontier.page);
            // The level of the new page equals the current frontier count.
            interiorFrame.initBuffer((byte) nodeFrontiers.size());
            nodeFrontiers.add(frontier);
        }
    }
+
    /**
     * During bulk load, inserts the pending split key (ctx.splitKey) into the
     * interior frontier at the given level, recursing upwards whenever an
     * interior frontier page itself fills up. A new level is created on demand.
     *
     * @param ctx   bulk-load state
     * @param level tree level to receive the split key (1 = lowest interior level)
     */
    private void propagateBulk(BulkLoadContext ctx, int level) throws HyracksDataException {

        // Nothing to propagate.
        if (ctx.splitKey.getBuffer() == null)
            return;

        // Grow the tree by one level if this split reaches above the top.
        if (level >= ctx.nodeFrontiers.size())
            ctx.addLevel();

        NodeFrontier frontier = ctx.nodeFrontiers.get(level);
        ctx.interiorFrame.setPage(frontier.page);

        ITupleReference tuple = ctx.splitKey.getTuple();
        // Key bytes + slot entry + child pointer (4 bytes).
        int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple, 0, ctx.cmp.getKeyFieldCount()) + ctx.slotSize + 4;
        int spaceUsed = ctx.interiorFrame.getBuffer().capacity() - ctx.interiorFrame.getTotalFreeSpace();
        if (spaceUsed + spaceNeeded > ctx.interiorMaxBytes) {

            // Frontier page is full: keep a private copy of the incoming key,
            // because ctx.splitKey is about to be reused for this level's split.
            BTreeSplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame.getTupleWriter().createTupleReference());
            tuple = copyKey.getTuple();

            // The greatest key of the full page becomes the separator pushed
            // to the level above; it is removed from this page.
            frontier.lastTuple.resetByTupleIndex(ctx.interiorFrame, ctx.interiorFrame.getTupleCount() - 1);
            int splitKeySize = ctx.tupleWriter.bytesRequired(frontier.lastTuple, 0, ctx.cmp.getKeyFieldCount());
            ctx.splitKey.initData(splitKeySize);
            ctx.tupleWriter
                    .writeTupleFields(frontier.lastTuple, 0, ctx.cmp.getKeyFieldCount(), ctx.splitKey.getBuffer().array(), 0);
            ctx.splitKey.getTuple().resetByTupleOffset(ctx.splitKey.getBuffer(), 0);
            ctx.splitKey.setLeftPage(frontier.pageId);

            ctx.interiorFrame.deleteGreatest();

            // Finish the full page and allocate this level's next frontier page.
            frontier.page.releaseWriteLatch();
            bufferCache.unpin(frontier.page);
            frontier.pageId = freePageManager.getFreePage(ctx.metaFrame);

            ctx.splitKey.setRightPage(frontier.pageId);
            // Recurse before pinning, so only one frontier page per level is latched.
            propagateBulk(ctx, level + 1);

            frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), true);
            frontier.page.acquireWriteLatch();
            ctx.interiorFrame.setPage(frontier.page);
            ctx.interiorFrame.initBuffer((byte) level);
        }
        // Insert the key (the original, or the saved copy after a split).
        ctx.interiorFrame.insertSorted(tuple);
    }
+
+    // assumes btree has been created and opened
+    @Override
+    public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws TreeIndexException, HyracksDataException {
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+    	if (!isEmptyTree(leafFrame)) {
+    		throw new BTreeException("Trying to Bulk-load a non-empty BTree.");
+    	}
+    	
+        BulkLoadContext ctx = new BulkLoadContext(fillFactor, leafFrame,
+                (IBTreeInteriorFrame)interiorFrameFactory.createFrame(), freePageManager.getMetaDataFrameFactory().createFrame(), cmpFactories);
+        ctx.splitKey.getTuple().setFieldCount(ctx.cmp.getKeyFieldCount());
+        return ctx;
+    }
+
    /**
     * Appends one tuple (callers must supply tuples in sorted order — see
     * insertSorted) to the current leaf frontier. When the leaf exceeds its
     * fill-factor budget, the page is first compressed; if still full it is
     * finished, its greatest key is propagated upwards as a split key, and a
     * fresh leaf frontier page is started.
     */
    @Override
    public void bulkLoadAddTuple(ITupleReference tuple, IIndexBulkLoadContext ictx) throws HyracksDataException {
        BulkLoadContext ctx = (BulkLoadContext) ictx;
        NodeFrontier leafFrontier = ctx.nodeFrontiers.get(0);
        IBTreeLeafFrame leafFrame = ctx.leafFrame;

        int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple) + ctx.slotSize;
        int spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();

        // try to free space by compression
        if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
            leafFrame.compress();
            spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
        }

        if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
            // Leaf is full: its greatest key becomes the split key for level 1.
            leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame.getTupleCount() - 1);
            int splitKeySize = ctx.tupleWriter.bytesRequired(leafFrontier.lastTuple, 0, ctx.cmp.getKeyFieldCount());
            ctx.splitKey.initData(splitKeySize);
            ctx.tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, ctx.cmp.getKeyFieldCount(),
                    ctx.splitKey.getBuffer().array(), 0);
            ctx.splitKey.getTuple().resetByTupleOffset(ctx.splitKey.getBuffer(), 0);
            ctx.splitKey.setLeftPage(leafFrontier.pageId);
            leafFrontier.pageId = freePageManager.getFreePage(ctx.metaFrame);

            // Chain the finished leaf to its successor, then release it.
            leafFrame.setNextLeaf(leafFrontier.pageId);
            leafFrontier.page.releaseWriteLatch();
            bufferCache.unpin(leafFrontier.page);

            ctx.splitKey.setRightPage(leafFrontier.pageId);
            propagateBulk(ctx, 1);

            // Start the new leaf frontier page.
            leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
                    true);
            leafFrontier.page.acquireWriteLatch();
            leafFrame.setPage(leafFrontier.page);
            leafFrame.initBuffer((byte) 0);
        }

        leafFrame.setPage(leafFrontier.page);
        leafFrame.insertSorted(tuple);
    }
+
    /**
     * Finishes a bulk load: copies the highest frontier page into the fixed
     * root page, returns the now-unneeded old top page to the free-page
     * manager, and releases/unpins every frontier page.
     */
    @Override
    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
        // copy root
        BulkLoadContext ctx = (BulkLoadContext) ictx;
        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
        rootNode.acquireWriteLatch();
        NodeFrontier lastNodeFrontier = ctx.nodeFrontiers.get(ctx.nodeFrontiers.size() - 1);
        IBTreeInteriorFrame interiorFrame = ctx.interiorFrame;
        try {
            // The highest frontier page holds the tree's logical root; copy its
            // raw bytes into the fixed rootPage slot.
            ICachedPage toBeRoot = lastNodeFrontier.page;
            System.arraycopy(toBeRoot.getBuffer().array(), 0, rootNode.getBuffer().array(), 0, toBeRoot.getBuffer()
                    .capacity());
        } finally {
            rootNode.releaseWriteLatch();
            bufferCache.unpin(rootNode);

            // register old root as free page
            freePageManager.addFreePage(ctx.metaFrame, lastNodeFrontier.pageId);

            // make old root a free page
            interiorFrame.setPage(lastNodeFrontier.page);
            interiorFrame.initBuffer(freePageManager.getFreePageLevelIndicator());

            // cleanup: release every frontier page pinned during the load
            // (including the old root just recycled above).
            for (int i = 0; i < ctx.nodeFrontiers.size(); i++) {
                ctx.nodeFrontiers.get(i).page.releaseWriteLatch();
                bufferCache.unpin(ctx.nodeFrontiers.get(i).page);
            }
        }
    }
+
+    private BTreeOpContext createOpContext() {
+        return new BTreeOpContext(leafFrameFactory, interiorFrameFactory, freePageManager.getMetaDataFrameFactory()
+                .createFrame(), cmpFactories);
+    }
+    
    /** Returns the factory used to create interior-frame views of pages. */
    public ITreeIndexFrameFactory getInteriorFrameFactory() {
        return interiorFrameFactory;
    }

    /** Returns the factory used to create leaf-frame views of pages. */
    public ITreeIndexFrameFactory getLeafFrameFactory() {
        return leafFrameFactory;
    }

    /** Returns the comparator factories defining this tree's key order. */
    public IBinaryComparatorFactory[] getComparatorFactories() {
        return cmpFactories;
    }

    /** Returns the free-page manager that allocates/recycles this tree's pages. */
    public IFreePageManager getFreePageManager() {
        return freePageManager;
    }

    /** Returns the fixed page id of the tree's root. */
    public int getRootPageId() {
        return rootPage;
    }    

    /** Returns the number of fields in this tree's tuples. */
    @Override
    public int getFieldCount() {
        return fieldCount;
    }

    /** Identifies this index as a BTree. */
    @Override
    public IndexType getIndexType() {
        return IndexType.BTREE;
    }
    
    /** Returns the buffer-cache file id backing this tree. */
    @Override
    public int getFileId() {
    	return fileId;
    }
    
    /** Returns the buffer cache this tree reads and writes through. */
    @Override
    public IBufferCache getBufferCache() {
        return bufferCache;
    }
+    
+    public byte getTreeHeight(IBTreeLeafFrame leafFrame) throws HyracksDataException {
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+        rootNode.acquireReadLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            return leafFrame.getLevel();
+        } finally {
+            rootNode.releaseReadLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+    
+    public boolean isEmptyTree(IBTreeLeafFrame leafFrame) throws HyracksDataException {
+    	ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+        rootNode.acquireReadLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            if (leafFrame.getLevel() == 0 && leafFrame.getTupleCount() == 0) {
+            	return true;
+            } else {
+            	return false;
+            }
+        } finally {
+            rootNode.releaseReadLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+    
+    @SuppressWarnings("rawtypes") 
+    public String printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] keySerdes)
+            throws Exception {
+        MultiComparator cmp = MultiComparator.create(cmpFactories);
+        byte treeHeight = getTreeHeight(leafFrame);
+        StringBuilder strBuilder = new StringBuilder();
+        printTree(rootPage, null, false, leafFrame, interiorFrame, treeHeight, keySerdes, strBuilder, cmp);
+        return strBuilder.toString();
+    }
+
    /**
     * Recursively renders the subtree rooted at pageId into strBuilder, one
     * line per page, indented by depth. Intended for debugging only.
     *
     * @param unpin whether to release/unpin the parent after latching this page
     *              (true for the last child, mirroring latch coupling)
     */
    @SuppressWarnings("rawtypes") 
    public void printTree(int pageId, ICachedPage parent, boolean unpin, IBTreeLeafFrame leafFrame,
            IBTreeInteriorFrame interiorFrame, byte treeHeight, ISerializerDeserializer[] keySerdes, StringBuilder strBuilder, MultiComparator cmp) throws Exception {
        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
        node.acquireReadLatch();
        try {
            if (parent != null && unpin == true) {
                parent.releaseReadLatch();
                bufferCache.unpin(parent);
            }
            interiorFrame.setPage(node);
            int level = interiorFrame.getLevel();
            strBuilder.append(String.format("%1d ", level));
            strBuilder.append(String.format("%3d ", pageId) + ": ");
            // Indent proportionally to depth below the root.
            for (int i = 0; i < treeHeight - level; i++) {
                strBuilder.append("    ");
            }

            String keyString;
            if (interiorFrame.isLeaf()) {
                leafFrame.setPage(node);
                keyString = TreeIndexUtils.printFrameTuples(leafFrame, keySerdes);
            } else {
                keyString = TreeIndexUtils.printFrameTuples(interiorFrame, keySerdes);
            }

            strBuilder.append(keyString + "\n");
            if (!interiorFrame.isLeaf()) {
                // This page is released by the recursive call for its last child
                // (unpin == true), or by the catch block on failure.
                ArrayList<Integer> children = ((BTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
                for (int i = 0; i < children.size(); i++) {
                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, treeHeight, keySerdes, strBuilder, cmp);
                }
            } else {
                node.releaseReadLatch();
                bufferCache.unpin(node);
            }
        } catch (Exception e) {
            // NOTE(review): the exception is swallowed after printing, and if it
            // is thrown after the leaf branch above already released this page,
            // this releases/unpins it a second time — acceptable for a debug
            // helper, but worth confirming.
            node.releaseReadLatch();
            bufferCache.unpin(node);
            e.printStackTrace();
        }
    }
+
+    @Override
+    public ITreeIndexAccessor createAccessor() {
+        return new BTreeAccessor(this);
+    }
+    
+	// TODO: Class should be private. But currently we need to expose the
+	// setOpContext() API to the LSM Tree for it to work correctly.
+    public class BTreeAccessor implements ITreeIndexAccessor {
+        private BTree btree;
+        private BTreeOpContext ctx;
+        
+        public BTreeAccessor(BTree btree) {
+            this.btree = btree;
+            this.ctx = btree.createOpContext();
+        }
+        
+        @Override
+        public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            ctx.reset(IndexOp.INSERT);
+            btree.insert(tuple, ctx);
+        }
+
+        @Override
+        public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            ctx.reset(IndexOp.UPDATE);
+            btree.update(tuple, ctx);
+        }
+
+        @Override
+        public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            ctx.reset(IndexOp.DELETE);
+            btree.delete(tuple, ctx);
+        }
+        
+        @Override
+        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            ctx.reset(IndexOp.UPSERT);
+            btree.upsert(tuple, ctx);
+        }
+        
+        @Override
+		public ITreeIndexCursor createSearchCursor() {
+			IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
+	        return new BTreeRangeSearchCursor(leafFrame, false);
+		}
+        
+        @Override
+        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
+                TreeIndexException {
+            ctx.reset(IndexOp.SEARCH);
+            btree.search((ITreeIndexCursor) cursor, searchPred, ctx);
+        }
+
+        @Override
+		public ITreeIndexCursor createDiskOrderScanCursor() {
+			IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
+	        return new TreeDiskOrderScanCursor(leafFrame);
+		}
+        
+        @Override
+        public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException {
+            ctx.reset(IndexOp.DISKORDERSCAN);
+            btree.diskOrderScan(cursor, ctx);
+        }
+		
+		// TODO: Ideally, this method should not exist. But we need it for
+		// the changing the leafFrame and leafFrameFactory of the op context for
+		// the LSM-BTree to work correctly.
+		public BTreeOpContext getOpContext() {
+			return ctx;
+		}
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
new file mode 100644
index 0000000..855f9e6
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
@@ -0,0 +1,21 @@
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+public class BTreeCursorInitialState implements ICursorInitialState {
+
+    private ICachedPage page;
+
+    public BTreeCursorInitialState(ICachedPage page) {
+        this.page = page;
+    }
+
+    public ICachedPage getPage() {
+        return page;
+    }
+
+    public void setPage(ICachedPage page) {
+        this.page = page;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
new file mode 100644
index 0000000..23a14eb
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.LongArrayList;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public class BTreeOpContext implements IIndexOpContext {
+    private final int INIT_ARRAYLIST_SIZE = 6;
+    public MultiComparator cmp;
+    public ITreeIndexFrameFactory leafFrameFactory;
+    public ITreeIndexFrameFactory interiorFrameFactory;
+    public IBTreeLeafFrame leafFrame;
+    public IBTreeInteriorFrame interiorFrame;
+    public ITreeIndexMetaDataFrame metaFrame;
+    public IndexOp op;
+    public ITreeIndexCursor cursor;
+    public BTreeCursorInitialState cursorInitialState;
+    public RangePredicate pred;
+    public BTreeSplitKey splitKey;    
+    public LongArrayList pageLsns;
+    public IntArrayList smPages;
+    public IntArrayList freePages;
+    public int opRestarts = 0;
+    public boolean exceptionHandled;
+    
+    public BTreeOpContext(ITreeIndexFrameFactory leafFrameFactory, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexMetaDataFrame metaFrame, IBinaryComparatorFactory[] cmpFactories) {
+        this.cmp = MultiComparator.create(cmpFactories);
+        this.leafFrameFactory = leafFrameFactory;
+        this.leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        if (leafFrame != null) {
+            leafFrame.setMultiComparator(cmp);
+        }
+        this.interiorFrameFactory = interiorFrameFactory;
+        this.interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+        if (interiorFrame != null) {
+            interiorFrame.setMultiComparator(cmp);
+        }
+        this.metaFrame = metaFrame;
+        this.pageLsns = new LongArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
+    }
+
+    public void reset() {
+        if (pageLsns != null)
+            pageLsns.clear();
+        if (freePages != null)
+            freePages.clear();
+        if (smPages != null)
+            smPages.clear();
+        opRestarts = 0;
+        exceptionHandled = false;
+    }
+
+    @Override
+    public void reset(IndexOp newOp) {
+        if (newOp == IndexOp.SEARCH || newOp == IndexOp.DISKORDERSCAN) {
+            if (cursorInitialState == null) {
+                cursorInitialState = new BTreeCursorInitialState(null);
+            }
+        } else {
+            // Insert, delete, update or upsert operation.
+            if (smPages == null) {
+                smPages = new IntArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
+            }
+            if (freePages == null) {
+                freePages = new IntArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
+            }
+            if (pred == null) {
+                pred = new RangePredicate(null, null, true, true, null, null);
+            }
+            if (splitKey == null) {
+                splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
+            }
+        }
+        op = newOp;
+        exceptionHandled = false;
+    }
+
+    public IBTreeLeafFrame createLeafFrame() {
+        return (IBTreeLeafFrame) leafFrameFactory.createFrame();
+    }
+
+    public IBTreeInteriorFrame createInteriorFrame() {
+        return (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
new file mode 100644
index 0000000..8bf4db3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
/**
 * Range-scan cursor over the leaf level of a BTree. Iterates the tuples of
 * the current leaf frame and follows next-leaf pointers across pages,
 * keeping exactly one page pinned and latched at a time.
 */
public class BTreeRangeSearchCursor implements ITreeIndexCursor {

    private int fileId = -1;
    // Currently pinned and latched leaf page (null when closed).
    private ICachedPage page = null;
    private IBufferCache bufferCache = null;

    // Position of the cursor within the current leaf frame.
    private int tupleIndex = 0;
    // Index of the last tuple on the current page inside the high-key bound.
    private int stopTupleIndex;
    // Step applied by next(); set to 1 in open().
    private int tupleIndexInc = 0;

    private FindTupleMode lowKeyFtm;
    private FindTupleMode highKeyFtm;

    private FindTupleNoExactMatchPolicy lowKeyFtp;
    private FindTupleNoExactMatchPolicy highKeyFtp;

    private final IBTreeLeafFrame frame;
    private final ITreeIndexTupleReference frameTuple;
    // If true, pages are write-latched instead of read-latched.
    private final boolean exclusiveLatchNodes;

    private RangePredicate pred;
    private MultiComparator lowKeyCmp;
    private MultiComparator highKeyCmp;
    private ITupleReference lowKey;
    private ITupleReference highKey;

    public BTreeRangeSearchCursor(IBTreeLeafFrame frame, boolean exclusiveLatchNodes) {
        this.frame = frame;
        this.frameTuple = frame.createTupleReference();
        this.exclusiveLatchNodes = exclusiveLatchNodes;
    }

    // Releases the latch on, and unpins, the current page (if any).
    @Override
    public void close() throws HyracksDataException {
        if (page != null) {
            if (exclusiveLatchNodes) {
                page.releaseWriteLatch();
            } else {
                page.releaseReadLatch();
            }
            bufferCache.unpin(page);
        }
        tupleIndex = 0;
        page = null;
        pred = null;
    }

    // Returns the reusable tuple reference last positioned by hasNext().
    public ITupleReference getTuple() {
        return frameTuple;
    }

    @Override
    public ICachedPage getPage() {
        return page;
    }

    // Advances to the next leaf page, latch-coupling: the next page is pinned
    // and latched BEFORE the current one is released. Skips empty leaves while
    // a further next pointer (> 0) exists.
    private void fetchNextLeafPage(int nextLeafPage) throws HyracksDataException {
        do {
            ICachedPage nextLeaf = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeafPage), false);
            if (exclusiveLatchNodes) {
                nextLeaf.acquireWriteLatch();
                page.releaseWriteLatch();
            } else {
                nextLeaf.acquireReadLatch();
                page.releaseReadLatch();
            }
            bufferCache.unpin(page);
            page = nextLeaf;
            frame.setPage(page);
            nextLeafPage = frame.getNextLeaf();
        } while (frame.getTupleCount() == 0 && nextLeafPage > 0);
    }

    // Positions frameTuple on the tuple at tupleIndex, moving to the next leaf
    // page first when the current frame is exhausted.
    @Override
    public boolean hasNext() throws HyracksDataException {
        if (tupleIndex >= frame.getTupleCount()) {
            int nextLeafPage = frame.getNextLeaf();
            if (nextLeafPage >= 0) {
                fetchNextLeafPage(nextLeafPage);
                tupleIndex = 0;
                // Recompute the high-key stop index for the new page.
                stopTupleIndex = getHighKeyIndex();
                if (stopTupleIndex < 0) {
                    return false;
                }
            } else {
                return false;
            }
        }

        frameTuple.resetByTupleIndex(frame, tupleIndex);
        if (highKey == null || tupleIndex <= stopTupleIndex) {
            return true;
        } else {
            return false;
        }
    }

    @Override
    public void next() throws HyracksDataException {
        tupleIndex += tupleIndexInc;
    }

    // Index of the first tuple on the current page to scan, derived from the
    // low key (0 when there is no low key).
    private int getLowKeyIndex() throws HyracksDataException {
        if (lowKey == null) {
            return 0;
        }
        int index = frame.findTupleIndex(lowKey, frameTuple, lowKeyCmp, lowKeyFtm, lowKeyFtp);
        if (pred.lowKeyInclusive) {
            index++;
        } else {
            // NOTE(review): findTupleIndex appears to signal "no qualifying
            // tuple" with a negative value — confirm against IBTreeLeafFrame.
            if (index < 0) {
                index = frame.getTupleCount();
            }
        }
        return index;
    }

    // Index of the last tuple on the current page inside the high-key bound
    // (last tuple of the page when there is no high key).
    private int getHighKeyIndex() throws HyracksDataException {
        if (highKey == null) {
            return frame.getTupleCount() - 1;
        }
        int index = frame.findTupleIndex(highKey, frameTuple, highKeyCmp, highKeyFtm, highKeyFtp);
        if (pred.highKeyInclusive) {
            if (index < 0) {
                index = frame.getTupleCount() - 1;
            }
            else {
                index--;
            }
        }
        return index;
    }

    // Opens the cursor on the page supplied via initialState. Assumes the
    // caller has already pinned and latched that page — TODO confirm.
    @Override
    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
        // in case open is called multiple times without closing
        if (page != null) {
            if (exclusiveLatchNodes) {
                page.releaseWriteLatch();
            } else {
                page.releaseReadLatch();
            }
            bufferCache.unpin(page);
        }

        page = ((BTreeCursorInitialState) initialState).getPage();
        frame.setPage(page);

        pred = (RangePredicate) searchPred;
        lowKeyCmp = pred.getLowKeyComparator();
        highKeyCmp = pred.getHighKeyComparator();

        lowKey = pred.getLowKey();
        highKey = pred.getHighKey();

        // Choose find-tuple policies so the scan boundaries land on the
        // correct side of keys equal to the low/high key.
        lowKeyFtm = FindTupleMode.EXCLUSIVE;
        if (pred.lowKeyInclusive) {
            lowKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
        } else {
            lowKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
        }

        highKeyFtm = FindTupleMode.EXCLUSIVE;
        if (pred.highKeyInclusive) {
            highKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
        } else {
            highKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
        }

        tupleIndex = getLowKeyIndex();
        stopTupleIndex = getHighKeyIndex();
        tupleIndexInc = 1;
    }

    // NOTE(review): exceptions from close() are swallowed and only printed;
    // callers cannot observe a failed reset.
    @Override
    public void reset() {
        try {
            close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void setBufferCache(IBufferCache bufferCache) {
        this.bufferCache = bufferCache;
    }

    @Override
    public void setFileId(int fileId) {
        this.fileId = fileId;
    }

    @Override
    public boolean exclusiveLatchNodes() {
        return exclusiveLatchNodes;
    }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java
new file mode 100644
index 0000000..e664e5b
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+
+public class BTreeSplitKey implements ISplitKey {
+    public byte[] data = null;
+    public ByteBuffer buf = null;
+    public ITreeIndexTupleReference tuple;
+    public int keySize = 0;
+
+    public BTreeSplitKey(ITreeIndexTupleReference tuple) {
+        this.tuple = tuple;
+    }
+    
+    public void initData(int keySize) {
+        // try to reuse existing memory from a lower-level split if possible
+        this.keySize = keySize;
+        if (data != null) {
+            if (data.length < keySize + 8) {
+                data = new byte[keySize + 8]; // add 8 for left and right page
+                buf = ByteBuffer.wrap(data);
+            }
+        } else {
+            data = new byte[keySize + 8]; // add 8 for left and right page
+            buf = ByteBuffer.wrap(data);
+        }
+
+        tuple.resetByTupleOffset(buf, 0);
+    }
+
+    public void reset() {
+        data = null;
+        buf = null;
+    }
+
+    public ByteBuffer getBuffer() {
+        return buf;
+    }
+
+    public ITreeIndexTupleReference getTuple() {
+        return tuple;
+    }
+
+    public int getLeftPage() {
+        return buf.getInt(keySize);
+    }
+
+    public int getRightPage() {
+        return buf.getInt(keySize + 4);
+    }
+
+    public void setLeftPage(int leftPage) {
+        buf.putInt(keySize, leftPage);
+    }
+
+    public void setRightPage(int rightPage) {
+        buf.putInt(keySize + 4, rightPage);
+    }
+
+    public void setPages(int leftPage, int rightPage) {
+        buf.putInt(keySize, leftPage);
+        buf.putInt(keySize + 4, rightPage);
+    }
+
+    public BTreeSplitKey duplicate(ITreeIndexTupleReference copyTuple) {
+        BTreeSplitKey copy = new BTreeSplitKey(copyTuple);
+        copy.data = data.clone();
+        copy.buf = ByteBuffer.wrap(copy.data);
+        copy.tuple.setFieldCount(tuple.getFieldCount());
+        copy.tuple.resetByTupleOffset(copy.buf, 0);
+        return copy;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
new file mode 100644
index 0000000..80ac173
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
+
+public class FieldPrefixPrefixTupleReference extends TypeAwareTupleReference {
+
+    public FieldPrefixPrefixTupleReference(ITypeTraits[] typeTraits) {
+        super(typeTraits);
+    }
+
+    // assumes tuple index refers to prefix tuples
+    @Override
+    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+        BTreeFieldPrefixNSMLeafFrame concreteFrame = (BTreeFieldPrefixNSMLeafFrame) frame;
+        int prefixSlotOff = concreteFrame.slotManager.getPrefixSlotOff(tupleIndex);
+        int prefixSlot = concreteFrame.getBuffer().getInt(prefixSlotOff);
+        setFieldCount(concreteFrame.slotManager.decodeFirstSlotField(prefixSlot));
+        tupleStartOff = concreteFrame.slotManager.decodeSecondSlotField(prefixSlot);
+        buf = concreteFrame.getBuffer();
+        resetByTupleOffset(buf, tupleStartOff);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
new file mode 100644
index 0000000..6525bb9
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
@@ -0,0 +1,304 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public class FieldPrefixSlotManager implements IPrefixSlotManager {
+
+    private static final int slotSize = 4;
+    public static final int TUPLE_UNCOMPRESSED = 0xFF;
+    public static final int MAX_PREFIX_SLOTS = 0xFE;
+    public static final int GREATEST_KEY_INDICATOR = 0x00FFFFFF;
+    public static final int ERROR_INDICATOR = 0x00FFFFFE;
+
+    private ByteBuffer buf;
+    private BTreeFieldPrefixNSMLeafFrame frame;
+    private MultiComparator cmp;
+    
+    public int decodeFirstSlotField(int slot) {
+        return (slot & 0xFF000000) >>> 24;
+    }
+
+    public int decodeSecondSlotField(int slot) {
+        return slot & 0x00FFFFFF;
+    }
+
+    public int encodeSlotFields(int firstField, int secondField) {
+        return ((firstField & 0x000000FF) << 24) | (secondField & 0x00FFFFFF);
+    }
+
+    // returns prefix slot number, or TUPLE_UNCOMPRESSED of no match was found
+    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple) {
+        int prefixMid;
+        int prefixBegin = 0;
+        int prefixEnd = frame.getPrefixTupleCount() - 1;
+
+        if (frame.getPrefixTupleCount() > 0) {
+            while (prefixBegin <= prefixEnd) {
+                prefixMid = (prefixBegin + prefixEnd) / 2;
+                framePrefixTuple.resetByTupleIndex(frame, prefixMid);
+                int cmpVal = cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
+                if (cmpVal < 0)
+                    prefixEnd = prefixMid - 1;
+                else if (cmpVal > 0)
+                    prefixBegin = prefixMid + 1;
+                else
+                    return prefixMid;
+            }
+        }
+
+        return FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
+    }
+
+    @Override
+    public int findSlot(ITupleReference searchKey, ITreeIndexTupleReference frameTuple,
+            ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
+            FindTupleNoExactMatchPolicy matchPolicy) {
+        if (frame.getTupleCount() <= 0)
+            encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_KEY_INDICATOR);
+
+        int prefixMid;
+        int prefixBegin = 0;
+        int prefixEnd = frame.getPrefixTupleCount() - 1;
+        int prefixMatch = TUPLE_UNCOMPRESSED;
+
+        // bounds are inclusive on both ends
+        int tuplePrefixSlotNumLbound = prefixBegin;
+        int tuplePrefixSlotNumUbound = prefixEnd;
+
+        // binary search on the prefix slots to determine upper and lower bounds
+        // for the prefixSlotNums in tuple slots
+        while (prefixBegin <= prefixEnd) {
+            prefixMid = (prefixBegin + prefixEnd) / 2;
+            framePrefixTuple.resetByTupleIndex(frame, prefixMid);
+            int cmp = multiCmp.fieldRangeCompare(searchKey, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
+            if (cmp < 0) {
+                prefixEnd = prefixMid - 1;
+                tuplePrefixSlotNumLbound = prefixMid - 1;
+            } else if (cmp > 0) {
+                prefixBegin = prefixMid + 1;
+                tuplePrefixSlotNumUbound = prefixMid + 1;
+            } else {
+                if (mode == FindTupleMode.EXCLUSIVE) {
+                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
+                        prefixBegin = prefixMid + 1;
+                    else
+                        prefixEnd = prefixMid - 1;
+                } else {
+                    tuplePrefixSlotNumLbound = prefixMid;
+                    tuplePrefixSlotNumUbound = prefixMid;
+                    prefixMatch = prefixMid;
+                }
+
+                break;
+            }
+        }
+
+        // System.out.println("SLOTLBOUND: " + tuplePrefixSlotNumLbound);
+        // System.out.println("SLOTUBOUND: " + tuplePrefixSlotNumUbound);
+
+        int tupleMid = -1;
+        int tupleBegin = 0;
+        int tupleEnd = frame.getTupleCount() - 1;
+
+        // binary search on tuples, guided by the lower and upper bounds on
+        // prefixSlotNum
+        while (tupleBegin <= tupleEnd) {
+            tupleMid = (tupleBegin + tupleEnd) / 2;
+            int tupleSlotOff = getTupleSlotOff(tupleMid);
+            int tupleSlot = buf.getInt(tupleSlotOff);
+            int prefixSlotNum = decodeFirstSlotField(tupleSlot);
+
+            // System.out.println("RECS: " + recBegin + " " + recMid + " " +
+            // recEnd);
+            int cmp = 0;
+            if (prefixSlotNum == TUPLE_UNCOMPRESSED) {
+                frameTuple.resetByTupleIndex(frame, tupleMid);
+                cmp = multiCmp.compare(searchKey, frameTuple);
+            } else {
+                if (prefixSlotNum < tuplePrefixSlotNumLbound)
+                    cmp = 1;
+                else if (prefixSlotNum > tuplePrefixSlotNumUbound)
+                    cmp = -1;
+                else {
+                    frameTuple.resetByTupleIndex(frame, tupleMid);
+                    cmp = multiCmp.compare(searchKey, frameTuple);
+                }
+            }
+
+            if (cmp < 0)
+                tupleEnd = tupleMid - 1;
+            else if (cmp > 0)
+                tupleBegin = tupleMid + 1;
+            else {
+                if (mode == FindTupleMode.EXCLUSIVE) {
+                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
+                        tupleBegin = tupleMid + 1;
+                    else
+                        tupleEnd = tupleMid - 1;
+                } else {
+                	if (mode == FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS) {
+                		return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
+                	} else {
+                		return encodeSlotFields(prefixMatch, tupleMid);
+                	}
+                }
+            }
+        }
+
+        // System.out.println("RECS: " + recBegin + " " + recMid + " " +
+        // recEnd);
+
+        if (mode == FindTupleMode.EXACT)
+            return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
+
+        // do final comparison to determine whether the search key is greater
+        // than all keys or in between some existing keys
+        if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
+            if (tupleBegin > frame.getTupleCount() - 1)
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
+            frameTuple.resetByTupleIndex(frame, tupleBegin);
+            if (multiCmp.compare(searchKey, frameTuple) < 0)
+                return encodeSlotFields(prefixMatch, tupleBegin);
+            else
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
+        } else {
+            if (tupleEnd < 0)
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
+            frameTuple.resetByTupleIndex(frame, tupleEnd);
+            if (multiCmp.compare(searchKey, frameTuple) > 0)
+                return encodeSlotFields(prefixMatch, tupleEnd);
+            else
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
+        }
+    }
+
+    public int getPrefixSlotStartOff() {
+        return buf.capacity() - slotSize;
+    }
+
+    public int getPrefixSlotEndOff() {
+        return buf.capacity() - slotSize * frame.getPrefixTupleCount();
+    }
+
+    public int getTupleSlotStartOff() {
+        return getPrefixSlotEndOff() - slotSize;
+    }
+
+    public int getTupleSlotEndOff() {
+        return buf.capacity() - slotSize * (frame.getPrefixTupleCount() + frame.getTupleCount());
+    }
+
+    public int getSlotSize() {
+        return slotSize;
+    }
+
+    public void setSlot(int offset, int value) {
+        frame.getBuffer().putInt(offset, value);
+    }
+
+    public int insertSlot(int slot, int tupleOff) {
+        int slotNum = decodeSecondSlotField(slot);
+        if (slotNum == ERROR_INDICATOR) {
+        	System.out.println("WOW BIG PROBLEM!");
+        }
+        if (slotNum == GREATEST_KEY_INDICATOR) {
+            int slotOff = getTupleSlotEndOff() - slotSize;
+            int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
+            setSlot(slotOff, newSlot);
+            return newSlot;
+        } else {
+            int slotEndOff = getTupleSlotEndOff();
+            int slotOff = getTupleSlotOff(slotNum);
+            int length = (slotOff - slotEndOff) + slotSize;
+            System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize,
+                    length);
+
+            int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
+            setSlot(slotOff, newSlot);
+            return newSlot;
+        }
+    }
+
+    public int getPrefixSlotOff(int tupleIndex) {
+        return getPrefixSlotStartOff() - tupleIndex * slotSize;
+    }
+
+    public int getTupleSlotOff(int tupleIndex) {
+        return getTupleSlotStartOff() - tupleIndex * slotSize;
+    }
+
+    public void setPrefixSlot(int tupleIndex, int slot) {
+        buf.putInt(getPrefixSlotOff(tupleIndex), slot);
+    }
+
+	@Override
+	public int getGreatestKeyIndicator() {
+		return GREATEST_KEY_INDICATOR;
+	}
+
+	@Override
+	public int getErrorIndicator() {
+		return ERROR_INDICATOR;
+	}
+
+	@Override
+	public void setFrame(ITreeIndexFrame frame) {
+		this.frame = (BTreeFieldPrefixNSMLeafFrame)frame;
+        this.buf = frame.getBuffer();
+	}
+
+	@Override
+	public int findTupleIndex(ITupleReference searchKey,
+			ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
+			FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+	
+	@Override
+	public int getSlotStartOff() {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+
+	@Override
+	public int getSlotEndOff() {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+
+	@Override
+	public int getTupleOff(int slotOff) {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+
+	@Override
+	public int getSlotOff(int tupleIndex) {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+	
+	public void setMultiComparator(MultiComparator cmp) {
+		this.cmp = cmp;
+	}
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
new file mode 100644
index 0000000..9644a3e
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
@@ -0,0 +1,116 @@
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+
+public class FieldPrefixTupleReference implements ITreeIndexTupleReference {
+
+    private BTreeFieldPrefixNSMLeafFrame frame;
+    private int prefixTupleStartOff;
+    private int suffixTupleStartOff;
+    private int numPrefixFields;
+    private int fieldCount;
+    private ITreeIndexTupleReference helperTuple;
+
+    public FieldPrefixTupleReference(ITreeIndexTupleReference helperTuple) {
+        this.helperTuple = helperTuple;
+        this.fieldCount = helperTuple.getFieldCount();
+    }
+
+    @Override
+    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+        this.frame = (BTreeFieldPrefixNSMLeafFrame) frame;
+
+        int tupleSlotOff = this.frame.slotManager.getTupleSlotOff(tupleIndex);
+        int tupleSlot = this.frame.getBuffer().getInt(tupleSlotOff);
+        int prefixSlotNum = this.frame.slotManager.decodeFirstSlotField(tupleSlot);
+        suffixTupleStartOff = this.frame.slotManager.decodeSecondSlotField(tupleSlot);
+
+        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+            int prefixSlotOff = this.frame.slotManager.getPrefixSlotOff(prefixSlotNum);
+            int prefixSlot = this.frame.getBuffer().getInt(prefixSlotOff);
+            numPrefixFields = this.frame.slotManager.decodeFirstSlotField(prefixSlot);
+            prefixTupleStartOff = this.frame.slotManager.decodeSecondSlotField(prefixSlot);
+        } else {
+            numPrefixFields = 0;
+            prefixTupleStartOff = -1;
+        }
+    }
+
+    @Override
+    public void setFieldCount(int fieldCount) {
+        this.fieldCount = fieldCount;
+    }
+
+    @Override
+    public void setFieldCount(int fieldStartIndex, int fieldCount) {
+        throw new UnsupportedOperationException("Not supported.");
+    }
+
+    @Override
+    public int getFieldCount() {
+        return fieldCount;
+    }
+
+    @Override
+    public byte[] getFieldData(int fIdx) {
+        return frame.getBuffer().array();
+    }
+
+    @Override
+    public int getFieldLength(int fIdx) {
+        if (fIdx < numPrefixFields) {
+            helperTuple.setFieldCount(numPrefixFields);
+            helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
+            return helperTuple.getFieldLength(fIdx);
+        } else {
+            helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
+            helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
+            return helperTuple.getFieldLength(fIdx - numPrefixFields);
+        }
+    }
+
+    @Override
+    public int getFieldStart(int fIdx) {
+        if (fIdx < numPrefixFields) {
+            helperTuple.setFieldCount(numPrefixFields);
+            helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
+            return helperTuple.getFieldStart(fIdx);
+        } else {
+            helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
+            helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
+            return helperTuple.getFieldStart(fIdx - numPrefixFields);
+        }
+    }
+
+    // unsupported operation
+    @Override
+    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset) {
+        throw new UnsupportedOperationException("Resetting this type of frame by offset is not supported.");
+    }
+
+    @Override
+    public int getTupleSize() {
+        return getSuffixTupleSize() + getPrefixTupleSize();
+    }
+    
+    public int getSuffixTupleSize() {
+        helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
+        helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
+        return helperTuple.getTupleSize();
+    }
+    
+    public int getPrefixTupleSize() {
+        if (numPrefixFields == 0) return 0;
+        helperTuple.setFieldCount(numPrefixFields);
+        helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
+        return helperTuple.getTupleSize();
+    }
+    
+    public int getNumPrefixFields() {
+        return numPrefixFields;
+    }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
similarity index 100%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
rename to hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
similarity index 100%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
rename to hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java
new file mode 100644
index 0000000..57e3a96
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java
@@ -0,0 +1,60 @@
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+/**
+ * Static helpers for constructing BTrees and their frame factories.
+ */
+public class BTreeUtils {
+
+    private BTreeUtils() {
+        // Utility class; no instances.
+    }
+
+    /**
+     * Creates a BTree with a type-aware tuple writer, NSM interior frames, a
+     * linked-list free page manager rooted at meta page 0, and the requested
+     * leaf frame type.
+     */
+    public static BTree createBTree(IBufferCache bufferCache, IOperationCallback opCallback, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, BTreeLeafFrameType leafType) throws BTreeException {
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = getLeafFrameFactory(tupleWriterFactory, leafType);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+        return new BTree(bufferCache, opCallback, typeTraits.length, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
+    }
+
+    /**
+     * Creates a MultiComparator covering only the fields present in searchKey
+     * (a prefix of the indexed key fields), constructing new
+     * IBinaryComparators. Returns the full comparator when searchKey is null
+     * or has at least as many fields as there are factories; previously a
+     * search key with extra fields caused an ArrayIndexOutOfBoundsException.
+     */
+    public static MultiComparator getSearchMultiComparator(IBinaryComparatorFactory[] cmpFactories, ITupleReference searchKey) {
+        if (searchKey == null || searchKey.getFieldCount() >= cmpFactories.length) {
+            return MultiComparator.create(cmpFactories);
+        }
+        IBinaryComparator[] newCmps = new IBinaryComparator[searchKey.getFieldCount()];
+        for (int i = 0; i < searchKey.getFieldCount(); i++) {
+            newCmps[i] = cmpFactories[i].createBinaryComparator();
+        }
+        return new MultiComparator(newCmps);
+    }
+
+    /**
+     * Returns the leaf frame factory matching the requested leaf type.
+     *
+     * @throws BTreeException on an unrecognized BTreeLeafFrameType
+     */
+    public static ITreeIndexFrameFactory getLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, BTreeLeafFrameType leafType) throws BTreeException {
+        switch (leafType) {
+            case REGULAR_NSM: {
+                return new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+            }
+            case FIELD_PREFIX_COMPRESSED_NSM: {
+                return new BTreeFieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
+            }
+            default: {
+                throw new BTreeException("Unknown BTreeLeafFrameType: " + leafType.toString());
+            }
+        }
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/pom.xml b/hyracks/hyracks-storage-am-common/pom.xml
new file mode 100644
index 0000000..a90e91e
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/pom.xml
@@ -0,0 +1,57 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-common</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-am-common</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
new file mode 100644
index 0000000..60e8ba9
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+/**
+ * Initial positioning state handed to an index cursor when it is opened:
+ * the cached page the cursor should start from.
+ */
+public interface ICursorInitialState {
+	public ICachedPage getPage();
+	public void setPage(ICachedPage page);
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexAccessor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexAccessor.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexAccessor.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexAccessor.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoadContext.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoadContext.java
new file mode 100644
index 0000000..a896d80
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoadContext.java
@@ -0,0 +1,4 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+/**
+ * Marker interface for state carried across successive bulk-load calls;
+ * each index implementation defines its own concrete context.
+ */
+public interface IIndexBulkLoadContext {
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexCursor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexCursor.java
new file mode 100644
index 0000000..d29fd73
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexCursor.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * A cursor over the entries of an index. A cursor is opened with an initial
+ * state and a search predicate, then consumed via hasNext()/next()/getTuple().
+ */
+public interface IIndexCursor {
+    // Opens the cursor, positioning it according to the given predicate.
+    public void open(ICursorInitialState initialState,
+            ISearchPredicate searchPred) throws HyracksDataException;      
+
+    // True if another matching entry is available.
+    public boolean hasNext() throws HyracksDataException;
+
+    // Advances the cursor to the next entry.
+    public void next() throws HyracksDataException;
+
+    // Closes the cursor.
+    public void close() throws HyracksDataException;
+
+    // Resets the cursor state so it can be opened again.
+    public void reset();
+
+    // The tuple at the cursor's current position (valid after next()).
+    public ITupleReference getTuple();
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOpContext.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOpContext.java
new file mode 100644
index 0000000..7153f78
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOpContext.java
@@ -0,0 +1,8 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+
+/**
+ * Reusable per-operation state for an index: reset() clears the context,
+ * reset(IndexOp) re-initializes it for the given operation type.
+ */
+public interface IIndexOpContext {
+	void reset();
+	void reset(IndexOp newOp);
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IOperationCallback.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IOperationCallback.java
new file mode 100644
index 0000000..9e66b43
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IOperationCallback.java
@@ -0,0 +1,9 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Hooks invoked around an index operation on a tuple: pre() before the
+ * operation is applied, post() after it completes.
+ */
+public interface IOperationCallback {
+    public void pre(ITupleReference tuple);
+
+    public void post(ITupleReference tuple);
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IOperationCallbackProvider.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IOperationCallbackProvider.java
new file mode 100644
index 0000000..974ef1a
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IOperationCallbackProvider.java
@@ -0,0 +1,7 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import java.io.Serializable;
+
+/**
+ * Factory for IOperationCallback instances. Serializable — presumably so a
+ * provider can travel inside serialized job descriptors; confirm at call sites.
+ */
+public interface IOperationCallbackProvider extends Serializable {
+    public IOperationCallback getOperationCallback();
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProvider.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProvider.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProvider.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProvider.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProviderFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProviderFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProviderFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProviderFactory.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
new file mode 100644
index 0000000..2619493
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Manages a frame's slot array, which maps tuple indexes to tuple offsets
+ * within the frame's buffer.
+ */
+public interface ISlotManager {
+	// Finds the slot index for searchKey using multiCmp; mode and matchPolicy
+	// control how exact and inexact matches are resolved.
+	public int findTupleIndex(ITupleReference searchKey,
+			ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
+			FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
+
+	// Indicator value for the "greatest key" outcome.
+	public int getGreatestKeyIndicator();
+	
+	// Indicator value signalling an error outcome.
+	public int getErrorIndicator();
+
+	// Binds this slot manager to the given frame.
+	public void setFrame(ITreeIndexFrame frame);
+	
+	// Inserts a slot for the tuple at tupleIndex pointing to tupleOff; the
+	// returned int is implementation-specific (e.g. the new slot's value).
+	public int insertSlot(int tupleIndex, int tupleOff);
+
+	public int getSlotStartOff();
+
+	public int getSlotEndOff();
+
+	// Tuple offset stored in the slot at byte offset slotOff.
+	public int getTupleOff(int slotOff);
+
+	public void setSlot(int slotOff, int value);
+
+	// Byte offset of the slot for the tuple at tupleIndex.
+	public int getSlotOff(int tupleIndex);
+
+	public int getSlotSize();
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
new file mode 100644
index 0000000..52626cf
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+
+/**
+ * Interface describing the operations of tree-based index structures. Indexes
+ * implementing this interface can easily reuse the tree index operators for
+ * dataflow. We assume that indexes store tuples with a fixed number of fields.
+ * Users must perform operations on an ITreeIndex via an ITreeIndexAccessor.
+ */
+public interface ITreeIndex extends IIndex {
+    /**
+     * @return The index's leaf frame factory.
+     */
+    public ITreeIndexFrameFactory getLeafFrameFactory();
+
+    /**
+     * @return The index's interior frame factory.
+     */
+    public ITreeIndexFrameFactory getInteriorFrameFactory();
+
+    /**
+     * @return The index's free page manager.
+     */
+    public IFreePageManager getFreePageManager();
+
+    /**
+     * @return The number of fields in this index's tuples.
+     */
+    public int getFieldCount();
+
+    /**
+     * @return The current root page id of this index (hence "current": it may
+     *         differ between calls over the index's lifetime).
+     */
+    public int getRootPageId();
+
+    /**
+     * @return The file id of this index.
+     */
+    public int getFileId();
+
+    /**
+     * @return The comparator factories for this index's key fields.
+     */
+    public IBinaryComparatorFactory[] getComparatorFactories();
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexAccessor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexAccessor.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexAccessor.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexAccessor.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
new file mode 100644
index 0000000..c33a8d8
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+/**
+ * In-memory view over one tree-index page (an ICachedPage buffer). A frame is
+ * reusable: setPage() rebinds it to another page. Implementations define the
+ * page layout and tuple-level insert/update/delete and split operations.
+ */
+public interface ITreeIndexFrame {
+
+	public void initBuffer(byte level);
+	
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple);
+	
+	public void insert(ITupleReference tuple, int tupleIndex);    
+    
+	public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex);
+	
+	public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace);    
+    
+    public void delete(ITupleReference tuple, int tupleIndex);
+
+    // returns true if slots were modified, false otherwise
+    public boolean compact();
+
+    // returns true if compressed.
+    public boolean compress() throws HyracksDataException;
+
+    public int getTupleCount();
+
+    public int getTupleOffset(int slotNum);
+
+    public int getTotalFreeSpace();
+
+    public void setPageLsn(long pageLsn);
+
+    public long getPageLsn();
+
+    // Rebinds this frame to a different cached page.
+    public void setPage(ICachedPage page);
+
+    public ICachedPage getPage();
+
+    public ByteBuffer getBuffer();
+    
+    // for debugging
+    public String printHeader();
+
+    // Splits this frame's contents into rightFrame; the separating key is
+    // returned via splitKey.
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException;
+
+    public ISlotManager getSlotManager();
+
+    // ATTENTION: in b-tree operations it may not always be possible to
+    // determine whether an ICachedPage is a leaf or interior node
+    // a compatible interior and leaf implementation MUST return identical
+    // values when given the same ByteBuffer for the functions below
+    public boolean isLeaf();
+
+    public boolean isInterior();
+
+    public byte getLevel();
+
+    public void setLevel(byte level);
+
+    public int getSlotSize();
+
+    // for debugging
+    public int getFreeSpaceOff();
+
+    public void setFreeSpaceOff(int freeSpace);
+
+    public ITreeIndexTupleWriter getTupleWriter();
+
+    public int getPageHeaderSize();
+    
+    public ITreeIndexTupleReference createTupleReference();
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameCompressor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameCompressor.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameCompressor.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameCompressor.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
new file mode 100644
index 0000000..9e95970
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+/**
+ * Frame view over an index metadata page: page level, a next-page link, the
+ * maximum page id, a free-page list, and an LSM-component validity flag.
+ */
+public interface ITreeIndexMetaDataFrame {
+	public void initBuffer(byte level);
+
+	public void setPage(ICachedPage page);
+
+	public ICachedPage getPage();
+
+	public byte getLevel();
+
+	public void setLevel(byte level);
+
+	public int getNextPage();
+
+	public void setNextPage(int nextPage);
+
+	public int getMaxPage();
+
+	public void setMaxPage(int maxPage);
+
+	// Takes a page id from this frame's free-page list (behavior when the
+	// list is empty is implementation-specific — confirm in implementations).
+	public int getFreePage();
+
+	// True if this frame can record another free page.
+	public boolean hasSpace();
+
+	public void addFreePage(int freePage);
+	
+	// Special flag for LSM-Components to mark whether they are valid or not. 
+	public boolean isValid();
+	
+	// Set special validity flag.
+	public void setValid(boolean isValid);
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
new file mode 100644
index 0000000..30e8f39
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
/**
 * Serializes {@link ITupleReference} tuples into a tree index's on-page tuple
 * format, and computes how many bytes a tuple (or a range of its fields) will
 * occupy in that format.
 */
public interface ITreeIndexTupleWriter {
    // Writes the entire tuple into targetBuf at targetOff; returns the number of bytes written.
    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
    
    // Same as above, but writes into a raw byte array instead of a ByteBuffer.
    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff);

    // Number of bytes required to write the entire tuple in this writer's format.
    public int bytesRequired(ITupleReference tuple);

    // Writes only fields [startField, startField + numFields) of the tuple; returns bytes written.
    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, byte[] targetBuf,
            int targetOff);

    // Bytes required for the given contiguous range of fields only.
    public int bytesRequired(ITupleReference tuple, int startField, int numFields);

    // Returns a tuple-reference instance that can read back tuples written by
    // this writer. A dedicated reader is needed because the on-page format
    // produced here may differ from the input tuple's own format.
    public ITreeIndexTupleReference createTupleReference();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilter.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilter.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilter.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilter.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilterFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilterFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilterFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleFilterFactory.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdater.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdater.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdater.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdater.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdaterFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdaterFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdaterFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITupleUpdaterFactory.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexException.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexException.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexException.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexException.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
new file mode 100644
index 0000000..6f83e0b
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
@@ -0,0 +1,5 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
/** The concrete kinds of index structures (reported by IIndex.getIndexType()). */
public enum IndexType {
	BTREE, RTREE, INVERTED
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/data/PointablePrimitiveValueProviderFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/data/PointablePrimitiveValueProviderFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/data/PointablePrimitiveValueProviderFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/data/PointablePrimitiveValueProviderFactory.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
new file mode 100644
index 0000000..9f0fbc9
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public abstract class AbstractTreeIndexOperatorDescriptor extends
+		AbstractSingleActivityOperatorDescriptor implements
+		ITreeIndexOperatorDescriptor {
+
+	private static final long serialVersionUID = 1L;
+
+	protected final IFileSplitProvider fileSplitProvider;
+
+	protected final IBinaryComparatorFactory[] comparatorFactories;
+
+	protected final IStorageManagerInterface storageManager;
+	protected final IIndexRegistryProvider<IIndex> indexRegistryProvider;
+
+	protected final ITypeTraits[] typeTraits;
+	protected final IIndexDataflowHelperFactory dataflowHelperFactory;
+	protected final ITupleFilterFactory tupleFilterFactory;
+	
+	protected final boolean retainInput;
+    protected final IOperationCallbackProvider opCallbackProvider;
+
+	public AbstractTreeIndexOperatorDescriptor(IOperatorDescriptorRegistry spec,
+			int inputArity, int outputArity, RecordDescriptor recDesc,
+			IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<IIndex> indexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITypeTraits[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories,
+			IIndexDataflowHelperFactory dataflowHelperFactory,
+			ITupleFilterFactory tupleFilterFactory,
+			boolean retainInput, IOperationCallbackProvider opCallbackProvider) {
+		super(spec, inputArity, outputArity);
+		this.fileSplitProvider = fileSplitProvider;
+		this.storageManager = storageManager;
+		this.indexRegistryProvider = indexRegistryProvider;
+		this.typeTraits = typeTraits;
+		this.comparatorFactories = comparatorFactories;
+		this.dataflowHelperFactory = dataflowHelperFactory;
+		this.retainInput = retainInput;
+		this.tupleFilterFactory = tupleFilterFactory;
+        this.opCallbackProvider = opCallbackProvider;
+		if (outputArity > 0) {
+			recordDescriptors[0] = recDesc;
+		}
+	}
+
+	@Override
+	public IFileSplitProvider getFileSplitProvider() {
+		return fileSplitProvider;
+	}
+
+	@Override
+	public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
+		return comparatorFactories;
+	}
+
+	@Override
+	public ITypeTraits[] getTreeIndexTypeTraits() {
+		return typeTraits;
+	}
+
+	@Override
+	public IStorageManagerInterface getStorageManager() {
+		return storageManager;
+	}
+
+	@Override
+	public IIndexRegistryProvider<IIndex> getIndexRegistryProvider() {
+		return indexRegistryProvider;
+	}
+
+	@Override
+	public RecordDescriptor getRecordDescriptor() {
+		return recordDescriptors[0];
+	}
+
+	@Override
+	public IIndexDataflowHelperFactory getIndexDataflowHelperFactory() {
+		return dataflowHelperFactory;
+	}
+	
+	@Override
+	public boolean getRetainInput() {
+		return retainInput;
+	}
+	
+	@Override
+	public IOperationCallbackProvider getOpCallbackProvider() {
+	    return opCallbackProvider;
+	}
+	
+	@Override
+	public ITupleFilterFactory getTupleFilterFactory() {
+		return tupleFilterFactory;
+	}
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndex.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndex.java
new file mode 100644
index 0000000..64cbd58
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndex.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
/**
 * Interface describing the operations common to all index structures. Indexes
 * implementing this interface can easily reuse existing index operators for
 * dataflow. Users must perform operations on an IIndex via an IIndexAccessor.
 */
public interface IIndex {
    /**
     * Initializes the persistent state of an index, e.g., the root page, and
     * metadata pages.
     * 
     * @param indexFileId
     *            The file id to use for this index.
     * @throws HyracksDataException
     *             If the BufferCache throws while un/pinning or un/latching.
     */
    public void create(int indexFileId) throws HyracksDataException;

    /**
     * Opens the index backed by the given file id.
     * 
     * @param indexFileId
     *            The file id backing this index.
     */
    public void open(int indexFileId) throws HyracksDataException;

    /**
     * Closes the index.
     */
    public void close() throws HyracksDataException;

    /**
     * Creates an index accessor for performing operations on this index
     * (insert/delete/update/search/diskorderscan). An IIndexAccessor is not
     * thread safe, but different IIndexAccessors can concurrently operate
     * on the same IIndex.
     * 
     * @return An accessor for this index.
     */
    public IIndexAccessor createAccessor();

    /**
     * Prepares the index for bulk loading, returning a bulk load context. The
     * index may require to be empty for bulk loading.
     * 
     * @param fillFactor
     *            Desired fill factor in [0, 1.0].
     * @throws HyracksDataException
     *             If the BufferCache throws while un/pinning or un/latching.
     * @throws IndexException
     *             For example, if the index was already loaded and only
     *             supports a single load.
     * @return A new context for bulk loading, required for appending tuples.
     */
    public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws IndexException, HyracksDataException;

    /**
     * Append a tuple to the index in the context of a bulk load.
     * 
     * @param tuple
     *            Tuple to be inserted.
     * @param ictx
     *            Existing bulk load context.
     * @throws HyracksDataException
     *             If the BufferCache throws while un/pinning or un/latching.
     */
    public void bulkLoadAddTuple(ITupleReference tuple, IIndexBulkLoadContext ictx) throws HyracksDataException;

    /**
     * Finalize the bulk loading operation in the given context.
     * 
     * @param ictx
     *            Existing bulk load context to be finalized.
     * @throws HyracksDataException
     *             If the BufferCache throws while un/pinning or un/latching.
     */
    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException;

    /**
     * @return BufferCache underlying this index.
     */
    public IBufferCache getBufferCache();

    /**
     * @return An enum of the concrete type of this index.
     */
    public IndexType getIndexType();
}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexDataflowHelperFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexDataflowHelperFactory.java
new file mode 100644
index 0000000..ddca470
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexDataflowHelperFactory.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
/**
 * Factory for creating an {@link IndexDataflowHelper} bound to a particular
 * operator descriptor, task context, and partition. Extends Serializable so
 * factories can be embedded in serialized operator descriptors.
 */
public interface IIndexDataflowHelperFactory extends Serializable {
    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc,
            final IHyracksTaskContext ctx, int partition);
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexOperatorDescriptor.java
new file mode 100644
index 0000000..e37d374
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexOperatorDescriptor.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.IActivity;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
/**
 * Descriptor for operators that access an index. Exposes the configuration
 * that IndexDataflowHelper needs to locate, open, and operate on the index.
 */
public interface IIndexOperatorDescriptor extends IActivity {
    // File splits (one per partition) backing the index.
    public IFileSplitProvider getFileSplitProvider();

    // Supplies the node's buffer cache and file map provider.
    public IStorageManagerInterface getStorageManager();

    // Node-local registry through which IIndex instances are shared.
    public IIndexRegistryProvider<IIndex> getIndexRegistryProvider();

    public RecordDescriptor getRecordDescriptor();

    public IIndexDataflowHelperFactory getIndexDataflowHelperFactory();

    // If true, the operator retains its input tuples in its output
    // (presumably forwarding them downstream) — confirm semantics at call sites.
    public boolean getRetainInput();

    public IOperationCallbackProvider getOpCallbackProvider();
}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
new file mode 100644
index 0000000..ed20de0
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public interface IIndexRegistryProvider<IndexType> extends Serializable {
+	public IndexRegistry<IndexType> getRegistry(IHyracksTaskContext ctx);
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptor.java
new file mode 100644
index 0000000..7fba22b
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptor.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
+
/**
 * Extends IIndexOperatorDescriptor with configuration specific to tree-based
 * indexes: key comparators, field type traits, and an optional tuple filter.
 */
public interface ITreeIndexOperatorDescriptor extends IIndexOperatorDescriptor {
	// Comparators used for the tree index's keys.
	public IBinaryComparatorFactory[] getTreeIndexComparatorFactories();
	
	// Type traits of the index's fields.
	public ITypeTraits[] getTreeIndexTypeTraits();
	
	// Optional tuple filter; may be null (see AbstractTreeIndexOperatorDescriptor).
	public ITupleFilterFactory getTupleFilterFactory();
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
new file mode 100644
index 0000000..fa95ce4
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
/**
 * Per-partition lifecycle helper for an IIndex used by index operators. Maps
 * the partition's file split to a buffer-cache file id, creates/opens the
 * index, and registers it in the node-local IndexRegistry so operators on the
 * same file share one IIndex instance.
 */
public abstract class IndexDataflowHelper {
    // The index instance (created here or fetched from the registry).
    protected IIndex index;
    // Buffer-cache file id backing the index; -1 while not initialized.
    protected int indexFileId = -1;

    protected final int partition;
    protected final IIndexOperatorDescriptor opDesc;
    protected final IHyracksTaskContext ctx;

    public IndexDataflowHelper(IIndexOperatorDescriptor opDesc, final IHyracksTaskContext ctx, int partition) {
        this.opDesc = opDesc;
        this.ctx = ctx;
        this.partition = partition;
    }

    /**
     * Opens (and optionally creates) the index for this partition.
     *
     * @param forceCreate
     *            If true, (re-)initializes the index's persistent state via
     *            IIndex.create() before opening it.
     * @throws HyracksDataException
     *             If the file cannot be created/opened, or index create/open fails.
     */
    public void init(boolean forceCreate) throws HyracksDataException {
        IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
        IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
        IndexRegistry<IIndex> indexRegistry = opDesc.getIndexRegistryProvider().getRegistry(ctx);
        FileReference fileRef = getFilereference();
        int fileId = -1;
        boolean fileIsMapped = false;
        // Lock the file-map provider so mapping the file and opening it happen
        // atomically with respect to other threads using the same provider.
        synchronized (fileMapProvider) {
            fileIsMapped = fileMapProvider.isMapped(fileRef);
            if (!fileIsMapped) {
                bufferCache.createFile(fileRef);
            }
            fileId = fileMapProvider.lookupFileId(fileRef);
            try {
                // Also creates the file if it doesn't exist yet.
                bufferCache.openFile(fileId);
            } catch (HyracksDataException e) {
                // Revert state of buffer cache since file failed to open.
                if (!fileIsMapped) {
                    bufferCache.deleteFile(fileId, false);
                }
                throw e;
            }
        }
        // Only set indexFileId member after openFile() succeeds.
        indexFileId = fileId;
        // Create new index instance and register it.
        synchronized (indexRegistry) {
            // Check if the index has already been registered.
            boolean register = false;
            index = indexRegistry.get(indexFileId);
            if (index == null) {
                index = createIndexInstance();
                register = true;
            }
            if (forceCreate) {
                index.create(indexFileId);
            }
            index.open(indexFileId);
            if (register) {
                indexRegistry.register(indexFileId, index);
            }
        }
    }

    // Subclasses construct the concrete index type for this operator.
    public abstract IIndex createIndexInstance() throws HyracksDataException;

    // Resolves this partition's file from the operator's file split provider.
    public FileReference getFilereference() {
        IFileSplitProvider fileSplitProvider = opDesc.getFileSplitProvider();
        return fileSplitProvider.getFileSplits()[partition].getLocalFile();
    }

    /**
     * Releases the buffer-cache file opened by init().
     * NOTE(review): only the file handle is closed here; the index itself is
     * neither closed nor unregistered from the IndexRegistry — confirm intended.
     */
    public void deinit() throws HyracksDataException {
        if (indexFileId != -1) {
            IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
            bufferCache.closeFile(indexFileId);
            indexFileId = -1;
        }
    }

    public IIndex getIndex() {
        return index;
    }

    public IHyracksTaskContext getHyracksTaskContext() {
        return ctx;
    }

    public IIndexOperatorDescriptor getOperatorDescriptor() {
        return opDesc;
    }

    public int getIndexFileId() {
        return indexFileId;
    }

    public IOperationCallbackProvider getOpCallbackProvider() {
        return opDesc.getOpCallbackProvider();
    }
}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
new file mode 100644
index 0000000..9aba0be
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.util.HashMap;
+
/**
 * Node-local registry mapping a buffer-cache file id to the in-memory index
 * instance backed by that file, so concurrent operators can share a single
 * index. Not internally synchronized: callers synchronize on the registry
 * instance (as IndexDataflowHelper.init() does).
 *
 * @param <T>
 *            The type of index instances held by the registry. (Renamed from
 *            "IndexType", which shadowed the IndexType enum in the api package.)
 */
public class IndexRegistry<T> {

    // Declared as the Map interface and final: the reference never changes.
    private final Map<Integer, T> map = new HashMap<Integer, T>();

    /** @return The index registered under indexId, or null if none. */
    public T get(int indexId) {
        return map.get(indexId);
    }

    /** Registers (or replaces) the index under indexId. */
    public void register(int indexId, T index) {
        map.put(indexId, index);
    }

    /** Removes the index registered under indexId, if any. */
    public void unregister(int indexId) {
        map.remove(indexId);
    }

    /** @return The number of registered indexes. */
    public int size() {
        return map.size();
    }
}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
new file mode 100644
index 0000000..0b296f0
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class PermutingFrameTupleReference implements IFrameTupleReference {
+	private IFrameTupleAccessor fta;
+	private int tIndex;
+	private int[] fieldPermutation;
+
+	public void setFieldPermutation(int[] fieldPermutation) {
+		this.fieldPermutation = fieldPermutation;
+	}
+
+	public void reset(IFrameTupleAccessor fta, int tIndex) {
+		this.fta = fta;
+		this.tIndex = tIndex;
+	}
+
+	@Override
+	public IFrameTupleAccessor getFrameTupleAccessor() {
+		return fta;
+	}
+
+	@Override
+	public int getTupleIndex() {
+		return tIndex;
+	}
+
+	@Override
+	public int getFieldCount() {
+		return fieldPermutation.length;
+	}
+
+	@Override
+	public byte[] getFieldData(int fIdx) {
+		return fta.getBuffer().array();
+	}
+
+	@Override
+	public int getFieldStart(int fIdx) {
+		return fta.getTupleStartOffset(tIndex) + fta.getFieldSlotsLength()
+				+ fta.getFieldStartOffset(tIndex, fieldPermutation[fIdx]);
+	}
+
+	@Override
+	public int getFieldLength(int fIdx) {
+		return fta.getFieldLength(tIndex, fieldPermutation[fIdx]);
+	}
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
new file mode 100644
index 0000000..0020089
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor for bulk-loading a tree index from an input stream of
+ * tuples. Declares one input and zero outputs (it is a sink); the actual
+ * loading happens in {@link TreeIndexBulkLoadOperatorNodePushable}.
+ */
+public class TreeIndexBulkLoadOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    // Maps input-record field positions to index key/payload field positions.
+    private final int[] fieldPermutation;
+    // Target page fill factor (0..1] passed to the index's bulk-load context.
+    private final float fillFactor;
+
+    public TreeIndexBulkLoadOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManagerInterface storageManager,
+            IIndexRegistryProvider<IIndex> indexRegistryProvider, IFileSplitProvider fileSplitProvider,
+            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation,
+            float fillFactor, IIndexDataflowHelperFactory dataflowHelperFactory,
+            IOperationCallbackProvider opCallbackProvider) {
+        // 1 input, 0 outputs: this operator consumes frames and emits nothing.
+        super(spec, 1, 0, null, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, dataflowHelperFactory, null, false, opCallbackProvider);
+        this.fieldPermutation = fieldPermutation;
+        this.fillFactor = fillFactor;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new TreeIndexBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation,
+                fillFactor, recordDescProvider);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
new file mode 100644
index 0000000..a2d78a4
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+
+/**
+ * Push-based runtime for tree-index bulk loading. open() initializes the
+ * index and begins a bulk-load context; nextFrame() feeds every input tuple
+ * (field-permuted) into the loader; close() finalizes the load and releases
+ * the index helper.
+ */
+public class TreeIndexBulkLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
+    // Target page fill factor handed to beginBulkLoad().
+    private float fillFactor;
+    private final TreeIndexDataflowHelper treeIndexHelper;
+    private FrameTupleAccessor accessor;
+    // Bulk-load state created in open() and finalized in close().
+    private IIndexBulkLoadContext bulkLoadCtx;
+    private ITreeIndex treeIndex;
+
+    private IRecordDescriptorProvider recordDescProvider;
+
+    // Reusable tuple view remapping input fields into index field order.
+    private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+
+    public TreeIndexBulkLoadOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition, int[] fieldPermutation, float fillFactor, IRecordDescriptorProvider recordDescProvider) {
+        treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.fillFactor = fillFactor;
+        this.recordDescProvider = recordDescProvider;
+        tuple.setFieldPermutation(fieldPermutation);
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexHelper
+                .getOperatorDescriptor();
+        RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        accessor = new FrameTupleAccessor(treeIndexHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+        try {
+            // false: open an existing index rather than creating a new one.
+            treeIndexHelper.init(false);
+            treeIndex = (ITreeIndex) treeIndexHelper.getIndex();
+            treeIndex.open(treeIndexHelper.getIndexFileId());
+            bulkLoadCtx = treeIndex.beginBulkLoad(fillFactor);
+        } catch (Exception e) {
+            // cleanup in case of failure
+            treeIndexHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        for (int i = 0; i < tupleCount; i++) {
+            // Tuples must arrive in index sort order for bulk load to succeed.
+            tuple.reset(accessor, i);
+            treeIndex.bulkLoadAddTuple(tuple, bulkLoadCtx);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            treeIndex.endBulkLoad(bulkLoadCtx);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            // Always release the helper, even if endBulkLoad failed.
+            treeIndexHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorDescriptor.java
new file mode 100644
index 0000000..075a6a4
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorDescriptor.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor that creates (initializes) a tree index on disk.
+ * A source/sink operator with zero inputs and zero outputs; all work happens
+ * in {@link TreeIndexCreateOperatorNodePushable#initialize()}.
+ */
+public class TreeIndexCreateOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public TreeIndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManagerInterface storageManager,
+            IIndexRegistryProvider<IIndex> indexRegistryProvider, IFileSplitProvider fileSplitProvider,
+            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
+            IIndexDataflowHelperFactory dataflowHelperFactory, IOperationCallbackProvider opCallbackProvider) {
+        // 0 inputs, 0 outputs: pure side-effect operator (index creation).
+        super(spec, 0, 0, null, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, dataflowHelperFactory, null, false, opCallbackProvider);
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new TreeIndexCreateOperatorNodePushable(this, ctx, partition);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorNodePushable.java
new file mode 100644
index 0000000..21348a0
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexCreateOperatorNodePushable.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
+
+/**
+ * Runtime for {@link TreeIndexCreateOperatorDescriptor}: creates the index
+ * in initialize() and immediately releases the dataflow helper. Has no
+ * inputs or outputs, so all frame-writer hooks are no-ops.
+ */
+public class TreeIndexCreateOperatorNodePushable extends AbstractOperatorNodePushable {
+    protected final TreeIndexDataflowHelper treeIndexHelper;
+
+    public TreeIndexCreateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition) {
+        treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+    }
+
+    @Override
+    public void deinitialize() throws HyracksDataException {
+    }
+
+    @Override
+    public int getInputArity() {
+        // Source operator: no inputs.
+        return 0;
+    }
+
+    @Override
+    public IFrameWriter getInputFrameWriter(int index) {
+        return null;
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        try {
+        	// true: create the index file/structure rather than open an existing one.
+        	treeIndexHelper.init(true);
+        } finally {
+        	// Release the helper regardless of success; creation is one-shot.
+        	treeIndexHelper.deinit();
+        }
+    }
+
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDataflowHelper.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDataflowHelper.java
new file mode 100644
index 0000000..10d1077
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDataflowHelper.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+
+/**
+ * Base dataflow helper for tree-structured indexes. Subclasses supply the
+ * concrete index instance via createIndexInstance(); this class adds a
+ * factory method for disk-order scan cursors.
+ */
+public abstract class TreeIndexDataflowHelper extends IndexDataflowHelper {
+    // Same descriptor as in the superclass, pre-cast to the tree-index view.
+    protected ITreeIndexOperatorDescriptor treeOpDesc;
+
+    public TreeIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition) {
+        super(opDesc, ctx, partition);
+        this.treeOpDesc = (ITreeIndexOperatorDescriptor) opDesc;
+    }
+
+    // Subclasses construct the concrete ITreeIndex (e.g. BTree, RTree).
+    public abstract ITreeIndex createIndexInstance() throws HyracksDataException;
+
+    // Cursor that scans index pages in physical (disk) order, not key order.
+    public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
+        return new TreeDiskOrderScanCursor(leafFrame);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
new file mode 100644
index 0000000..324485e
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor for scanning a tree index in disk (physical page)
+ * order and emitting all of its tuples. Zero inputs, one output; no
+ * comparators are needed since no key order is involved.
+ */
+public class TreeIndexDiskOrderScanOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public TreeIndexDiskOrderScanOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IIndexDataflowHelperFactory dataflowHelperFactory, IOperationCallbackProvider opCallbackProvider) {
+        // 0 inputs, 1 output; comparatorFactories is null (scan is unordered).
+        super(spec, 0, 1, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits, null,
+                dataflowHelperFactory, null, false, opCallbackProvider);
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new TreeIndexDiskOrderScanOperatorNodePushable(this, ctx, partition);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
new file mode 100644
index 0000000..d02a570
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+
+/**
+ * Source runtime for the disk-order scan: opens the index, walks every tuple
+ * via a disk-order cursor, copies each tuple into output frames, and flushes
+ * them to the downstream writer. All work happens in initialize().
+ */
+public class TreeIndexDiskOrderScanOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+    private final TreeIndexDataflowHelper treeIndexHelper;
+    private ITreeIndex treeIndex;
+
+    public TreeIndexDiskOrderScanOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition) {
+        treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        try {
+            // false: open an existing index; a scan never creates one.
+            treeIndexHelper.init(false);
+            treeIndex = (ITreeIndex) treeIndexHelper.getIndex();
+            ITreeIndexFrame cursorFrame = treeIndex.getLeafFrameFactory().createFrame();
+            ITreeIndexCursor cursor = treeIndexHelper.createDiskOrderScanCursor(cursorFrame);
+            ITreeIndexAccessor indexAccessor = (ITreeIndexAccessor) treeIndex.createAccessor();
+            writer.open();
+            try {
+                indexAccessor.diskOrderScan(cursor);
+                int fieldCount = treeIndex.getFieldCount();
+                ByteBuffer frame = treeIndexHelper.getHyracksTaskContext().allocateFrame();
+                FrameTupleAppender appender = new FrameTupleAppender(treeIndexHelper.getHyracksTaskContext()
+                        .getFrameSize());
+                appender.reset(frame, true);
+                ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+                DataOutput dos = tb.getDataOutput();
+
+                while (cursor.hasNext()) {
+                    tb.reset();
+                    cursor.next();
+
+                    // Copy the cursor's tuple field-by-field into the builder.
+                    ITupleReference frameTuple = cursor.getTuple();
+                    for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+                        dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+                        tb.addFieldEndOffset();
+                    }
+
+                    // If the frame is full, flush it and retry the append; a
+                    // second failure means one tuple exceeds the frame size.
+                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                        FrameUtils.flushFrame(frame, writer);
+                        appender.reset(frame, true);
+                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                            throw new IllegalStateException();
+                        }
+                    }
+                }
+                // Flush any partially filled final frame.
+                if (appender.getTupleCount() > 0) {
+                    FrameUtils.flushFrame(frame, writer);
+                }
+            } catch (Exception e) {
+                // Notify the downstream writer before propagating the failure.
+                writer.fail();
+                throw new HyracksDataException(e);
+            } finally {
+                cursor.close();
+                writer.close();
+            }
+        } catch (Exception e) {
+            deinitialize();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void deinitialize() throws HyracksDataException {
+        treeIndexHelper.deinit();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
new file mode 100644
index 0000000..7c58031
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor for dropping (deleting) a tree index: unregisters the
+ * in-memory instance and removes the backing file. Zero inputs and outputs;
+ * the work happens in {@link TreeIndexDropOperatorNodePushable}.
+ */
+public class TreeIndexDropOperatorDescriptor extends
+		AbstractSingleActivityOperatorDescriptor {
+
+	private static final long serialVersionUID = 1L;
+
+	// Provides buffer cache and file-map services for the target index.
+	private IStorageManagerInterface storageManager;
+	// Registry holding the live index instances to unregister from.
+	private IIndexRegistryProvider<IIndex> treeIndexRegistryProvider;
+	// Locates the per-partition index file to delete.
+	private IFileSplitProvider fileSplitProvider;
+
+	public TreeIndexDropOperatorDescriptor(IOperatorDescriptorRegistry spec,
+			IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider) {
+		super(spec, 0, 0);
+		this.storageManager = storageManager;
+		this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+		this.fileSplitProvider = fileSplitProvider;
+	}
+
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IRecordDescriptorProvider recordDescProvider,
+			int partition, int nPartitions) {
+		return new TreeIndexDropOperatorNodePushable(ctx, storageManager,
+				treeIndexRegistryProvider, fileSplitProvider, partition);
+	}
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
new file mode 100644
index 0000000..5f3c0b5
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+/**
+ * Runtime for dropping a tree index. initialize() resolves the index file id,
+ * unregisters the index instance from the registry, and deletes the file from
+ * the buffer cache. Failures are logged but deliberately not rethrown (see
+ * the TODO below), so a drop of a missing index does not hang the job.
+ */
+public class TreeIndexDropOperatorNodePushable extends AbstractOperatorNodePushable {
+    private static final Logger LOGGER = Logger.getLogger(TreeIndexDropOperatorNodePushable.class.getName());
+
+    private final IHyracksTaskContext ctx;
+    private IIndexRegistryProvider<IIndex> treeIndexRegistryProvider;
+    private IStorageManagerInterface storageManager;
+    private IFileSplitProvider fileSplitProvider;
+    // Partition number selects which file split this runtime is dropping.
+    private int partition;
+
+    public TreeIndexDropOperatorNodePushable(IHyracksTaskContext ctx, IStorageManagerInterface storageManager,
+            IIndexRegistryProvider<IIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
+            int partition) {
+        this.ctx = ctx;
+        this.storageManager = storageManager;
+        this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+        this.fileSplitProvider = fileSplitProvider;
+        this.partition = partition;
+    }
+
+    @Override
+    public void deinitialize() throws HyracksDataException {
+    }
+
+    @Override
+    public int getInputArity() {
+        // No inputs: the drop is driven entirely by initialize().
+        return 0;
+    }
+
+    @Override
+    public IFrameWriter getInputFrameWriter(int index) {
+        return null;
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        try {
+            IndexRegistry<IIndex> treeIndexRegistry = treeIndexRegistryProvider.getRegistry(ctx);
+            IBufferCache bufferCache = storageManager.getBufferCache(ctx);
+            IFileMapProvider fileMapProvider = storageManager.getFileMapProvider(ctx);
+
+            FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+            int indexFileId = -1;
+            // Resolve the file id under the provider's lock so the mapping
+            // cannot change between the existence check and the lookup.
+            synchronized (fileMapProvider) {
+                boolean fileIsMapped = fileMapProvider.isMapped(f);
+                if (!fileIsMapped) {
+                    throw new HyracksDataException("Cannot drop Tree with name " + f.toString()
+                            + ". No file mapping exists.");
+                }
+                indexFileId = fileMapProvider.lookupFileId(f);
+            }
+            // Unregister tree instance.
+            synchronized (treeIndexRegistry) {
+                treeIndexRegistry.unregister(indexFileId);
+            }
+
+            // remove name to id mapping
+            bufferCache.deleteFile(indexFileId, false);
+        }
+        // TODO: for the time being we don't throw,
+        // with proper exception handling (no hanging job problem) we should
+        // throw
+        catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Tree Drop Operator Failed Due To Exception: " + e.getMessage());
+            }
+        }
+    }
+
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
new file mode 100644
index 0000000..a615386
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor for applying tuple-at-a-time modifications (insert, update,
+ * delete, upsert) to a tree index. One input, one output: each incoming tuple is
+ * permuted into index field order, the configured {@link IndexOp} is applied, and
+ * the input frames are forwarded downstream unchanged.
+ */
+public class TreeIndexInsertUpdateDeleteOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    // Maps input tuple fields onto the index's expected field order.
+    private final int[] fieldPermutation;
+
+    // Modification operation applied to every tuple. Final: the descriptor is
+    // immutable after construction and is serialized for job distribution.
+    private final IndexOp op;
+
+    public TreeIndexInsertUpdateDeleteOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, IndexOp op,
+            IIndexDataflowHelperFactory dataflowHelperFactory, ITupleFilterFactory tupleFilterFactory,
+            IOperationCallbackProvider opCallbackProvider) {
+        super(spec, 1, 1, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, dataflowHelperFactory, tupleFilterFactory, false, opCallbackProvider);
+        this.fieldPermutation = fieldPermutation;
+        this.op = op;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new TreeIndexInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
+                recordDescProvider, op);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
new file mode 100644
index 0000000..e05568f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilter;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+
+/**
+ * Push-based runtime for the tree-index insert/update/delete operator. For every
+ * incoming tuple it optionally applies a tuple filter, permutes the fields into
+ * index order, performs the configured IndexOp via an index accessor, and then
+ * forwards a copy of the whole input frame to the downstream writer.
+ */
+public class TreeIndexInsertUpdateDeleteOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+    private final TreeIndexDataflowHelper treeIndexHelper;
+    private FrameTupleAccessor accessor;
+    private final IRecordDescriptorProvider recordDescProvider;
+    private final IndexOp op;
+    // Permuting view that reorders input tuple fields into the index's field order.
+    private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+    // Non-permuted view of the current tuple; only allocated when a filter is configured.
+    private FrameTupleReference frameTuple;
+    private ByteBuffer writeBuffer;
+    private IIndexAccessor indexAccessor;
+    private ITupleFilter tupleFilter;
+
+    public TreeIndexInsertUpdateDeleteOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, int[] fieldPermutation,
+            IRecordDescriptorProvider recordDescProvider, IndexOp op) {
+        treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.recordDescProvider = recordDescProvider;
+        this.op = op;
+        tuple.setFieldPermutation(fieldPermutation);
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexHelper
+                .getOperatorDescriptor();
+        RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        accessor = new FrameTupleAccessor(treeIndexHelper.getHyracksTaskContext().getFrameSize(), inputRecDesc);
+        writeBuffer = treeIndexHelper.getHyracksTaskContext().allocateFrame();
+        // Open the downstream writer before initializing the index so that a later
+        // failure still has an opened writer for the fail()/close() protocol.
+        writer.open();
+        try {
+            treeIndexHelper.init(false);
+            ITreeIndex treeIndex = (ITreeIndex) treeIndexHelper.getIndex();
+            indexAccessor = treeIndex.createAccessor();
+            ITupleFilterFactory tupleFilterFactory = opDesc.getTupleFilterFactory();
+            if (tupleFilterFactory != null) {
+                tupleFilter = tupleFilterFactory.createTupleFilter(treeIndexHelper.ctx);
+                frameTuple = new FrameTupleReference();
+            }
+        } catch (Exception e) {
+            // cleanup in case of failure
+            treeIndexHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        for (int i = 0; i < tupleCount; i++) {
+            try {
+                if (tupleFilter != null) {
+                    // Filter sees the tuple in its original (un-permuted) field order.
+                    frameTuple.reset(accessor, i);
+                    if (!tupleFilter.accept(frameTuple)) {
+                        continue;
+                    }
+                }
+                tuple.reset(accessor, i);
+                switch (op) {
+                    case INSERT: {
+                        indexAccessor.insert(tuple);
+                        break;
+                    }
+                    case UPDATE: {
+                        indexAccessor.update(tuple);
+                        break;
+                    }
+                    case UPSERT: {
+                        indexAccessor.upsert(tuple);
+                        break;
+                    }
+                    case DELETE: {
+                        indexAccessor.delete(tuple);
+                        break;
+                    }
+                    default: {
+                        throw new HyracksDataException("Unsupported operation " + op
+                                + " in tree index InsertUpdateDelete operator");
+                    }
+                }
+            } catch (HyracksDataException e) {
+                throw e;
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+        }
+        // Pass a copy of the frame to next op.
+        System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0, buffer.capacity());
+        FrameUtils.flushFrame(writeBuffer, writer);
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            writer.close();
+        } finally {
+            // Always release the index, even if closing the writer fails.
+            treeIndexHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        writer.fail();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexSearchOperatorNodePushable.java
new file mode 100644
index 0000000..5c19483
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexSearchOperatorNodePushable.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+
+/**
+ * Base push-based runtime for tree-index search operators. For each input tuple a
+ * subclass-provided search predicate is reset, the index is probed, and every
+ * matching index entry is appended to the output frame. When retainInput is set,
+ * the probing tuple's fields are prepended to each result tuple.
+ */
+public abstract class TreeIndexSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+    protected final TreeIndexDataflowHelper treeIndexHelper;
+    protected FrameTupleAccessor accessor;
+
+    protected ByteBuffer writeBuffer;
+    protected FrameTupleAppender appender;
+    protected ArrayTupleBuilder tb;
+    protected DataOutput dos;
+
+    protected ITreeIndex treeIndex;
+    protected ISearchPredicate searchPred;
+    protected IIndexCursor cursor;
+    protected ITreeIndexFrame cursorFrame;
+    protected IIndexAccessor indexAccessor;
+
+    protected final RecordDescriptor inputRecDesc;
+    // When true, each output tuple carries the input tuple's fields followed by
+    // the matching index entry's fields.
+    protected final boolean retainInput;
+    protected FrameTupleReference frameTuple;
+
+    public TreeIndexSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition, IRecordDescriptorProvider recordDescProvider) {
+        treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.retainInput = treeIndexHelper.getOperatorDescriptor().getRetainInput();
+        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+    }
+
+    /** Creates the subclass-specific search predicate reused for every probe. */
+    protected abstract ISearchPredicate createSearchPredicate();
+
+    /** Rebinds the search predicate to the input tuple at the given index. */
+    protected abstract void resetSearchPredicate(int tupleIndex);
+
+    protected IIndexCursor createCursor() {
+        return indexAccessor.createSearchCursor();
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        accessor = new FrameTupleAccessor(treeIndexHelper.getHyracksTaskContext().getFrameSize(), inputRecDesc);
+        writer.open();
+        try {
+            treeIndexHelper.init(false);
+            treeIndex = (ITreeIndex) treeIndexHelper.getIndex();
+            cursorFrame = treeIndex.getLeafFrameFactory().createFrame();
+            searchPred = createSearchPredicate();
+            writeBuffer = treeIndexHelper.getHyracksTaskContext().allocateFrame();
+            tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
+            dos = tb.getDataOutput();
+            appender = new FrameTupleAppender(treeIndexHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+            indexAccessor = treeIndex.createAccessor();
+            cursor = createCursor();
+            if (retainInput) {
+                frameTuple = new FrameTupleReference();
+            }
+        } catch (Exception e) {
+            // Release the index before propagating the failure.
+            treeIndexHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /** Drains the cursor, appending each match (plus input fields if retained) to the output. */
+    protected void writeSearchResults(int tupleIndex) throws Exception {
+        while (cursor.hasNext()) {
+            tb.reset();
+            cursor.next();
+            if (retainInput) {
+                frameTuple.reset(accessor, tupleIndex);
+                for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+                    dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+                    tb.addFieldEndOffset();
+                }
+            }
+            ITupleReference tuple = cursor.getTuple();
+            for (int i = 0; i < tuple.getFieldCount(); i++) {
+                dos.write(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+                tb.addFieldEndOffset();
+            }
+            // Flush the current frame when full, then retry the append once.
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                FrameUtils.flushFrame(writeBuffer, writer);
+                appender.reset(writeBuffer, true);
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    // A single result tuple larger than a frame cannot be emitted.
+                    throw new IllegalStateException();
+                }
+            }
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        try {
+            for (int i = 0; i < tupleCount; i++) {
+                resetSearchPredicate(i);
+                cursor.reset();
+                indexAccessor.search(cursor, searchPred);
+                writeSearchResults(i);
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            if (appender.getTupleCount() > 0) {
+                FrameUtils.flushFrame(writeBuffer, writer);
+            }
+            // Close the cursor before the writer so that a failure while closing
+            // the writer cannot leak the cursor's resources. (The original closed
+            // the writer first, skipping cursor.close() if writer.close() threw.)
+            try {
+                cursor.close();
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            } finally {
+                writer.close();
+            }
+        } finally {
+            treeIndexHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        writer.fail();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
new file mode 100644
index 0000000..6bf0983
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor that gathers statistics for a tree index and emits them as
+ * a single UTF8-string tuple. Declared as a source operator (0 inputs, 1 output).
+ */
+public class TreeIndexStatsOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    // Output schema: one UTF8-string field holding the rendered statistics.
+    private static final RecordDescriptor recDesc = new RecordDescriptor(
+            new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+
+    public TreeIndexStatsOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManagerInterface storageManager,
+            IIndexRegistryProvider<IIndex> indexRegistryProvider, IFileSplitProvider fileSplitProvider,
+            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
+            IIndexDataflowHelperFactory dataflowHelperFactory, IOperationCallbackProvider opCallbackProvider) {
+        // 0 inputs / 1 output; no tuple filter; retainInput = false.
+        super(spec, 0, 1, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, dataflowHelperFactory, null, false, opCallbackProvider);
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new TreeIndexStatsOperatorNodePushable(this, ctx, partition);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
new file mode 100644
index 0000000..50486f2
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+/**
+ * Source operator runtime that opens a tree index, gathers page/tuple statistics
+ * via TreeIndexStatsGatherer, and emits the rendered stats as one UTF8-string
+ * tuple on its single output.
+ */
+public class TreeIndexStatsOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+    private final TreeIndexDataflowHelper treeIndexHelper;
+    private final IHyracksTaskContext ctx;
+    private TreeIndexStatsGatherer statsGatherer;
+
+    public TreeIndexStatsOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition) {
+        treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.ctx = ctx;
+    }
+
+    @Override
+    public void deinitialize() throws HyracksDataException {
+        // No-op: all resources are released in initialize()'s finally block.
+    }
+
+    @Override
+    public IFrameWriter getInputFrameWriter(int index) {
+        // Source operator: there are no inputs.
+        return null;
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        try {
+            writer.open();
+            treeIndexHelper.init(false);
+            ITreeIndex treeIndex = (ITreeIndex) treeIndexHelper.getIndex();
+            IBufferCache bufferCache = treeIndexHelper.getOperatorDescriptor().getStorageManager().getBufferCache(ctx);
+            statsGatherer = new TreeIndexStatsGatherer(bufferCache, treeIndex.getFreePageManager(),
+                    treeIndexHelper.getIndexFileId(), treeIndex.getRootPageId());
+            TreeIndexStats stats = statsGatherer.gatherStats(treeIndex.getLeafFrameFactory().createFrame(), treeIndex
+                    .getInteriorFrameFactory().createFrame(), treeIndex.getFreePageManager().getMetaDataFrameFactory()
+                    .createFrame());
+            // Write the stats output as a single string field.
+            ByteBuffer frame = ctx.allocateFrame();
+            FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+            appender.reset(frame, true);
+            ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+            DataOutput dos = tb.getDataOutput();
+            tb.reset();
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(stats.toString(), dos);
+            tb.addFieldEndOffset();
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                // A stats string larger than a single frame cannot be emitted.
+                throw new IllegalStateException();
+            }
+            FrameUtils.flushFrame(frame, writer);
+        } catch (Exception e) {
+            writer.fail();
+            // Propagate the failure instead of silently swallowing it (the original
+            // discarded the exception entirely), so the job learns no stats were produced.
+            throw new HyracksDataException(e);
+        } finally {
+            try {
+                writer.close();
+            } finally {
+                // Release the index on success and failure alike; the original
+                // deinitialized only on the failure path, leaking it on success.
+                treeIndexHelper.deinit();
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenThread.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenThread.java
new file mode 100644
index 0000000..e8d3d56
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenThread.java
@@ -0,0 +1,71 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.io.IOException;
+import java.util.Random;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+
+/**
+ * Quick & dirty data generator for multi-thread testing. Produces up to
+ * maxNumBatches tuple batches and publishes them on a bounded blocking queue.
+ * Batches are recycled through a small ring of pre-allocated TupleBatch
+ * instances; consumers must call releaseBatch() when finished so the slot can
+ * be reused.
+ */
+@SuppressWarnings("rawtypes")
+public class DataGenThread extends Thread {
+    // Bounded hand-off queue between this producer and the consumer threads.
+    public final BlockingQueue<TupleBatch> tupleBatchQueue;
+    private final int maxNumBatches;
+    private final int maxOutstandingBatches;
+    private int numBatches = 0;
+    private final Random rnd;
+
+    // maxOutstandingBatches pre-created tuple-batches for populating the queue.
+    private final TupleBatch[] tupleBatches;
+    private int ringPos;
+
+    /**
+     * @param numConsumers currently unused; retained for caller compatibility.
+     * @param sorted if true, field generators produce monotonically increasing values.
+     */
+    public DataGenThread(int numConsumers, int maxNumBatches, int batchSize, ISerializerDeserializer[] fieldSerdes, int payloadSize, int rndSeed, int maxOutstandingBatches, boolean sorted) {
+        this.maxNumBatches = maxNumBatches;
+        this.maxOutstandingBatches = maxOutstandingBatches;
+        rnd = new Random(rndSeed);
+        tupleBatches = new TupleBatch[maxOutstandingBatches];
+        IFieldValueGenerator[] fieldGens = DataGenUtils.getFieldGensFromSerdes(fieldSerdes, rnd, sorted);
+        for (int i = 0; i < maxOutstandingBatches; i++) {
+            tupleBatches[i] = new TupleBatch(batchSize, fieldGens, fieldSerdes, payloadSize);
+        }
+        tupleBatchQueue = new LinkedBlockingQueue<TupleBatch>(maxOutstandingBatches);
+        ringPos = 0;
+    }
+
+    @Override
+    public void run() {
+        while (numBatches < maxNumBatches) {
+            boolean added = false;
+            try {
+                // Only reuse a ring slot once its consumer has released it;
+                // otherwise spin and retry (best-effort test utility).
+                if (tupleBatches[ringPos].inUse.compareAndSet(false, true)) {
+                    tupleBatches[ringPos].generate();
+                    tupleBatchQueue.put(tupleBatches[ringPos]);
+                    added = true;
+                }
+            } catch (IOException e) {
+                // Best-effort test utility: report and keep generating.
+                e.printStackTrace();
+            } catch (InterruptedException e) {
+                // Restore the interrupt status and stop generating, instead of
+                // printing and busy-retrying forever as before.
+                Thread.currentThread().interrupt();
+                return;
+            }
+            if (added) {
+                numBatches++;
+                ringPos++;
+                if (ringPos >= maxOutstandingBatches) {
+                    ringPos = 0;
+                }
+            }
+        }
+    }
+
+    /** Blocks until the producer has published a batch. */
+    public TupleBatch getBatch() throws InterruptedException {
+        return tupleBatchQueue.take();
+    }
+
+    /** Marks a consumed batch's ring slot as reusable. */
+    public void releaseBatch(TupleBatch batch) {
+        batch.inUse.set(false);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenUtils.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenUtils.java
new file mode 100644
index 0000000..fdbaa3e
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DataGenUtils.java
@@ -0,0 +1,46 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+
+@SuppressWarnings("rawtypes") 
+public class DataGenUtils {
+    public static IFieldValueGenerator getFieldGenFromSerde(ISerializerDeserializer serde, Random rnd, boolean sorted) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            if (sorted) {
+                return new SortedIntegerFieldValueGenerator();
+            } else {
+                return new IntegerFieldValueGenerator(rnd);
+            }
+        } else if (serde instanceof FloatSerializerDeserializer) {
+            if (sorted) {
+                return new SortedFloatFieldValueGenerator();
+            } else {
+                return new FloatFieldValueGenerator(rnd);
+            }
+        } else if (serde instanceof DoubleSerializerDeserializer) {
+            if (sorted) {
+                return new SortedDoubleFieldValueGenerator();
+            } else {
+                return new DoubleFieldValueGenerator(rnd);
+            }
+        } else if (serde instanceof UTF8StringSerializerDeserializer) {
+            return new StringFieldValueGenerator(20, rnd);
+        }
+        System.out.println("NULL");
+        return null;
+    }
+    
+    public static IFieldValueGenerator[] getFieldGensFromSerdes(ISerializerDeserializer[] serdes, Random rnd, boolean sorted) {
+        IFieldValueGenerator[] fieldValueGens = new IFieldValueGenerator[serdes.length];
+        for (int i = 0; i < serdes.length; i++) {
+            fieldValueGens[i] = getFieldGenFromSerde(serdes[i], rnd, sorted);
+        }
+        return fieldValueGens;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DoubleFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DoubleFieldValueGenerator.java
new file mode 100644
index 0000000..fcac93a
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/DoubleFieldValueGenerator.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.util.Random;
+
+/** Supplies uniformly distributed pseudo-random {@code Double} field values. */
+public class DoubleFieldValueGenerator implements IFieldValueGenerator<Double> {
+    // Randomness source; seeding it externally keeps generated data reproducible.
+    protected final Random rnd;
+
+    public DoubleFieldValueGenerator(Random rnd) {
+        this.rnd = rnd;
+    }
+
+    @Override
+    public Double next() {
+        return Double.valueOf(rnd.nextDouble());
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/FloatFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/FloatFieldValueGenerator.java
new file mode 100644
index 0000000..6f21c77
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/FloatFieldValueGenerator.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.util.Random;
+
+public class FloatFieldValueGenerator implements IFieldValueGenerator<Float> {
+    protected final Random rnd;
+
+    public FloatFieldValueGenerator(Random rnd) {
+        this.rnd = rnd;
+    }
+
+    @Override
+    public Float next() {
+        return rnd.nextFloat();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
new file mode 100644
index 0000000..ee0d30b
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
@@ -0,0 +1,5 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
/**
 * Produces a stream of field values of type {@code T} for synthetic
 * tuple/data generation (see {@code TupleGenerator}).
 */
public interface IFieldValueGenerator<T> {
    /** @return the next generated value. */
    public T next();
}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IntegerFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IntegerFieldValueGenerator.java
new file mode 100644
index 0000000..134b1f7
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/IntegerFieldValueGenerator.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.util.Random;
+
+public class IntegerFieldValueGenerator implements IFieldValueGenerator<Integer> {
+    protected final Random rnd;
+
+    public IntegerFieldValueGenerator(Random rnd) {
+        this.rnd = rnd;
+    }
+
+    @Override
+    public Integer next() {
+        return rnd.nextInt();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedDoubleFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedDoubleFieldValueGenerator.java
new file mode 100644
index 0000000..4193811
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedDoubleFieldValueGenerator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+public class SortedDoubleFieldValueGenerator implements IFieldValueGenerator<Double> {
+    private double val = 0.0d;
+
+    public SortedDoubleFieldValueGenerator() {
+    }
+    
+    public SortedDoubleFieldValueGenerator(double startVal) {
+        val = startVal;
+    }
+    
+    @Override
+    public Double next() {
+        return val++;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedFloatFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedFloatFieldValueGenerator.java
new file mode 100644
index 0000000..1f6b315
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedFloatFieldValueGenerator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+public class SortedFloatFieldValueGenerator implements IFieldValueGenerator<Float> {
+    private float val = 0.0f;
+
+    public SortedFloatFieldValueGenerator() {
+    }
+    
+    public SortedFloatFieldValueGenerator(float startVal) {
+        val = startVal;
+    }
+    
+    @Override
+    public Float next() {
+        return val++;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedIntegerFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedIntegerFieldValueGenerator.java
new file mode 100644
index 0000000..8f7fdcf
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/SortedIntegerFieldValueGenerator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+public class SortedIntegerFieldValueGenerator implements IFieldValueGenerator<Integer> {
+    private int val = 0;
+
+    public SortedIntegerFieldValueGenerator() {
+    }
+    
+    public SortedIntegerFieldValueGenerator(int startVal) {
+        val = startVal;
+    }
+    
+    @Override
+    public Integer next() {
+        return val++;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/StringFieldValueGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/StringFieldValueGenerator.java
new file mode 100644
index 0000000..0218542
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/StringFieldValueGenerator.java
@@ -0,0 +1,27 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.util.Random;
+
+public class StringFieldValueGenerator implements IFieldValueGenerator<String> {
+    private int maxLen;
+    private final Random rnd;
+    
+    public StringFieldValueGenerator(int maxLen, Random rnd) {
+        this.maxLen = maxLen;
+        this.rnd = rnd;
+    }
+
+    public void setMaxLength(int maxLen) {
+        this.maxLen = maxLen;
+    }
+    
+    @Override
+    public String next() {
+        String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < s.length() && i < maxLen; i++) {
+            strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
+        }
+        return strBuilder.toString();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleBatch.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleBatch.java
new file mode 100644
index 0000000..bfa523f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleBatch.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+@SuppressWarnings("rawtypes")
+public class TupleBatch {
+    private final int size;
+    private final TupleGenerator[] tupleGens;
+    public final AtomicBoolean inUse = new AtomicBoolean(false);
+    
+    public TupleBatch(int size, IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {        
+        this.size = size;
+        tupleGens = new TupleGenerator[size];
+        for (int i = 0; i < size; i++) {
+            tupleGens[i] = new TupleGenerator(fieldGens, fieldSerdes, payloadSize);
+        }
+    }
+    
+    public void generate() throws IOException {
+        for(TupleGenerator tupleGen : tupleGens) {
+            tupleGen.next();
+        }
+    }
+    
+    public int size() {
+        return size;
+    }
+    
+    public ITupleReference get(int ix) {
+        return tupleGens[ix].get();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleGenerator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleGenerator.java
new file mode 100644
index 0000000..2801205
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/datagen/TupleGenerator.java
@@ -0,0 +1,51 @@
+package edu.uci.ics.hyracks.storage.am.common.datagen;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
/**
 * Generates tuples by serializing one value per field generator, optionally
 * appending a fixed-size zero-filled payload as an extra trailing field.
 * The same tuple instance is reused across calls to next().
 */
@SuppressWarnings({"rawtypes", "unchecked" })
public class TupleGenerator {    
    // One serializer per generated field, parallel to fieldGens.
    protected final ISerializerDeserializer[] fieldSerdes;
    // One value generator per field.
    protected final IFieldValueGenerator[] fieldGens;
    // Reusable builder backing the single tuple instance below.
    protected final ArrayTupleBuilder tb;
    // Reused on every next() call; see the warning on next().
    protected final ArrayTupleReference tuple;
    // Zero-filled filler written as an extra last field, or null when
    // payloadSize <= 0 (no payload field at all).
    protected final byte[] payload;
    protected final DataOutput tbDos;
    
    public TupleGenerator(IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {
        this.fieldSerdes = fieldSerdes;
        this.fieldGens = fieldGens;
        tuple = new ArrayTupleReference();
        if (payloadSize > 0) {
            // Reserve one extra field slot for the payload.
            tb = new ArrayTupleBuilder(fieldSerdes.length + 1);
            payload = new byte[payloadSize];
        } else {
            tb = new ArrayTupleBuilder(fieldSerdes.length);
            payload = null;
        }        
        tbDos = tb.getDataOutput();
    }

    /**
     * Builds the next tuple: one generated value per field (in field order),
     * then the payload field if configured.
     *
     * NOTE: returns the same ArrayTupleReference instance every call —
     * callers must consume or copy the tuple before calling next() again.
     */
    public ITupleReference next() throws IOException {
        tb.reset();
        for (int i = 0; i < fieldSerdes.length; i++) {
            fieldSerdes[i].serialize(fieldGens[i].next(), tbDos);
            tb.addFieldEndOffset();
        }
        if (payload != null) {
            tbDos.write(payload);
            tb.addFieldEndOffset();
        }
        tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
        return tuple;
    }
    
    /** @return the tuple produced by the most recent next() call. */
    public ITupleReference get() {
        return tuple;
    }
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
new file mode 100644
index 0000000..31c674d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.frames;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+// all meta pages of this kind have a negative level
+// the first meta page has level -1, all other meta pages have level -2
+// the first meta page is special because it guarantees to have a correct max page
+// other meta pages (i.e., with level -2) have junk in the max page field
+
public class LIFOMetaDataFrame implements ITreeIndexMetaDataFrame {

    // Arbitrarily chosen magic integer; a meta page is considered valid iff
    // the int at validOff equals this value (see isValid/setValid).
    protected static final int MAGIC_VALID_INT = 0x5bd1e995;
    
	// Byte offsets of the fixed header fields within the page buffer.
	protected static final int tupleCountOff = 0; //0
	protected static final int freeSpaceOff = tupleCountOff + 4; //4
	protected static final int maxPageOff = freeSpaceOff + 4; //8
	// NOTE(review): +12 (not +4) leaves an 8-byte gap after the max-page
	// int — presumably reserved space; confirm before changing the layout.
	protected static final int levelOff = maxPageOff + 12; //20
	protected static final int nextPageOff = levelOff + 1; // 21
	protected static final int validOff = nextPageOff + 4; // 25

	protected ICachedPage page = null;
	protected ByteBuffer buf = null;

	// Per the class comment: only meaningful on the first meta page (level -1);
	// other meta pages have junk in this field.
	public int getMaxPage() {
		return buf.getInt(maxPageOff);
	}

	public void setMaxPage(int maxPage) {
		buf.putInt(maxPageOff, maxPage);
	}

	// Pops the most recently added free page id (LIFO order), or returns -1
	// when this page records no free pages.
	public int getFreePage() {
		int tupleCount = buf.getInt(tupleCountOff);
		if (tupleCount > 0) {
			// return the last page from the linked list of free pages
			// TODO: this is a dumb policy, but good enough for now
			int lastPageOff = buf.getInt(freeSpaceOff) - 4;
			buf.putInt(freeSpaceOff, lastPageOff);
			buf.putInt(tupleCountOff, tupleCount - 1);
			return buf.getInt(lastPageOff);
		} else {
			return -1;
		}
	}

	// must be checked before adding free page
	// user of this class is responsible for getting a free page as a new meta
	// page, latching it, etc. if there is no space on this page
	public boolean hasSpace() {
		return buf.getInt(freeSpaceOff) + 4 < buf.capacity();
	}

	// no bounds checking is done, there must be free space
	// (callers must check hasSpace() first)
	public void addFreePage(int freePage) {
		int freeSpace = buf.getInt(freeSpaceOff);
		buf.putInt(freeSpace, freePage);
		buf.putInt(freeSpaceOff, freeSpace + 4);
		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
	}

	@Override
	public byte getLevel() {
		return buf.get(levelOff);
	}

	@Override
	public void setLevel(byte level) {
		buf.put(levelOff, level);
	}

	@Override
	public ICachedPage getPage() {
		return page;
	}

	@Override
	public void setPage(ICachedPage page) {
		this.page = page;
		this.buf = page.getBuffer();
	}

	// Initializes an empty meta page: no free-page entries, free space starts
	// right after the header, no next page, and marked invalid.
	@Override
	public void initBuffer(byte level) {
		buf.putInt(tupleCountOff, 0);
		buf.putInt(freeSpaceOff, validOff + 4);
		//buf.putInt(maxPageOff, -1);
		buf.put(levelOff, level);
		buf.putInt(nextPageOff, -1);
		setValid(false);
	}

	@Override
	public int getNextPage() {
		return buf.getInt(nextPageOff);
	}

	@Override
	public void setNextPage(int nextPage) {
		buf.putInt(nextPageOff, nextPage);
	}

    @Override
    public boolean isValid() {
        return buf.getInt(validOff) == MAGIC_VALID_INT;
    }

    // Writes the magic marker (valid) or zero (invalid) into the header.
    @Override
    public void setValid(boolean isValid) {
        if (isValid) {
            buf.putInt(validOff, MAGIC_VALID_INT);
        } else {
            buf.putInt(validOff, 0);
        }
    }
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
new file mode 100644
index 0000000..e2e28fd
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
@@ -0,0 +1,295 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.frames;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
/**
 * Base frame for N-ary Storage Model (slotted) tree-index pages: a fixed
 * header, tuples growing from the header toward the end, and a slot array
 * managed by the ISlotManager. All mutations write directly into the page's
 * ByteBuffer.
 */
public abstract class TreeIndexNSMFrame implements ITreeIndexFrame {

    // Byte offsets of the fixed header fields within the page buffer.
    protected static final int pageLsnOff = 0; // 0
    protected static final int tupleCountOff = pageLsnOff + 8; // 8
    protected static final int freeSpaceOff = tupleCountOff + 4; // 12
    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
    protected static final int levelOff = totalFreeSpaceOff + 4; // 20
    protected static final int smFlagOff = levelOff + 1; // 21

    protected ICachedPage page = null;
    protected ByteBuffer buf = null;
    protected ISlotManager slotManager;

    protected ITreeIndexTupleWriter tupleWriter;
    // Reusable scratch tuple for reading tuples already on the page.
    protected ITreeIndexTupleReference frameTuple;

    public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter, ISlotManager slotManager) {
        this.tupleWriter = tupleWriter;
        this.frameTuple = tupleWriter.createTupleReference();
        this.slotManager = slotManager;
        this.slotManager.setFrame(this);
    }

    // Initializes an empty page: zero LSN, zero tuples, all space free,
    // structure-modification flag cleared.
    @Override
    public void initBuffer(byte level) {
        buf.putLong(pageLsnOff, 0); // TODO: might need to be set to a different
        // LSN during creation
        buf.putInt(tupleCountOff, 0);
        resetSpaceParams();
        buf.put(levelOff, level);
        buf.put(smFlagOff, (byte) 0);
    }

    @Override
    public boolean isLeaf() {
        return buf.get(levelOff) == 0;
    }

    @Override
    public boolean isInterior() {
        return buf.get(levelOff) > 0;
    }

    @Override
    public byte getLevel() {
        return buf.get(levelOff);
    }

    @Override
    public void setLevel(byte level) {
        buf.put(levelOff, level);
    }

    @Override
    public int getFreeSpaceOff() {
        return buf.getInt(freeSpaceOff);
    }

    @Override
    public void setFreeSpaceOff(int freeSpace) {
        buf.putInt(freeSpaceOff, freeSpace);
    }

    @Override
    public void setPage(ICachedPage page) {
        this.page = page;
        this.buf = page.getBuffer();
    }

    @Override
    public ByteBuffer getBuffer() {
        return page.getBuffer();
    }

    @Override
    public ICachedPage getPage() {
        return page;
    }

    // Defragments the page: slides all live tuples to be contiguous at the
    // start of the data area, reclaiming holes left by deletes/updates.
    @Override
    public boolean compact() {
        resetSpaceParams();
        int tupleCount = buf.getInt(tupleCountOff);
        int freeSpace = buf.getInt(freeSpaceOff);
        // Sort the slots by the tuple offset they point to.
        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
        sortedTupleOffs.ensureCapacity(tupleCount);
        for (int i = 0; i < tupleCount; i++) {
            int slotOff = slotManager.getSlotOff(i);
            int tupleOff = slotManager.getTupleOff(slotOff);
            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
        }
        Collections.sort(sortedTupleOffs);
        // Iterate over the sorted slots, and move their corresponding tuples to
        // the left, reclaiming free space.
        for (int i = 0; i < sortedTupleOffs.size(); i++) {
            int tupleOff = sortedTupleOffs.get(i).tupleOff;
            frameTuple.resetByTupleOffset(buf, tupleOff);
            // Tuple length = end of its last field minus its start offset.
            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
            int tupleLength = tupleEndOff - tupleOff;
            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
            freeSpace += tupleLength;
        }
        // Update contiguous free space pointer and total free space indicator.
        buf.putInt(freeSpaceOff, freeSpace);
        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
        // NOTE(review): always returns false even after moving tuples —
        // confirm callers do not rely on the return value.
        return false;
    }

    // Removes the tuple at tupleIndex by shifting the slot array over its
    // slot; the tuple bytes remain on the page until the next compact().
    @Override
    public void delete(ITupleReference tuple, int tupleIndex) {
        int slotOff = slotManager.getSlotOff(tupleIndex);
        int tupleOff = slotManager.getTupleOff(slotOff);
        frameTuple.resetByTupleOffset(buf, tupleOff);
        int tupleSize = tupleWriter.bytesRequired(frameTuple);

        // perform deletion (we just do a memcpy to overwrite the slot)
        int slotStartOff = slotManager.getSlotEndOff();
        int length = slotOff - slotStartOff;
        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);

        // maintain space information
        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
    }

    @Override
    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
        int bytesRequired = tupleWriter.bytesRequired(tuple);
        // Enough space in the contiguous space region?
        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff) 
                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
        }
        // Enough space after compaction?
        if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff)) {
            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
        }
        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
    }

    @Override
    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex) {
    	frameTuple.resetByTupleIndex(this, oldTupleIndex);
    	int oldTupleBytes = frameTuple.getTupleSize();
    	int newTupleBytes = tupleWriter.bytesRequired(newTuple);
    	int additionalBytesRequired = newTupleBytes - oldTupleBytes;
    	// Enough space for an in-place update?
    	if (additionalBytesRequired <= 0) {
    		return FrameOpSpaceStatus.SUFFICIENT_INPLACE_SPACE;
    	}
    	// Enough space if we delete the old tuple and insert the new one without compaction? 
    	if (newTupleBytes <= buf.capacity() - buf.getInt(freeSpaceOff)
                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
    		return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
    	}
    	// Enough space if we delete the old tuple and compact?
    	if (additionalBytesRequired <= buf.getInt(totalFreeSpaceOff)) {
    		return FrameOpSpaceStatus.SUFFICIENT_SPACE;
    	}
        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
    }

    // Marks everything after the fixed header as free space.
    protected void resetSpaceParams() {
        buf.putInt(freeSpaceOff, smFlagOff + 1);
        buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
    }

    // Writes the tuple at the contiguous free-space pointer and records its
    // offset in a new slot at tupleIndex.
    @Override
    public void insert(ITupleReference tuple, int tupleIndex) {
        slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
    }

    // Replaces the tuple at oldTupleIndex. Callers must have checked
    // hasSpaceUpdate() first; inPlace must only be true when the new tuple
    // fits in the old tuple's footprint.
    @Override
    public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) {
    	frameTuple.resetByTupleIndex(this, oldTupleIndex);
		int oldTupleBytes = frameTuple.getTupleSize();
		int slotOff = slotManager.getSlotOff(oldTupleIndex);
		int bytesWritten = 0;
    	if (inPlace) {    		
    		// Overwrite the old tuple in place.
    		bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), buf.getInt(slotOff));
    	} else {
    		// Insert the new tuple at the end of the free space, and change the slot value (effectively "deleting" the old tuple).
    		int newTupleOff = buf.getInt(freeSpaceOff);
    		bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), newTupleOff);
    		// Update slot value.
    		buf.putInt(slotOff, newTupleOff);
    		// Update contiguous free space pointer.
    		buf.putInt(freeSpaceOff, newTupleOff + bytesWritten);
    	}
    	buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + oldTupleBytes - bytesWritten);
    }

    // Debugging aid: dumps the header field offsets (not the field values).
    @Override
    public String printHeader() {
    	StringBuilder strBuilder = new StringBuilder();
    	strBuilder.append("pageLsnOff:        " + pageLsnOff + "\n");
    	strBuilder.append("tupleCountOff:     " + tupleCountOff + "\n");
    	strBuilder.append("freeSpaceOff:      " + freeSpaceOff + "\n");
    	strBuilder.append("totalFreeSpaceOff: " + totalFreeSpaceOff + "\n");
    	strBuilder.append("levelOff:          " + levelOff + "\n");
    	strBuilder.append("smFlagOff:         " + smFlagOff + "\n");
    	return strBuilder.toString();
    }

    @Override
    public int getTupleCount() {
        return buf.getInt(tupleCountOff);
    }

    public ISlotManager getSlotManager() {
        return slotManager;
    }

    @Override
    public int getTupleOffset(int slotNum) {
        return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum * slotManager.getSlotSize());
    }

    @Override
    public long getPageLsn() {
        return buf.getLong(pageLsnOff);
    }

    @Override
    public void setPageLsn(long pageLsn) {
        buf.putLong(pageLsnOff, pageLsn);
    }

    @Override
    public int getTotalFreeSpace() {
        return buf.getInt(totalFreeSpaceOff);
    }

    // This frame type does not support compression.
    @Override
    public boolean compress() {
        return false;
    }

    @Override
    public int getSlotSize() {
        return slotManager.getSlotSize();
    }

    @Override
    public ITreeIndexTupleWriter getTupleWriter() {
        return tupleWriter;
    }
    
    @Override
    public ITreeIndexTupleReference createTupleReference() {
    	return tupleWriter.createTupleReference();
    }
    
	// Free bytes in the contiguous region between the data area and the slot
	// array (excludes holes reclaimable only via compact()).
	public int getFreeContiguousSpace() {
		return buf.capacity() - getFreeSpaceOff()
				- (getTupleCount() * slotManager.getSlotSize());
	}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManagerFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManagerFactory.java
new file mode 100644
index 0000000..157b563
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManagerFactory.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.freepage;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class LinkedListFreePageManagerFactory {
+
+	private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
+	private final IBufferCache bufferCache;
+	
+	public LinkedListFreePageManagerFactory(IBufferCache bufferCache, ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
+		this.metaDataFrameFactory = metaDataFrameFactory;
+		this.bufferCache = bufferCache;
+	}
+	
+    public IFreePageManager createFreePageManager() {
+        return new LinkedListFreePageManager(bufferCache, 0, metaDataFrameFactory);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallback.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallback.java
new file mode 100644
index 0000000..828dd81
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallback.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallback;
+
+/**
+ * Dummy operation callback that simply does nothing. Mainly, intended to be
+ * used in non-transaction access method testing.
+ */
+public class NoOpOperationCallback implements IOperationCallback {
+
+    public static IOperationCallback INSTANCE = new NoOpOperationCallback();
+    
+    private NoOpOperationCallback() {
+    }
+    
+    @Override
+    public void pre(ITupleReference tuple) {
+        // Do nothing.
+    }
+
+    @Override
+    public void post(ITupleReference tuple) {
+        // Do nothing.
+    }
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallbackProvider.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallbackProvider.java
new file mode 100644
index 0000000..55dfb74
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/NoOpOperationCallbackProvider.java
@@ -0,0 +1,19 @@
+package edu.uci.ics.hyracks.storage.am.common.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+
/**
 * Dummy NoOp callback provider used primarily for testing. Always returns the
 * {@link NoOpOperationCallback} instance.
 * <p>
 * Implemented as an enum to preserve the singleton property while remaining
 * serializable (enum serialization always resolves to the same constant).
 */
public enum NoOpOperationCallbackProvider implements IOperationCallbackProvider {
    INSTANCE;

    @Override
    public IOperationCallback getOperationCallback() {
        return NoOpOperationCallback.INSTANCE;
    }
}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
new file mode 100644
index 0000000..ea4c105
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.impls;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
/**
 * Cursor that scans the pages of a tree index file in disk (page-id) order,
 * from the current page id up to a caller-supplied maximum page id, returning
 * the tuples of every non-empty leaf page (frame level 0). Holds at most one
 * pinned, read-latched page at a time.
 *
 * NOTE(review): not thread-safe; intended for single-threaded scans.
 */
public class TreeDiskOrderScanCursor implements ITreeIndexCursor {

	// Index of the next tuple to return within the current page's frame.
	private int tupleIndex = 0;
	// File id of the index file; set via setFileId() before use.
	private int fileId = -1;
	// Page id of the currently pinned page; set via setCurrentPageId().
	private int currentPageId = -1;
	// Last page id (inclusive) of the scan; set via setMaxPageId().
	private int maxPageId = -1;
	// Currently pinned and read-latched page, or null when none is held.
	private ICachedPage page = null;
	private IBufferCache bufferCache = null;

	// Frame used to interpret the current page; supplied by the caller.
	private final ITreeIndexFrame frame;
	// Reusable tuple reference positioned over the current tuple.
	private final ITreeIndexTupleReference frameTuple;

	public TreeDiskOrderScanCursor(ITreeIndexFrame frame) {
		this.frame = frame;
		this.frameTuple = frame.createTupleReference();
	}

	/**
	 * Releases the read latch and pin on the current page.
	 * NOTE(review): throws NullPointerException if no page is held (e.g.
	 * before open() or after reset()) -- confirm callers always pair
	 * open() with close().
	 */
	@Override
	public void close() throws HyracksDataException {
		page.releaseReadLatch();
		bufferCache.unpin(page);
		page = null;
	}

	@Override
	public ITreeIndexTupleReference getTuple() {
		return frameTuple;
	}

	@Override
	public ICachedPage getPage() {
		return page;
	}

	/**
	 * Advances to the next page that is a non-empty leaf (level 0), skipping
	 * the current page when skipCurrent is true. Unpins the page it leaves
	 * and pins/latches the page it lands on. Returns true iff positioned on
	 * a usable page within [.., maxPageId].
	 *
	 * NOTE(review): when currentPageId == maxPageId and the current page must
	 * be skipped, the loop increments to maxPageId + 1 and pins that page
	 * before the loop condition terminates -- confirm page maxPageId + 1
	 * always exists in the file.
	 */
	private boolean positionToNextLeaf(boolean skipCurrent)
			throws HyracksDataException {
		while ((frame.getLevel() != 0 || skipCurrent || frame.getTupleCount() == 0) && (currentPageId <= maxPageId)) {
			currentPageId++;

			page.releaseReadLatch();
            bufferCache.unpin(page);

			ICachedPage nextPage = bufferCache.pin(
					BufferedFileHandle.getDiskPageId(fileId, currentPageId),
					false);
			nextPage.acquireReadLatch();

			page = nextPage;
			frame.setPage(page);
			tupleIndex = 0;
			skipCurrent = false;
		}
		if (currentPageId <= maxPageId) {
			return true;
		} else {
			return false;
		}
	}

	/**
	 * Returns true if another tuple is available, positioning frameTuple over
	 * it. Moves to the next qualifying leaf page when the current page is
	 * exhausted.
	 */
	@Override
	public boolean hasNext() throws HyracksDataException {
		if (currentPageId > maxPageId) {
			return false;
		}
		if (tupleIndex >= frame.getTupleCount()) {
			boolean nextLeafExists = positionToNextLeaf(true);
			if (nextLeafExists) {
				// tupleIndex was reset to 0 by positionToNextLeaf.
				frameTuple.resetByTupleIndex(frame, tupleIndex);
				return true;
			} else {
				return false;
			}
		}
		frameTuple.resetByTupleIndex(frame, tupleIndex);
		return true;
	}

	@Override
	public void next() throws HyracksDataException {
		tupleIndex++;
	}

	/**
	 * Starts the scan from the page supplied by initialState. searchPred is
	 * ignored (disk-order scans are unconditional).
	 */
	@Override
	public void open(ICursorInitialState initialState,
			ISearchPredicate searchPred) throws HyracksDataException {
		// in case open is called multiple times without closing
		if (page != null) {
			page.releaseReadLatch();
			bufferCache.unpin(page);
		}
		page = initialState.getPage();
		tupleIndex = 0;
		frame.setPage(page);
		positionToNextLeaf(false);
	}

	/**
	 * Resets cursor position state.
	 * NOTE(review): drops the page reference without releasing the latch or
	 * unpinning -- callers presumably close() first; confirm, otherwise this
	 * leaks a pinned page.
	 */
	@Override
	public void reset() {
		tupleIndex = 0;
		currentPageId = -1;
		maxPageId = -1;
		page = null;
	}

	@Override
	public void setBufferCache(IBufferCache bufferCache) {
		this.bufferCache = bufferCache;
	}

	@Override
	public void setFileId(int fileId) {
		this.fileId = fileId;
	}

	// Sets the page id the scan starts from (first page pinned by open()).
	public void setCurrentPageId(int currentPageId) {
		this.currentPageId = currentPageId;
	}

	// Sets the last page id (inclusive) the scan will visit.
	public void setMaxPageId(int maxPageId) {
		this.maxPageId = maxPageId;
	}

	// Disk-order scans only ever take read latches.
	@Override
	public boolean exclusiveLatchNodes() {
		return false;
	}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
new file mode 100644
index 0000000..77ad7ff
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
+
/**
 * The set of operations an index access method can be asked to perform.
 * Constant order is part of the contract (ordinals may be persisted or
 * switched on elsewhere) and must not change.
 */
public enum IndexOp {
    INSERT,
    DELETE,
    UPDATE,
    UPSERT,
    SEARCH,
    DISKORDERSCAN,
    PHYSICALDELETE
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
new file mode 100644
index 0000000..c653c9a
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class MultiComparator {
+
+	private final IBinaryComparator[] cmps;
+
+	public MultiComparator(IBinaryComparator[] cmps) {
+		this.cmps = cmps;
+	}
+
+	public int compare(ITupleReference tupleA, ITupleReference tupleB) {
+		for (int i = 0; i < cmps.length; i++) {
+			int cmp = cmps[i].compare(tupleA.getFieldData(i),
+					tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+					tupleB.getFieldData(i), tupleB.getFieldStart(i),
+					tupleB.getFieldLength(i));
+			if (cmp < 0)
+				return -1;
+			else if (cmp > 0)
+				return 1;
+		}
+		return 0;
+	}
+
+	public int fieldRangeCompare(ITupleReference tupleA,
+			ITupleReference tupleB, int startFieldIndex, int numFields) {
+		for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
+			int cmp = cmps[i].compare(tupleA.getFieldData(i),
+					tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+					tupleB.getFieldData(i), tupleB.getFieldStart(i),
+					tupleB.getFieldLength(i));
+			if (cmp < 0)
+				return -1;
+			else if (cmp > 0)
+				return 1;
+		}
+		return 0;
+	}
+	
+	public int compare(ITupleReference tupleA,
+			ITupleReference tupleB, int startFieldIndex) {
+		for (int i = 0; i < cmps.length; i++) {
+			int ix = startFieldIndex + i;
+			int cmp = cmps[i].compare(tupleA.getFieldData(ix),
+					tupleA.getFieldStart(ix), tupleA.getFieldLength(ix),
+					tupleB.getFieldData(ix), tupleB.getFieldStart(ix),
+					tupleB.getFieldLength(ix));
+			if (cmp < 0)
+				return -1;
+			else if (cmp > 0)
+				return 1;
+		}
+		return 0;
+	}
+
+	public IBinaryComparator[] getComparators() {
+		return cmps;
+	}
+
+    public int getKeyFieldCount() {
+		return cmps.length;
+	}
+    
+    public static MultiComparator create(IBinaryComparatorFactory[] cmpFactories) {
+        IBinaryComparator[] cmps = new IBinaryComparator[cmpFactories.length];
+        for (int i = 0; i < cmpFactories.length; i++) {
+            cmps[i] = cmpFactories[i].createBinaryComparator();
+        }
+        return new MultiComparator(cmps);
+    }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
new file mode 100644
index 0000000..f5ec5f3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.tuples;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+
/**
 * Tuple writer producing the "simple" on-disk layout:
 * [null-flag bytes][one 2-byte end-offset slot per field][field data].
 * Each slot holds the cumulative data length up to and including that field,
 * relative to the start of the data section.
 */
public class SimpleTupleWriter implements ITreeIndexTupleWriter {

	// Write short to target byte array at given offset, most significant
	// byte first. NOTE(review): despite the 'L' in the name, this writes
	// BIG-endian, not little-endian as the original comment claimed;
	// presumably SimpleTupleReference reads slots back the same way --
	// confirm before changing the byte order (it is an on-disk format).
	private static void writeShortL(short s, byte[] buf, int targetOff) {
		buf[targetOff] = (byte)(s >> 8);
		buf[targetOff + 1] = (byte)(s >> 0);
	}
	
    /** Total bytes needed to write all fields of the tuple. */
    @Override
    public int bytesRequired(ITupleReference tuple) {
        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
        for (int i = 0; i < tuple.getFieldCount(); i++) {
            bytes += tuple.getFieldLength(i);
        }
        return bytes;
    }

    /** Total bytes needed to write fields [startField, startField + numFields). */
    @Override
    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
        int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
        for (int i = startField; i < startField + numFields; i++) {
            bytes += tuple.getFieldLength(i);
        }
        return bytes;
    }

    @Override
    public ITreeIndexTupleReference createTupleReference() {
        return new SimpleTupleReference();
    }

    // Delegates to the byte[] overload via the buffer's backing array;
    // requires targetBuf to be array-backed.
    @Override
    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
        return writeTuple(tuple, targetBuf.array(), targetOff);
    }
    
    /**
     * Writes the whole tuple at targetOff and returns the number of bytes
     * written. Null-flag bytes are zeroed (no field is marked null).
     */
    @Override
	public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
    	int runner = targetOff;
        int nullFlagsBytes = getNullFlagsBytes(tuple);
        int fieldSlotsBytes = getFieldSlotsBytes(tuple);
        // Zero the null-indicator bytes.
        for (int i = 0; i < nullFlagsBytes; i++) {
            targetBuf[runner++] = (byte) 0;
        }
        // Skip the slot section; slots are back-filled below once each
        // field's cumulative end offset is known.
        runner += fieldSlotsBytes;
        int fieldEndOff = 0;
        for (int i = 0; i < tuple.getFieldCount(); i++) {
            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner,
                    tuple.getFieldLength(i));
            fieldEndOff += tuple.getFieldLength(i);
            runner += tuple.getFieldLength(i);
            writeShortL((short) fieldEndOff, targetBuf, targetOff + nullFlagsBytes + i * 2);
        }
        return runner - targetOff;
	}

    /**
     * Writes only fields [startField, startField + numFields), renumbering
     * the slots from 0, and returns the number of bytes written.
     */
    @Override
    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, byte[] targetBuf,
            int targetOff) {
        int runner = targetOff;
        int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
        for (int i = 0; i < nullFlagsBytes; i++) {
            targetBuf[runner++] = (byte) 0;
        }
        runner += getFieldSlotsBytes(tuple, startField, numFields);

        int fieldEndOff = 0;
        int fieldCounter = 0;
        for (int i = startField; i < startField + numFields; i++) {
            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner,
                    tuple.getFieldLength(i));
            fieldEndOff += tuple.getFieldLength(i);
            runner += tuple.getFieldLength(i);            
            writeShortL((short) fieldEndOff, targetBuf, targetOff + nullFlagsBytes + fieldCounter * 2);
            fieldCounter++;
        }

        return runner - targetOff;
    }

    // One null-flag bit per field, rounded up to whole bytes.
    protected int getNullFlagsBytes(ITupleReference tuple) {
        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
    }

    // One 2-byte end-offset slot per field.
    protected int getFieldSlotsBytes(ITupleReference tuple) {
        return tuple.getFieldCount() * 2;
    }

    // One null-flag bit per written field, rounded up to whole bytes.
    protected int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
        return (int) Math.ceil((double) numFields / 8.0);
    }

    // One 2-byte end-offset slot per written field.
    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
        return numFields * 2;
    }	
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
new file mode 100644
index 0000000..9730346
--- /dev/null
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.tuples;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+
+public class TypeAwareTupleWriter implements ITreeIndexTupleWriter {
+
+    protected ITypeTraits[] typeTraits;
+    protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+
+    public TypeAwareTupleWriter(ITypeTraits[] typeTraits) {
+        this.typeTraits = typeTraits;
+    }
+
+    @Override
+    public int bytesRequired(ITupleReference tuple) {
+        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            bytes += tuple.getFieldLength(i);
+        }
+        return bytes;
+    }
+
+    @Override
+    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
+        int bytes = getNullFlagsBytes(numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+        for (int i = startField; i < startField + numFields; i++) {
+            bytes += tuple.getFieldLength(i);
+        }
+        return bytes;
+    }
+
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        return new TypeAwareTupleReference(typeTraits);
+    }
+
+    @Override
+    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
+        return writeTuple(tuple, targetBuf.array(), targetOff);
+    }
+
+    @Override
+    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
+        int runner = targetOff;
+        int nullFlagsBytes = getNullFlagsBytes(tuple);
+        // write null indicator bits
+        for (int i = 0; i < nullFlagsBytes; i++) {
+            targetBuf[runner++] = (byte) 0;
+        }
+
+        // write field slots for variable length fields
+        encDec.reset(targetBuf, runner);
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            if (!typeTraits[i].isFixedLength()) {
+                encDec.encode(tuple.getFieldLength(i));
+            }
+        }
+        runner = encDec.getPos();
+
+        // write data fields
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner, tuple.getFieldLength(i));
+            runner += tuple.getFieldLength(i);
+        }
+
+        return runner - targetOff;
+    }
+
+    @Override
+    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, byte[] targetBuf,
+            int targetOff) {
+        int runner = targetOff;
+        int nullFlagsBytes = getNullFlagsBytes(numFields);
+        // write null indicator bits
+        for (int i = 0; i < nullFlagsBytes; i++) {
+            targetBuf[runner++] = (byte) 0;
+        }
+
+        // write field slots for variable length fields
+        encDec.reset(targetBuf, runner);
+        for (int i = startField; i < startField + numFields; i++) {
+            if (!typeTraits[i].isFixedLength()) {
+                encDec.encode(tuple.getFieldLength(i));
+            }
+        }
+        runner = encDec.getPos();
+
+        for (int i = startField; i < startField + numFields; i++) {
+            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner,
+                    tuple.getFieldLength(i));
+            runner += tuple.getFieldLength(i);
+        }
+
+        return runner - targetOff;
+    }
+
+    protected int getNullFlagsBytes(ITupleReference tuple) {
+        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+    }
+
+    protected int getFieldSlotsBytes(ITupleReference tuple) {
+        int fieldSlotBytes = 0;
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            if (!typeTraits[i].isFixedLength()) {
+                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+            }
+        }
+        return fieldSlotBytes;
+    }
+
+    protected int getNullFlagsBytes(int numFields) {
+        return (int) Math.ceil((double) numFields / 8.0);
+    }
+
+    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+        int fieldSlotBytes = 0;
+        for (int i = startField; i < startField + numFields; i++) {
+            if (!typeTraits[i].isFixedLength()) {
+                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+            }
+        }
+        return fieldSlotBytes;
+    }
+
+    public ITypeTraits[] getTypeTraits() {
+        return typeTraits;
+    }
+
+    public void setTypeTraits(ITypeTraits[] typeTraits) {
+        this.typeTraits = typeTraits;
+    }
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexBufferCacheWarmup.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexBufferCacheWarmup.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexBufferCacheWarmup.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexBufferCacheWarmup.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStats.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStats.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStats.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStats.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStatsGatherer.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStatsGatherer.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStatsGatherer.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStatsGatherer.java
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexUtils.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexUtils.java
similarity index 100%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexUtils.java
rename to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexUtils.java
diff --git a/hyracks/hyracks-storage-am-invertedindex/pom.xml b/hyracks/hyracks-storage-am-invertedindex/pom.xml
new file mode 100644
index 0000000..2ba4980
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/pom.xml
@@ -0,0 +1,64 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-invertedindex</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-am-invertedindex</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-btree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>    
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>  	  		
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexOperatorDescriptor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexOperatorDescriptor.java
new file mode 100644
index 0000000..042042f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexOperatorDescriptor.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+
+public interface IInvertedIndexOperatorDescriptor extends ITreeIndexOperatorDescriptor {
+    public IBinaryComparatorFactory[] getInvListsComparatorFactories();
+
+    public IBinaryTokenizerFactory getTokenizerFactory();
+    
+    public ITypeTraits[] getInvListsTypeTraits();
+    
+    public IFileSplitProvider getInvListsFileSplitProvider();
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifier.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifier.java
new file mode 100644
index 0000000..bd96f67
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifier.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import java.util.List;
+
+public interface IInvertedIndexSearchModifier {
+    public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors);
+
+    public int getPrefixLists(List<IInvertedListCursor> invListCursors);
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifierFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifierFactory.java
new file mode 100644
index 0000000..92770d6
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifierFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import java.io.Serializable;
+
+public interface IInvertedIndexSearchModifierFactory extends Serializable {
+    public IInvertedIndexSearchModifier createSearchModifier();
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
new file mode 100644
index 0000000..d633c34
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchPredicate;
+
+public interface IInvertedIndexSearcher {
+    public void search(InvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred)
+            throws HyracksDataException, IndexException;
+
+    public IFrameTupleAccessor createResultFrameTupleAccessor();
+
+    public ITupleReference createResultTupleReference();
+
+    public List<ByteBuffer> getResultBuffers();
+
+    public int getNumValidResultBuffers();
+    
+    public void reset();
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListBuilder.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListBuilder.java
new file mode 100644
index 0000000..aaaef56
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListBuilder.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public interface IInvertedListBuilder {
+    public boolean startNewList(ITupleReference tuple, int numTokenFields);
+
+    // returns true if successfully appended
+    // returns false if not enough space in targetBuf
+    public boolean appendElement(ITupleReference tuple, int numTokenFields, int numElementFields);
+
+    public void setTargetBuffer(byte[] targetBuf, int startPos);
+
+    public int getListSize();
+
+    public int getPos();
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListCursor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListCursor.java
new file mode 100644
index 0000000..9435f3c
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListCursor.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public interface IInvertedListCursor extends Comparable<IInvertedListCursor> {
+    void reset(int startPageId, int endPageId, int startOff, int numElements);
+
+    void pinPagesSync() throws HyracksDataException;
+
+    void pinPagesAsync() throws HyracksDataException;
+
+    void unpinPages() throws HyracksDataException;
+
+    boolean hasNext();
+
+    void next();
+
+    ITupleReference getTuple();
+
+    // getters
+    int getNumElements();
+
+    int getStartPageId();
+
+    int getEndPageId();
+
+    int getStartOff();
+
+    // jump to a specific element
+    void positionCursor(int elementIx);
+
+    boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp);
+
+    // for debugging
+    String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException;
+
+    String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException;
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
new file mode 100644
index 0000000..117f96f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITupleFilterFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.util.InvertedIndexUtils;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public abstract class AbstractInvertedIndexOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor
+        implements IInvertedIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    // General.
+    protected final IStorageManagerInterface storageManager;
+    protected final IIndexRegistryProvider<IIndex> indexRegistryProvider;
+    protected final boolean retainInput;
+    protected final IOperationCallbackProvider opCallbackProvider;
+    
+    // Btree.
+    protected final ITreeIndexFrameFactory btreeInteriorFrameFactory;
+    protected final ITreeIndexFrameFactory btreeLeafFrameFactory;
+    protected final ITypeTraits[] btreeTypeTraits;
+    protected final IBinaryComparatorFactory[] btreeComparatorFactories;
+    protected final IIndexDataflowHelperFactory btreeDataflowHelperFactory;
+    protected final IFileSplitProvider btreeFileSplitProvider;
+
+    // Inverted index.
+    protected final ITypeTraits[] invListsTypeTraits;
+    protected final IBinaryComparatorFactory[] invListComparatorFactories;
+    protected final IBinaryTokenizerFactory tokenizerFactory;
+    protected final IFileSplitProvider invListsFileSplitProvider;
+
+    public AbstractInvertedIndexOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity, int outputArity,
+            RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+            IFileSplitProvider btreeFileSplitProvider, IFileSplitProvider invListsFileSplitProvider,
+            IIndexRegistryProvider<IIndex> indexRegistryProvider, ITypeTraits[] tokenTypeTraits,
+            IBinaryComparatorFactory[] tokenComparatorFactories, ITypeTraits[] invListsTypeTraits,
+            IBinaryComparatorFactory[] invListComparatorFactories, IBinaryTokenizerFactory tokenizerFactory,
+            IIndexDataflowHelperFactory btreeDataflowHelperFactory, boolean retainInput, IOperationCallbackProvider opCallbackProvider) {
+        super(spec, inputArity, outputArity);
+
+        // General.
+        this.storageManager = storageManager;
+        this.indexRegistryProvider = indexRegistryProvider;
+        this.retainInput = retainInput;
+        this.opCallbackProvider = opCallbackProvider;
+        
+        // Btree.
+        this.btreeTypeTraits = InvertedIndexUtils.getBTreeTypeTraits(tokenTypeTraits);
+        ITreeIndexTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(btreeTypeTraits);
+        this.btreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        this.btreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        this.btreeComparatorFactories = tokenComparatorFactories;
+        this.btreeDataflowHelperFactory = btreeDataflowHelperFactory;
+        this.btreeFileSplitProvider = btreeFileSplitProvider;
+
+        // Inverted index.
+        this.invListsTypeTraits = invListsTypeTraits;
+        this.invListComparatorFactories = invListComparatorFactories;
+        this.tokenizerFactory = tokenizerFactory;
+        this.invListsFileSplitProvider = invListsFileSplitProvider;
+
+        if (outputArity > 0) {
+            recordDescriptors[0] = recDesc;
+        }
+    }
+
+    @Override
+    public IFileSplitProvider getFileSplitProvider() {
+        return btreeFileSplitProvider;
+    }
+    
+    @Override
+    public IFileSplitProvider getInvListsFileSplitProvider() {
+        return invListsFileSplitProvider;
+    }
+
+    @Override
+    public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
+        return btreeComparatorFactories;
+    }
+
+    @Override
+    public ITypeTraits[] getTreeIndexTypeTraits() {
+        return btreeTypeTraits;
+    }
+
+    @Override
+    public IStorageManagerInterface getStorageManager() {
+        return storageManager;
+    }
+
+    @Override
+    public RecordDescriptor getRecordDescriptor() {
+        return recordDescriptors[0];
+    }
+
+    @Override
+    public IBinaryComparatorFactory[] getInvListsComparatorFactories() {
+        return invListComparatorFactories;
+    }
+
+    @Override
+    public IBinaryTokenizerFactory getTokenizerFactory() {
+        return tokenizerFactory;
+    }
+    
+    @Override
+    public ITypeTraits[] getInvListsTypeTraits() {
+        return invListsTypeTraits;
+    }
+
+    @Override
+    public IIndexRegistryProvider<IIndex> getIndexRegistryProvider() {
+        return indexRegistryProvider;
+    }
+    
+    @Override
+    public IIndexDataflowHelperFactory getIndexDataflowHelperFactory() {
+        return btreeDataflowHelperFactory;
+    }
+    
+    @Override
+    public boolean getRetainInput() {
+    	return retainInput;
+    }
+    
+    @Override
+    public IOperationCallbackProvider getOpCallbackProvider() {
+    	return opCallbackProvider;
+    }
+    
+    @Override
+	public ITupleFilterFactory getTupleFilterFactory() {
+		return null;
+	}
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
new file mode 100644
index 0000000..a146479
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+
+public class BinaryTokenizerOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private final IBinaryTokenizerFactory tokenizerFactory;
+    // Fields that will be tokenized
+    private final int[] tokenFields;
+    // operator will append these key fields to each token, e.g., as
+    // payload for an inverted list
+    // WARNING: too many key fields can cause significant data blowup.
+    private final int[] keyFields;
+
+    public BinaryTokenizerOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+            IBinaryTokenizerFactory tokenizerFactory, int[] tokenFields, int[] keyFields) {
+        super(spec, 1, 1);
+        this.tokenizerFactory = tokenizerFactory;
+        this.tokenFields = tokenFields;
+        this.keyFields = keyFields;
+        recordDescriptors[0] = recDesc;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new BinaryTokenizerOperatorNodePushable(ctx, recordDescProvider.getInputRecordDescriptor(
+                getActivityId(), 0), recordDescriptors[0], tokenizerFactory.createTokenizer(), tokenFields, keyFields);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
new file mode 100644
index 0000000..6744f70
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+
+public class BinaryTokenizerOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+
+    private final IHyracksTaskContext ctx;
+    private final IBinaryTokenizer tokenizer;
+    private final int[] tokenFields;
+    private final int[] projFields;
+    private final RecordDescriptor inputRecDesc;
+    private final RecordDescriptor outputRecDesc;
+
+    private FrameTupleAccessor accessor;
+    private ArrayTupleBuilder builder;
+    private GrowableArray builderFieldData;
+    private FrameTupleAppender appender;
+    private ByteBuffer writeBuffer;
+
+    public BinaryTokenizerOperatorNodePushable(IHyracksTaskContext ctx, RecordDescriptor inputRecDesc,
+            RecordDescriptor outputRecDesc, IBinaryTokenizer tokenizer, int[] tokenFields, int[] projFields) {
+        this.ctx = ctx;
+        this.tokenizer = tokenizer;
+        this.tokenFields = tokenFields;
+        this.projFields = projFields;
+        this.inputRecDesc = inputRecDesc;
+        this.outputRecDesc = outputRecDesc;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRecDesc);
+        writeBuffer = ctx.allocateFrame();
+        builder = new ArrayTupleBuilder(outputRecDesc.getFieldCount());
+        builderFieldData = builder.getFieldData();
+        appender = new FrameTupleAppender(ctx.getFrameSize());
+        appender.reset(writeBuffer, true);
+        writer.open();
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+
+        int tupleCount = accessor.getTupleCount();
+        for (int i = 0; i < tupleCount; i++) {
+
+            for (int j = 0; j < tokenFields.length; j++) {
+
+                tokenizer.reset(
+                        accessor.getBuffer().array(),
+                        accessor.getTupleStartOffset(i) + accessor.getFieldSlotsLength()
+                                + accessor.getFieldStartOffset(i, tokenFields[j]),
+                        accessor.getFieldLength(i, tokenFields[j]));
+
+                while (tokenizer.hasNext()) {
+                    tokenizer.next();
+
+                    builder.reset();
+                    try {
+                        IToken token = tokenizer.getToken();
+                        token.serializeToken(builderFieldData);
+                        builder.addFieldEndOffset();
+                    } catch (IOException e) {
+                        throw new HyracksDataException(e.getMessage());
+                    }
+
+                    for (int k = 0; k < projFields.length; k++) {
+                        builder.addField(accessor, i, projFields[k]);
+                    }
+
+                    if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
+                        FrameUtils.flushFrame(writeBuffer, writer);
+                        appender.reset(writeBuffer, true);
+                        if (!appender
+                                .append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
+                            throw new IllegalStateException();
+                        }
+                    }
+                }
+            }
+        }
+
+        if (appender.getTupleCount() > 0) {
+            FrameUtils.flushFrame(writeBuffer, writer);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        writer.close();
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        writer.fail();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorDescriptor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorDescriptor.java
new file mode 100644
index 0000000..7ffec4d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorDescriptor.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public class InvertedIndexBulkLoadOperatorDescriptor extends AbstractInvertedIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private final int[] fieldPermutation;
+
+    public InvertedIndexBulkLoadOperatorDescriptor(IOperatorDescriptorRegistry spec, int[] fieldPermutation,
+            IStorageManagerInterface storageManager, IFileSplitProvider btreeFileSplitProvider,
+            IFileSplitProvider invListsFileSplitProvider, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenComparatorFactories,
+            ITypeTraits[] invListsTypeTraits, IBinaryComparatorFactory[] invListComparatorFactories,
+            IBinaryTokenizerFactory tokenizerFactory, IIndexDataflowHelperFactory btreeDataflowHelperFactory,
+            IOperationCallbackProvider opCallbackProvider) {
+        super(spec, 1, 0, null, storageManager, btreeFileSplitProvider, invListsFileSplitProvider,
+                indexRegistryProvider, tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits,
+                invListComparatorFactories, tokenizerFactory, btreeDataflowHelperFactory, false,
+                opCallbackProvider);
+        this.fieldPermutation = fieldPermutation;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new InvertedIndexBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, recordDescProvider);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorNodePushable.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorNodePushable.java
new file mode 100644
index 0000000..c6fa56d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorNodePushable.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex.InvertedIndexBulkLoadContext;
+
+/**
+ * Sink push runtime that bulk loads an inverted index (a BTree of tokens plus
+ * an inverted-list file). Each incoming tuple is reordered by the configured
+ * field permutation and handed to the index's bulk-load context; the load is
+ * finalized in close().
+ */
+public class InvertedIndexBulkLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
+    private final TreeIndexDataflowHelper btreeDataflowHelper;
+    private final InvertedIndexDataflowHelper invIndexDataflowHelper;
+    private InvertedIndex invIndex;
+    private InvertedIndex.InvertedIndexBulkLoadContext bulkLoadCtx;
+
+    private FrameTupleAccessor accessor;
+    // Re-orders incoming tuple fields into the order the index expects.
+    private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+
+    private IRecordDescriptorProvider recordDescProvider;
+
+    public InvertedIndexBulkLoadOperatorNodePushable(AbstractInvertedIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider) {
+        btreeDataflowHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory()
+                .createIndexDataflowHelper(opDesc, ctx, partition);
+        invIndexDataflowHelper = new InvertedIndexDataflowHelper(btreeDataflowHelper, opDesc, ctx, partition);
+        this.recordDescProvider = recordDescProvider;
+        tuple.setFieldPermutation(fieldPermutation);
+    }
+
+    /**
+     * Opens the BTree and inverted index (init(false)) and begins the bulk
+     * load. On failure the failing helper is deinitialized before rethrowing.
+     */
+    @Override
+    public void open() throws HyracksDataException {
+        AbstractInvertedIndexOperatorDescriptor opDesc = (AbstractInvertedIndexOperatorDescriptor) btreeDataflowHelper
+                .getOperatorDescriptor();
+        RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        accessor = new FrameTupleAccessor(btreeDataflowHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+
+        // BTree.
+        try {
+            btreeDataflowHelper.init(false);
+        } catch (Exception e) {
+            // Cleanup in case of failure.
+            btreeDataflowHelper.deinit();
+            if (e instanceof HyracksDataException) {
+                throw (HyracksDataException) e;
+            } else {
+                throw new HyracksDataException(e);
+            }
+        }
+
+        // Inverted Index.
+        try {
+            invIndexDataflowHelper.init(false);
+            invIndex = (InvertedIndex) invIndexDataflowHelper.getIndex();
+            bulkLoadCtx = (InvertedIndexBulkLoadContext) invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);
+        } catch (Exception e) {
+            // Cleanup in case of failure.
+            // NOTE(review): the already-initialized BTree helper is not deinited on
+            // this path — confirm whether btreeDataflowHelper.deinit() should also run.
+            invIndexDataflowHelper.deinit();
+            if (e instanceof HyracksDataException) {
+                throw (HyracksDataException) e;
+            } else {
+                throw new HyracksDataException(e);
+            }
+        }
+    }
+
+    // Adds every tuple of the incoming frame to the bulk-load context.
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        for (int i = 0; i < tupleCount; i++) {
+            tuple.reset(accessor, i);
+            invIndex.bulkLoadAddTuple(tuple, bulkLoadCtx);
+        }
+    }
+
+    /**
+     * Finalizes the bulk load; both helpers are always deinitialized (the
+     * nested finally ensures the inverted-index helper is deinited even if
+     * the BTree deinit throws).
+     */
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            invIndex.endBulkLoad(bulkLoadCtx);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            try {
+                btreeDataflowHelper.deinit();
+            } finally {
+                invIndexDataflowHelper.deinit();
+            }
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        // NOTE(review): 'writer' is not declared in this class and this operator is
+        // a sink — verify the superclass actually provides a 'writer' field.
+        writer.fail();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexCreateOperatorDescriptor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexCreateOperatorDescriptor.java
new file mode 100644
index 0000000..01d1a97
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexCreateOperatorDescriptor.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor that creates the files of an inverted index: the token
+ * BTree and the inverted-list file. The operator has no inputs and no outputs;
+ * creation happens entirely in the push runtime's initialize().
+ */
+public class InvertedIndexCreateOperatorDescriptor extends AbstractInvertedIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public InvertedIndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec,
+            IStorageManagerInterface storageManager, IFileSplitProvider btreeFileSplitProvider,
+            IFileSplitProvider invListsFileSplitProvider, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenComparatorFactories,
+            ITypeTraits[] invListsTypeTraits, IBinaryComparatorFactory[] invListComparatorFactories,
+            IBinaryTokenizerFactory tokenizerFactory, IIndexDataflowHelperFactory btreeDataflowHelperFactory,
+            IOperationCallbackProvider opCallbackProvider) {
+        // 0 inputs, 0 outputs, null record descriptor: a pure create/DDL operator.
+        super(spec, 0, 0, null, storageManager, btreeFileSplitProvider, invListsFileSplitProvider,
+                indexRegistryProvider, tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits,
+                invListComparatorFactories, tokenizerFactory, btreeDataflowHelperFactory, false, opCallbackProvider);
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new InvertedIndexCreateOperatorNodePushable(this, ctx, partition);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexCreateOperatorNodePushable.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexCreateOperatorNodePushable.java
new file mode 100644
index 0000000..eb909d8
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexCreateOperatorNodePushable.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+
+/**
+ * Push runtime that creates the on-disk structures of an inverted index: the
+ * token BTree and the inverted-list file. Source operator with no inputs and
+ * no outputs; all work happens in initialize().
+ */
+public class InvertedIndexCreateOperatorNodePushable extends AbstractOperatorNodePushable {
+    private final TreeIndexDataflowHelper btreeDataflowHelper;
+    private final InvertedIndexDataflowHelper invIndexDataflowHelper;
+
+    public InvertedIndexCreateOperatorNodePushable(AbstractInvertedIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition) {
+        btreeDataflowHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory()
+                .createIndexDataflowHelper(opDesc, ctx, partition);
+        invIndexDataflowHelper = new InvertedIndexDataflowHelper(btreeDataflowHelper, opDesc, ctx, partition);
+    }
+
+    @Override
+    public void deinitialize() throws HyracksDataException {
+    }
+
+    @Override
+    public int getInputArity() {
+        // Source operator: no inputs.
+        return 0;
+    }
+
+    @Override
+    public IFrameWriter getInputFrameWriter(int index) {
+        return null;
+    }
+
+    /**
+     * Creates the BTree first, then the inverted index. init(true) creates the
+     * resource; each helper is deinitialized in a finally block so it is
+     * released whether or not creation succeeds.
+     */
+    @Override
+    public void initialize() throws HyracksDataException {
+        // BTree.
+        try {
+            btreeDataflowHelper.init(true);
+        } finally {
+            btreeDataflowHelper.deinit();
+        }
+        // Inverted Index.
+        try {
+            invIndexDataflowHelper.init(true);
+        } finally {
+            invIndexDataflowHelper.deinit();
+        }
+    }
+
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexDataflowHelper.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexDataflowHelper.java
new file mode 100644
index 0000000..6d0ff38
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexDataflowHelper.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+
+/**
+ * Index dataflow helper that builds an InvertedIndex instance on top of an
+ * already-initialized BTree helper: the BTree stores the tokens, while the
+ * inverted lists live in a separate per-partition file.
+ */
+public final class InvertedIndexDataflowHelper extends IndexDataflowHelper {
+    private final TreeIndexDataflowHelper btreeDataflowHelper;
+
+    public InvertedIndexDataflowHelper(TreeIndexDataflowHelper btreeDataflowHelper, IIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition) {
+        super(opDesc, ctx, partition);
+        this.btreeDataflowHelper = btreeDataflowHelper;
+    }
+
+    // Returns this partition's inverted-list file.
+    // NOTE(review): name uses a lowercase 'r' ("Filereference"); if this is meant
+    // to parallel a getFileReference() elsewhere, confirm before renaming since
+    // callers may already depend on this spelling.
+    public FileReference getFilereference() {
+        AbstractInvertedIndexOperatorDescriptor invIndexOpDesc = (AbstractInvertedIndexOperatorDescriptor) opDesc;
+        IFileSplitProvider fileSplitProvider = invIndexOpDesc.getInvListsFileSplitProvider();
+        return fileSplitProvider.getFileSplits()[partition].getLocalFile();
+    }
+
+    /**
+     * Creates the InvertedIndex backed by the BTree helper's index and a
+     * fixed-size-element inverted-list builder.
+     */
+    @Override
+    public IIndex createIndexInstance() throws HyracksDataException {
+        IInvertedIndexOperatorDescriptor invIndexOpDesc = (IInvertedIndexOperatorDescriptor) opDesc;
+        // Assumes btreeDataflowHelper.init() has already been called.
+        BTree btree = (BTree) btreeDataflowHelper.getIndex();
+        IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
+                invIndexOpDesc.getInvListsTypeTraits());
+        return new InvertedIndex(opDesc.getStorageManager().getBufferCache(ctx), btree,
+                invIndexOpDesc.getInvListsTypeTraits(), invIndexOpDesc.getInvListsComparatorFactories(),
+                invListBuilder);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexSearchOperatorDescriptor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexSearchOperatorDescriptor.java
new file mode 100644
index 0000000..497dfb5
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexSearchOperatorDescriptor.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor for inverted-index searches: 1 input (query tuples) and
+ * 1 output (search results). Each partition's push runtime gets its own
+ * search-modifier instance created from the factory.
+ */
+public class InvertedIndexSearchOperatorDescriptor extends AbstractInvertedIndexOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    // Index of the query field within the input record.
+    private final int queryField;
+    private final IInvertedIndexSearchModifierFactory searchModifierFactory;
+
+    public InvertedIndexSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, int queryField,
+            IStorageManagerInterface storageManager, IFileSplitProvider btreeFileSplitProvider,
+            IFileSplitProvider invListsFileSplitProvider, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenComparatorFactories,
+            ITypeTraits[] invListsTypeTraits, IBinaryComparatorFactory[] invListComparatorFactories,
+            IIndexDataflowHelperFactory btreeDataflowHelperFactory, IBinaryTokenizerFactory queryTokenizerFactory,
+            IInvertedIndexSearchModifierFactory searchModifierFactory, RecordDescriptor recDesc, boolean retainInput,
+            IOperationCallbackProvider opCallbackProvider) {
+        super(spec, 1, 1, recDesc, storageManager, btreeFileSplitProvider, invListsFileSplitProvider,
+                indexRegistryProvider, tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits,
+                invListComparatorFactories, queryTokenizerFactory, btreeDataflowHelperFactory, retainInput,
+                opCallbackProvider);
+        this.queryField = queryField;
+        this.searchModifierFactory = searchModifierFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        // Modifiers may be stateful, so create a fresh one per partition runtime.
+        IInvertedIndexSearchModifier searchModifier = searchModifierFactory.createSearchModifier();
+        return new InvertedIndexSearchOperatorNodePushable(this, ctx, partition, queryField, searchModifier,
+                recordDescProvider);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexSearchOperatorNodePushable.java
new file mode 100644
index 0000000..f3080f4
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexSearchOperatorNodePushable.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchPredicate;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;
+
+/**
+ * Push runtime for inverted-index searches. For every input tuple it runs a
+ * search using the configured modifier and query field, then appends each
+ * result element — optionally preceded by the input tuple's fields when
+ * retainInput is set — to the output frame.
+ */
+public class InvertedIndexSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+    private final TreeIndexDataflowHelper btreeDataflowHelper;
+    private final InvertedIndexDataflowHelper invIndexDataflowHelper;
+    // Index of the query-string field within the input record.
+    private final int queryField;
+    private FrameTupleAccessor accessor;
+    private FrameTupleReference tuple;
+    private IRecordDescriptorProvider recordDescProvider;
+    private InvertedIndex invIndex;
+
+    private final InvertedIndexSearchPredicate searchPred;
+    private IIndexAccessor indexAccessor;
+    private IIndexCursor resultCursor;
+
+    private ByteBuffer writeBuffer;
+    private FrameTupleAppender appender;
+    private ArrayTupleBuilder tb;
+    private DataOutput dos;
+
+    private final AbstractInvertedIndexOperatorDescriptor opDesc;
+    // When true, the input tuple's fields are copied into every output tuple.
+    private final boolean retainInput;
+
+    public InvertedIndexSearchOperatorNodePushable(AbstractInvertedIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, int queryField, IInvertedIndexSearchModifier searchModifier,
+            IRecordDescriptorProvider recordDescProvider) {
+        this.opDesc = opDesc;
+        btreeDataflowHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory()
+                .createIndexDataflowHelper(opDesc, ctx, partition);
+        invIndexDataflowHelper = new InvertedIndexDataflowHelper(btreeDataflowHelper, opDesc, ctx, partition);
+        this.queryField = queryField;
+        this.searchPred = new InvertedIndexSearchPredicate(opDesc.getTokenizerFactory().createTokenizer(), searchModifier);
+        this.recordDescProvider = recordDescProvider;
+        this.retainInput = invIndexDataflowHelper.getOperatorDescriptor().getRetainInput();
+    }
+
+    /**
+     * Opens the BTree and inverted index and prepares the output frame,
+     * accessor and search cursor. On failure the failing helper is
+     * deinitialized before rethrowing.
+     */
+    @Override
+    public void open() throws HyracksDataException {
+        RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        accessor = new FrameTupleAccessor(btreeDataflowHelper.getHyracksTaskContext().getFrameSize(), inputRecDesc);
+        tuple = new FrameTupleReference();
+        // BTree.
+        try {
+            btreeDataflowHelper.init(false);
+        } catch (Exception e) {
+            // Cleanup in case of failure.
+            btreeDataflowHelper.deinit();
+            if (e instanceof HyracksDataException) {
+                throw (HyracksDataException) e;
+            } else {
+                throw new HyracksDataException(e);
+            }
+        }
+        // Inverted Index.
+        try {
+            invIndexDataflowHelper.init(false);
+            invIndex = (InvertedIndex) invIndexDataflowHelper.getIndex();
+        } catch (Exception e) {
+            // Cleanup in case of failure.
+            // NOTE(review): the BTree helper is not deinited on this path — confirm
+            // whether btreeDataflowHelper.deinit() should also run here.
+            invIndexDataflowHelper.deinit();
+            if (e instanceof HyracksDataException) {
+                throw (HyracksDataException) e;
+            } else {
+                throw new HyracksDataException(e);
+            }
+        }
+
+        writeBuffer = btreeDataflowHelper.getHyracksTaskContext().allocateFrame();
+        // 'recordDesc' is presumably the output record descriptor inherited from the
+        // superclass — confirm against AbstractUnaryInputUnaryOutputOperatorNodePushable.
+        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
+        dos = tb.getDataOutput();
+        appender = new FrameTupleAppender(btreeDataflowHelper.getHyracksTaskContext().getFrameSize());
+        appender.reset(writeBuffer, true);
+
+        indexAccessor = invIndex.createAccessor();
+        resultCursor = indexAccessor.createSearchCursor();
+        writer.open();
+    }
+
+    // Drains the result cursor into the output frame, flushing full frames.
+    private void writeSearchResults() throws Exception {
+        while (resultCursor.hasNext()) {
+            resultCursor.next();
+            tb.reset();
+            if (retainInput) {
+                // Copy the input tuple's fields ahead of the result fields.
+                for (int i = 0; i < tuple.getFieldCount(); i++) {
+                	dos.write(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+                    tb.addFieldEndOffset();
+                }
+            }
+            ITupleReference invListElement = resultCursor.getTuple();
+            int invListFields = opDesc.getInvListsTypeTraits().length;
+            for (int i = 0; i < invListFields; i++) {
+                dos.write(invListElement.getFieldData(i), invListElement.getFieldStart(i),
+                        invListElement.getFieldLength(i));
+                tb.addFieldEndOffset();
+            }
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                // Frame full: flush, then retry once; a tuple bigger than a frame is fatal.
+                FrameUtils.flushFrame(writeBuffer, writer);
+                appender.reset(writeBuffer, true);
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    throw new IllegalStateException();
+                }
+            }
+        }
+    }
+
+    // Runs one search per input tuple; panic exceptions from the modifier are ignored.
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        try {
+            for (int i = 0; i < tupleCount; i++) {
+                tuple.reset(accessor, i);
+                searchPred.setQueryTuple(tuple);
+                searchPred.setQueryFieldIndex(queryField);
+                try {
+                    resultCursor.reset();
+                    indexAccessor.search(resultCursor, searchPred);
+                    writeSearchResults();
+                } catch (OccurrenceThresholdPanicException e) {
+                    // Ignore panic cases for now.
+                }
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        writer.fail();
+    }
+
+    /**
+     * Flushes any buffered output, closes the writer, and always deinitializes
+     * both index helpers (the nested finally ensures the inverted-index helper
+     * is deinited even if the BTree deinit throws).
+     */
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            if (appender.getTupleCount() > 0) {
+                FrameUtils.flushFrame(writeBuffer, writer);
+            }
+            writer.close();
+        } finally {
+            try {
+                btreeDataflowHelper.deinit();
+            } finally {
+                invIndexDataflowHelper.deinit();
+            }
+        }
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListBuilder.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListBuilder.java
new file mode 100644
index 0000000..c4ab601
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListBuilder.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+
+/**
+ * Builds inverted lists whose elements all have the same, fixed size (the sum
+ * of the fixed lengths of the element fields, per the supplied type traits).
+ * Elements are written directly into a caller-supplied target buffer.
+ */
+public class FixedSizeElementInvertedListBuilder implements IInvertedListBuilder {
+    private final int listElementSize;
+    private int listSize = 0;
+
+    private byte[] targetBuf;
+    private int pos;
+
+    public FixedSizeElementInvertedListBuilder(ITypeTraits[] invListFields) {
+        int tmp = 0;
+        for (int i = 0; i < invListFields.length; i++) {
+            tmp += invListFields[i].getFixedLength();
+        }
+        listElementSize = tmp;
+    }
+
+    /**
+     * Starts a new list at the current position; returns false if the target
+     * buffer has no room for even one element.
+     */
+    @Override
+    public boolean startNewList(ITupleReference tuple, int tokenField) {
+        // NOTE(review): '>=' rejects an element that would fit exactly at the end
+        // of the buffer; confirm whether '>' is intended.
+        if (pos + listElementSize >= targetBuf.length)
+            return false;
+        else {
+            listSize = 0;
+            return true;
+        }
+    }
+
+    /**
+     * Appends one element — the tuple fields in
+     * [numTokenFields, numTokenFields + numElementFields) — to the target
+     * buffer. Returns false when the buffer is full.
+     */
+    @Override
+    public boolean appendElement(ITupleReference tuple, int numTokenFields, int numElementFields) {
+        if (pos + listElementSize >= targetBuf.length)
+            return false;
+
+        // Copy each element field, advancing the destination offset per field.
+        // (Previously every field was copied to the same offset 'pos', so any
+        // element with more than one field overwrote its earlier fields.)
+        int fieldPos = pos;
+        for (int i = 0; i < numElementFields; i++) {
+            int field = numTokenFields + i;
+            int fieldLen = tuple.getFieldLength(field);
+            System.arraycopy(tuple.getFieldData(field), tuple.getFieldStart(field), targetBuf, fieldPos, fieldLen);
+            fieldPos += fieldLen;
+        }
+
+        listSize++;
+        pos += listElementSize;
+
+        return true;
+    }
+
+    @Override
+    public void setTargetBuffer(byte[] targetBuf, int startPos) {
+        this.targetBuf = targetBuf;
+        this.pos = startPos;
+    }
+
+    @Override
+    public int getListSize() {
+        return listSize;
+    }
+
+    @Override
+    public int getPos() {
+        return pos;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListCursor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListCursor.java
new file mode 100644
index 0000000..446f171
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListCursor.java
@@ -0,0 +1,280 @@
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class FixedSizeElementInvertedListCursor implements IInvertedListCursor {
+
+    private final IBufferCache bufferCache;
+    private final int fileId;
+    // Size in bytes of one fixed-size inverted-list element (sum of field lengths).
+    private final int elementSize;
+    // Cursor state: element index within the list, byte offset within the
+    // current page, and index into the locally pinned 'pages' array.
+    private int currentElementIx;
+    private int currentOff;
+    private int currentPageIx;
+
+    // Physical extent of the current inverted list; set by reset().
+    private int startPageId;
+    private int endPageId;
+    private int startOff;
+    private int numElements;
+
+    // Reusable tuple reference pointing at the current element.
+    private final FixedSizeTupleReference tuple;
+    // Pinned pages of the current list; both arrays grow on demand in reset().
+    private ICachedPage[] pages = new ICachedPage[10];
+    // elementIndexes[i] == index of the last element stored on pinned page i.
+    private int[] elementIndexes = new int[10];
+
+    /**
+     * Cursor over a paginated inverted list whose elements all have the same
+     * fixed size, derived from the given field type traits.
+     */
+    public FixedSizeElementInvertedListCursor(IBufferCache bufferCache, int fileId, ITypeTraits[] invListFields) {
+        this.bufferCache = bufferCache;
+        this.fileId = fileId;
+        this.currentElementIx = 0;
+        this.currentPageIx = 0;
+
+        int tmp = 0;
+        for (int i = 0; i < invListFields.length; i++) {
+            tmp += invListFields[i].getFixedLength();
+        }
+        elementSize = tmp;
+        // Start one element before the list so the first next() lands on element 0.
+        this.currentOff = -elementSize;
+        this.tuple = new FixedSizeTupleReference(invListFields);
+    }
+
+    // True while there are unvisited elements; next() must be called before
+    // reading the first element.
+    @Override
+    public boolean hasNext() {
+        if (currentElementIx < numElements)
+            return true;
+        else
+            return false;
+    }
+
+    // Advances the cursor by one element and resets 'tuple' onto it.
+    @Override
+    public void next() {
+        // If the next element would end at or past the page boundary, continue
+        // on the next pinned page. NOTE(review): an element ending exactly at
+        // the page boundary is treated as not fitting; this must mirror the
+        // writer's layout decision -- confirm against the list builder.
+        if (currentOff + 2 * elementSize >= bufferCache.getPageSize()) {
+            currentPageIx++;
+            currentOff = 0;
+        } else {
+            currentOff += elementSize;
+        }
+
+        currentElementIx++;
+        tuple.reset(pages[currentPageIx].getBuffer().array(), currentOff);
+    }
+
+    @Override
+    public void pinPagesAsync() {
+        // TODO: implement
+    }
+
+    // Pins and read-latches every page of the current list, in order.
+    @Override
+    public void pinPagesSync() throws HyracksDataException {
+        int pix = 0;
+        for (int i = startPageId; i <= endPageId; i++) {
+            pages[pix] = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, i), false);
+            pages[pix].acquireReadLatch();
+            pix++;
+        }
+    }
+
+    // Releases the latches and unpins all pages acquired by pinPagesSync().
+    @Override
+    public void unpinPages() throws HyracksDataException {
+        int numPages = endPageId - startPageId + 1;
+        for (int i = 0; i < numPages; i++) {
+            pages[i].releaseReadLatch();
+            bufferCache.unpin(pages[i]);
+        }
+    }
+
+    /**
+     * Positions the cursor directly on element elementIx: binary-searches
+     * elementIndexes for the page holding it, then computes the byte offset
+     * within that page. Requires the list's pages to be pinned.
+     */
+    @Override
+    public void positionCursor(int elementIx) {
+        int numPages = endPageId - startPageId + 1;
+
+        currentPageIx = binarySearch(elementIndexes, 0, numPages, elementIx);
+        if (currentPageIx < 0) {
+            throw new IndexOutOfBoundsException("Requested index: " + elementIx + " from array with numElements: "
+                    + numElements);
+        }
+
+        if (currentPageIx == 0) {
+            // First page: elements begin at startOff.
+            currentOff = startOff + elementIx * elementSize;
+        } else {
+            // Later pages: elements begin at offset 0; the index is relative
+            // to the last element of the previous page.
+            int relativeElementIx = elementIx - elementIndexes[currentPageIx - 1] - 1;
+            currentOff = relativeElementIx * elementSize;
+        }
+
+        currentElementIx = elementIx;
+        tuple.reset(pages[currentPageIx].getBuffer().array(), currentOff);
+    }
+
+    /**
+     * Binary-searches the (sorted) list for searchTuple. Repositions the
+     * cursor as a side effect.
+     */
+    @Override
+    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) {
+        int mid;
+        int begin = 0;
+        int end = numElements - 1;
+
+        while (begin <= end) {
+            mid = (begin + end) / 2;
+            positionCursor(mid);
+            int cmp = invListCmp.compare(searchTuple, tuple);
+            if (cmp < 0) {
+                end = mid - 1;
+            } else if (cmp > 0) {
+                begin = mid + 1;
+            } else {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Re-targets the cursor at a new inverted list and precomputes, per page,
+     * the index of the last element stored on that page.
+     */
+    @Override
+    public void reset(int startPageId, int endPageId, int startOff, int numElements) {
+        this.startPageId = startPageId;
+        this.endPageId = endPageId;
+        this.startOff = startOff;
+        this.numElements = numElements;
+        this.currentElementIx = 0;
+        this.currentPageIx = 0;
+        this.currentOff = startOff - elementSize;
+
+        int numPages = endPageId - startPageId + 1;
+        if (numPages > pages.length) {
+            pages = new ICachedPage[endPageId - startPageId + 1];
+            elementIndexes = new int[endPageId - startPageId + 1];
+        }
+
+        // fill elementIndexes
+        // first page
+        int cumulElements = (bufferCache.getPageSize() - startOff) / elementSize;
+        elementIndexes[0] = cumulElements - 1;
+
+        // middle, full pages
+        for (int i = 1; i < numPages - 1; i++) {
+            elementIndexes[i] = elementIndexes[i - 1] + (bufferCache.getPageSize() / elementSize);
+        }
+
+        // last page
+        elementIndexes[numPages - 1] = numElements - 1;
+    }
+
+    /**
+     * Debug helper: renders the whole list using the given serdes, then
+     * restores the previous cursor position.
+     */
+    @Override
+    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException {
+        int oldCurrentOff = currentOff;
+        int oldCurrentPageId = currentPageIx;
+        int oldCurrentElementIx = currentElementIx;
+
+        currentOff = startOff - elementSize;
+        currentPageIx = 0;
+        currentElementIx = 0;
+
+        StringBuilder strBuilder = new StringBuilder();
+
+        int count = 0;
+        while (hasNext()) {
+            next();
+            count++;
+            for (int i = 0; i < tuple.getFieldCount(); i++) {
+                ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+                        tuple.getFieldLength(i));
+                DataInput dataIn = new DataInputStream(inStream);
+                Object o = serdes[i].deserialize(dataIn);
+                strBuilder.append(o.toString());
+                if (i + 1 < tuple.getFieldCount())
+                    strBuilder.append(",");
+            }
+            strBuilder.append(" ");
+        }
+
+        // reset previous state
+        currentOff = oldCurrentOff;
+        currentPageIx = oldCurrentPageId;
+        currentElementIx = oldCurrentElementIx;
+
+        return strBuilder.toString();
+    }
+
+    // Debug helper: renders the element the cursor currently points at.
+    public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException {
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Object o = serdes[i].deserialize(dataIn);
+            strBuilder.append(o.toString());
+            if (i + 1 < tuple.getFieldCount())
+                strBuilder.append(",");
+        }
+        return strBuilder.toString();
+    }
+
+    /**
+     * Finds the first slot in arr whose value is >= key, or -1 if key is
+     * greater than all searched values.
+     * NOTE(review): the fall-through bound checks use arr.length rather than
+     * arrStart + arrLength, so stale entries beyond the searched range could
+     * be consulted when arrLength < arr.length. positionCursor only passes
+     * in-range element indexes today, but confirm before widening callers.
+     */
+    private int binarySearch(int[] arr, int arrStart, int arrLength, int key) {
+        int mid;
+        int begin = arrStart;
+        int end = arrStart + arrLength - 1;
+
+        while (begin <= end) {
+            mid = (begin + end) / 2;
+            int cmp = (key - arr[mid]);
+            if (cmp < 0) {
+                end = mid - 1;
+            } else if (cmp > 0) {
+                begin = mid + 1;
+            } else {
+                return mid;
+            }
+        }
+
+        if (begin > arr.length - 1)
+            return -1;
+        if (key < arr[begin])
+            return begin;
+        else
+            return -1;
+    }
+
+    // Orders cursors by list length; both counts are non-negative ints, so
+    // the subtraction cannot overflow.
+    @Override
+    public int compareTo(IInvertedListCursor invListCursor) {
+        return numElements - invListCursor.getNumElements();
+    }
+
+    @Override
+    public int getEndPageId() {
+        return endPageId;
+    }
+
+    @Override
+    public int getNumElements() {
+        return numElements;
+    }
+
+    @Override
+    public int getStartOff() {
+        return startOff;
+    }
+
+    @Override
+    public int getStartPageId() {
+        return startPageId;
+    }
+
+    // Byte offset of the current element within its page.
+    public int getOffset() {
+        return currentOff;
+    }
+
+    // Pinned page holding the current element.
+    public ICachedPage getPage() {
+        return pages[currentPageIx];
+    }
+
+    @Override
+    public ITupleReference getTuple() {
+        return tuple;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAccessor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAccessor.java
new file mode 100644
index 0000000..cbedc45
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAccessor.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+
+public class FixedSizeFrameTupleAccessor implements IFrameTupleAccessor {
+
+    private final int frameSize;
+    private ByteBuffer buffer;
+
+    // Field type traits, per-field start offsets within a tuple, and the
+    // total fixed tuple size in bytes.
+    private final ITypeTraits[] fields;
+    private final int[] fieldStartOffsets;
+    private final int tupleSize;
+
+    /**
+     * Accessor for frames holding fixed-size tuples packed densely from
+     * offset 0, with the tuple count stored at the offset given by
+     * FrameHelper.getTupleCountOffset(frameSize).
+     */
+    public FixedSizeFrameTupleAccessor(int frameSize, ITypeTraits[] fields) {
+        this.frameSize = frameSize;
+        this.fields = fields;
+        this.fieldStartOffsets = new int[fields.length];
+        this.fieldStartOffsets[0] = 0;
+        for (int i = 1; i < fields.length; i++) {
+            fieldStartOffsets[i] = fieldStartOffsets[i - 1] + fields[i - 1].getFixedLength();
+        }
+
+        int tmp = 0;
+        for (int i = 0; i < fields.length; i++) {
+            tmp += fields[i].getFixedLength();
+        }
+        tupleSize = tmp;
+    }
+
+    @Override
+    public ByteBuffer getBuffer() {
+        return buffer;
+    }
+
+    @Override
+    public int getFieldCount() {
+        return fields.length;
+    }
+
+    @Override
+    public int getFieldEndOffset(int tupleIndex, int fIdx) {
+        return getTupleStartOffset(tupleIndex) + fieldStartOffsets[fIdx] + fields[fIdx].getFixedLength();
+    }
+
+    @Override
+    public int getFieldLength(int tupleIndex, int fIdx) {
+        // All fields are fixed-size, so the length is independent of the tuple.
+        return fields[fIdx].getFixedLength();
+    }
+
+    @Override
+    public int getFieldSlotsLength() {
+        // Fixed-size tuples need no per-field slot (offset) array.
+        return 0;
+    }
+
+    @Override
+    public int getFieldStartOffset(int tupleIndex, int fIdx) {
+        return tupleIndex * tupleSize + fieldStartOffsets[fIdx];
+    }
+
+    @Override
+    public int getTupleCount() {
+        // Tuple count is read from the frame at FrameHelper's count offset.
+        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize));
+    }
+
+    @Override
+    public int getTupleEndOffset(int tupleIndex) {
+        return getFieldEndOffset(tupleIndex, fields.length - 1);
+    }
+
+    @Override
+    public int getTupleStartOffset(int tupleIndex) {
+        return tupleIndex * tupleSize;
+    }
+
+    @Override
+    public void reset(ByteBuffer buffer) {
+        this.buffer = buffer;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAppender.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAppender.java
new file mode 100644
index 0000000..489ec2e
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAppender.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+
+public class FixedSizeFrameTupleAppender {
+
+    // Bytes reserved in the frame for the tuple count (see space checks below).
+    private static final int TUPLE_COUNT_SIZE = 4;
+    private final int frameSize;
+    // Total fixed size in bytes of one tuple (sum of field lengths).
+    private final int tupleSize;
+    private ByteBuffer buffer;
+    // Local count of appended tuples; written to the frame only via
+    // incrementTupleCount().
+    private int tupleCount;
+    // Next write position within the frame.
+    private int tupleDataEndOffset;
+
+    /**
+     * Appends fixed-size tuples densely from offset 0 of a frame, reserving
+     * TUPLE_COUNT_SIZE bytes for the count kept at
+     * FrameHelper.getTupleCountOffset(frameSize).
+     */
+    public FixedSizeFrameTupleAppender(int frameSize, ITypeTraits[] fields) {
+        this.frameSize = frameSize;
+        int tmp = 0;
+        for (int i = 0; i < fields.length; i++) {
+            tmp += fields[i].getFixedLength();
+        }
+        tupleSize = tmp;
+    }
+
+    /**
+     * Targets a new frame. When clear is true, the frame's stored tuple count
+     * and the local write state are reset; otherwise appending resumes with
+     * the current local state.
+     */
+    public void reset(ByteBuffer buffer, boolean clear) {
+        this.buffer = buffer;
+        if (clear) {
+            buffer.putInt(FrameHelper.getTupleCountOffset(frameSize), 0);
+            tupleCount = 0;
+            tupleDataEndOffset = 0;
+        }
+    }
+
+    // Appends one whole tuple (tupleSize bytes) from the given array.
+    public boolean append(byte[] bytes, int offset) {
+        if (tupleDataEndOffset + tupleSize + TUPLE_COUNT_SIZE <= frameSize) {
+            System.arraycopy(bytes, offset, buffer.array(), tupleDataEndOffset, tupleSize);
+            tupleDataEndOffset += tupleSize;
+            tupleCount++;
+            return true;
+        }
+        return false;
+    }
+
+    // Appends 'length' raw bytes. NOTE(review): unlike the other append
+    // variants this does not increment tupleCount; callers appear to account
+    // for tuples themselves -- confirm before relying on getTupleCount().
+    public boolean append(byte[] bytes, int offset, int length) {
+        if (tupleDataEndOffset + length + TUPLE_COUNT_SIZE <= frameSize) {
+            System.arraycopy(bytes, offset, buffer.array(), tupleDataEndOffset, length);
+            tupleDataEndOffset += length;
+            return true;
+        }
+        return false;
+    }
+
+    // Appends a single 4-byte int field.
+    public boolean append(int fieldValue) {
+        if (tupleDataEndOffset + 4 + TUPLE_COUNT_SIZE <= frameSize) {
+            buffer.putInt(tupleDataEndOffset, fieldValue);
+            tupleDataEndOffset += 4;
+            tupleCount++;
+            return true;
+        }
+        return false;
+    }
+
+    // Appends a single 8-byte long field.
+    public boolean append(long fieldValue) {
+        if (tupleDataEndOffset + 8 + TUPLE_COUNT_SIZE <= frameSize) {
+            buffer.putLong(tupleDataEndOffset, fieldValue);
+            tupleDataEndOffset += 8;
+            tupleCount++;
+            return true;
+        }
+        return false;
+    }
+
+    // Appends a single 2-byte char field.
+    public boolean append(char fieldValue) {
+        if (tupleDataEndOffset + 2 + TUPLE_COUNT_SIZE <= frameSize) {
+            // Bug fix: previously used putLong, which wrote 8 bytes for a
+            // 2-byte char, clobbering neighboring data and potentially
+            // writing past the checked bound near the end of the frame.
+            buffer.putChar(tupleDataEndOffset, fieldValue);
+            tupleDataEndOffset += 2;
+            tupleCount++;
+            return true;
+        }
+        return false;
+    }
+
+    // Appends a single byte field.
+    public boolean append(byte fieldValue) {
+        if (tupleDataEndOffset + 1 + TUPLE_COUNT_SIZE <= frameSize) {
+            buffer.put(tupleDataEndOffset, fieldValue);
+            tupleDataEndOffset += 1;
+            tupleCount++;
+            return true;
+        }
+        return false;
+    }
+
+    // returns true if an entire tuple fits
+    // returns false otherwise
+    public boolean hasSpace() {
+        return tupleDataEndOffset + tupleSize + TUPLE_COUNT_SIZE <= frameSize;
+    }
+
+    // Adds 'count' to the tuple count stored in the frame itself.
+    public void incrementTupleCount(int count) {
+        buffer.putInt(FrameHelper.getTupleCountOffset(frameSize),
+                buffer.getInt(FrameHelper.getTupleCountOffset(frameSize)) + count);
+    }
+
+    public int getTupleCount() {
+        return tupleCount;
+    }
+
+    public ByteBuffer getBuffer() {
+        return buffer;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeTupleReference.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeTupleReference.java
new file mode 100644
index 0000000..0656d69
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeTupleReference.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class FixedSizeTupleReference implements ITupleReference {
+
+    // Field type traits; field i occupies typeTraits[i].getFixedLength() bytes.
+    private final ITypeTraits[] typeTraits;
+    // Start offset of each field relative to the tuple start.
+    private final int[] fieldStartOffsets;
+    // Backing array and the tuple's start offset within it (set by reset()).
+    private byte[] data;
+    private int startOff;
+
+    /**
+     * Reusable reference to a tuple of fixed-size fields laid out back to
+     * back; field offsets are precomputed once from the type traits.
+     */
+    public FixedSizeTupleReference(ITypeTraits[] typeTraits) {
+        this.typeTraits = typeTraits;
+        this.fieldStartOffsets = new int[typeTraits.length];
+        this.fieldStartOffsets[0] = 0;
+        for (int i = 1; i < typeTraits.length; i++) {
+            fieldStartOffsets[i] = fieldStartOffsets[i - 1] + typeTraits[i - 1].getFixedLength();
+        }
+    }
+
+    // Points this reference at a tuple starting at startOff within data.
+    public void reset(byte[] data, int startOff) {
+        this.data = data;
+        this.startOff = startOff;
+    }
+
+    @Override
+    public int getFieldCount() {
+        return typeTraits.length;
+    }
+
+    @Override
+    public byte[] getFieldData(int fIdx) {
+        // All fields share the single backing array.
+        return data;
+    }
+
+    @Override
+    public int getFieldLength(int fIdx) {
+        return typeTraits[fIdx].getFixedLength();
+    }
+
+    @Override
+    public int getFieldStart(int fIdx) {
+        return startOff + fieldStartOffsets[fIdx];
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
new file mode 100644
index 0000000..3525fc3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
@@ -0,0 +1,358 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+/**
+ * An inverted index consists of two files: 1. a file storing (paginated)
+ * inverted lists 2. a BTree-file mapping from tokens to inverted lists.
+ * Implemented features: bulk loading and searching (based on T-Occurrence) Not
+ * implemented features: updates (insert/update/delete) Limitations: a query
+ * cannot exceed the size of a Hyracks frame.
+ */
+public class InvertedIndex implements IIndex {
+    // Context handed to searchers; presumably supplies the frame size --
+    // TODO confirm DefaultHyracksCommonContext semantics.
+    private final IHyracksCommonContext ctx = new DefaultHyracksCommonContext();
+
+    // BTree mapping each token to the location of its inverted list.
+    private BTree btree;
+    // First page of the inverted-lists file used when bulk loading.
+    private int rootPageId = 0;
+    private IBufferCache bufferCache;
+    private int fileId;
+    // Traits and comparators of the inverted-list element fields.
+    private final ITypeTraits[] invListTypeTraits;
+    private final IBinaryComparatorFactory[] invListCmpFactories;
+    private final IInvertedListBuilder invListBuilder;
+    // Leading tuple fields are btree (token) keys; the rest are list-element keys.
+    private final int numTokenFields;
+    private final int numInvListKeys;
+
+    // Wires together the inverted-lists file (via bufferCache) and the token
+    // btree; field counts are derived from the comparator factories.
+    public InvertedIndex(IBufferCache bufferCache, BTree btree, ITypeTraits[] invListTypeTraits,
+            IBinaryComparatorFactory[] invListCmpFactories, IInvertedListBuilder invListBuilder) {
+        this.bufferCache = bufferCache;
+        this.btree = btree;
+        this.invListTypeTraits = invListTypeTraits;
+        this.invListCmpFactories = invListCmpFactories;
+        this.invListBuilder = invListBuilder;
+        this.numTokenFields = btree.getComparatorFactories().length;
+        this.numInvListKeys = invListCmpFactories.length;
+    }
+
+    // Associates this index with the given inverted-lists file id.
+    @Override
+    public void open(int fileId) {
+        this.fileId = fileId;
+    }
+
+    // Intentionally a no-op; on-disk initialization is presumably handled
+    // elsewhere -- TODO confirm against the dataflow helpers that call this.
+    @Override
+    public void create(int indexFileId) throws HyracksDataException {
+    }
+
+    // Marks the index closed; -1 denotes "no file attached".
+    @Override
+    public void close() {
+        this.fileId = -1;
+    }
+
+    /**
+     * Looks up the btree entry matching btreePred and, if found, resets
+     * invListCursor to that list's extent. Returns true iff a matching token
+     * exists; otherwise the cursor is reset to an empty (0,0,0,0) list.
+     */
+    public boolean openCursor(ITreeIndexCursor btreeCursor, RangePredicate btreePred, ITreeIndexAccessor btreeAccessor,
+            IInvertedListCursor invListCursor) throws HyracksDataException, IndexException {
+        btreeAccessor.search(btreeCursor, btreePred);
+        boolean ret = false;
+        try {
+            if (btreeCursor.hasNext()) {
+                btreeCursor.next();
+                ITupleReference frameTuple = btreeCursor.getTuple();
+                // Hardcoded mapping of btree fields
+                // (must stay in sync with createAndInsertBTreeTuple:
+                // 1=startPageId, 2=endPageId, 3=startOff, 4=numElements).
+                int startPageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(1),
+                        frameTuple.getFieldStart(1));
+                int endPageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(2),
+                        frameTuple.getFieldStart(2));
+                int startOff = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(3),
+                        frameTuple.getFieldStart(3));
+                int numElements = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(4),
+                        frameTuple.getFieldStart(4));
+                invListCursor.reset(startPageId, endPageId, startOff, numElements);
+                ret = true;
+            } else {
+                invListCursor.reset(0, 0, 0, 0);
+            }
+        } finally {
+            // Always release the btree cursor, even if deserialization fails.
+            btreeCursor.close();
+            btreeCursor.reset();
+        }
+        return ret;
+    }
+
+    // Starts a bulk load: pins the first inverted-lists page and opens the
+    // btree's own bulk-load context (see InvertedIndexBulkLoadContext.init).
+    @Override
+    public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws TreeIndexException, HyracksDataException {
+        InvertedIndexBulkLoadContext ctx = new InvertedIndexBulkLoadContext(fillFactor);
+        ctx.init(rootPageId, fileId);
+        return ctx;
+    }
+
+    /**
+     * Assumptions:
+     * The first btree.getMultiComparator().getKeyFieldCount() fields in tuple
+     * are btree keys (e.g., a string token).
+     * The next invListCmp.getKeyFieldCount() fields in tuple are keys of the
+     * inverted list (e.g., primary key).
+     * Key fields of inverted list are fixed size.
+     * NOTE(review): tuples must arrive sorted on (token, element keys); only
+     * the immediately preceding tuple is consulted for list breaks and
+     * duplicate elimination -- confirm the feeding operator sorts.
+     */
+    @Override
+    public void bulkLoadAddTuple(ITupleReference tuple, IIndexBulkLoadContext ictx) throws HyracksDataException {
+        InvertedIndexBulkLoadContext ctx = (InvertedIndexBulkLoadContext) ictx;
+        // An empty lastTupleBuilder means this is the very first tuple.
+        boolean firstElement = ctx.lastTupleBuilder.getSize() == 0;
+        boolean startNewList = firstElement;
+        if (!firstElement) {
+            // If the current and the last token don't match, we start a new list.
+            ctx.lastTuple.reset(ctx.lastTupleBuilder.getFieldEndOffsets(), ctx.lastTupleBuilder.getByteArray());
+            startNewList = ctx.tokenCmp.compare(tuple, ctx.lastTuple) != 0;
+        }
+        if (startNewList) {
+            if (!firstElement) {
+                // Create entry in btree for last inverted list.
+                createAndInsertBTreeTuple(ctx);
+            }
+            // If the new list does not fit on the current page, retry on a
+            // freshly pinned page.
+            if (!invListBuilder.startNewList(tuple, numTokenFields)) {
+                ctx.pinNextPage();
+                invListBuilder.setTargetBuffer(ctx.currentPage.getBuffer().array(), 0);
+                if (!invListBuilder.startNewList(tuple, numTokenFields)) {
+                    throw new IllegalStateException("Failed to create first inverted list.");
+                }
+            }
+            ctx.currentInvListStartPageId = ctx.currentPageId;
+            ctx.currentInvListStartOffset = invListBuilder.getPos();
+        } else {
+            if (ctx.invListCmp.compare(tuple, ctx.lastTuple, numTokenFields) == 0) {
+                // Duplicate inverted-list element.
+                return;
+            }
+        }
+
+        // Append to current inverted list.
+        if (!invListBuilder.appendElement(tuple, numTokenFields, numInvListKeys)) {
+            ctx.pinNextPage();
+            invListBuilder.setTargetBuffer(ctx.currentPage.getBuffer().array(), 0);
+            if (!invListBuilder.appendElement(tuple, numTokenFields, numInvListKeys)) {
+                throw new IllegalStateException(
+                        "Failed to append element to inverted list after switching to a new page.");
+            }
+        }
+
+        // Remember last tuple by creating a copy.
+        // TODO: This portion can be optimized by only copying the token when it changes, and using the last appended inverted-list element as a reference.
+        ctx.lastTupleBuilder.reset();
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            ctx.lastTupleBuilder.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+        }
+    }
+
+    /**
+     * Builds and bulk-loads the btree entry for the list that just ended:
+     * field 0 = token, 1 = start page, 2 = end page (the current page),
+     * 3 = start offset, 4 = element count.
+     */
+    private void createAndInsertBTreeTuple(InvertedIndexBulkLoadContext ctx) throws HyracksDataException {
+        // Build tuple.        
+        ctx.btreeTupleBuilder.reset();
+        ctx.btreeTupleBuilder.addField(ctx.lastTuple.getFieldData(0), ctx.lastTuple.getFieldStart(0),
+                ctx.lastTuple.getFieldLength(0));
+        ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, ctx.currentInvListStartPageId);
+        ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, ctx.currentPageId);
+        ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, ctx.currentInvListStartOffset);
+        ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, invListBuilder.getListSize());
+        // Reset tuple reference and add it.
+        ctx.btreeTupleReference.reset(ctx.btreeTupleBuilder.getFieldEndOffsets(), ctx.btreeTupleBuilder.getByteArray());
+        btree.bulkLoadAddTuple(ctx.btreeTupleReference, ctx.btreeBulkLoadCtx);
+    }
+
+    // Finalizes a bulk load: flushes the btree entry for the last list,
+    // ends the btree bulk load, and unpins the last inverted-lists page.
+    // NOTE(review): lastTuple is only reset inside bulkLoadAddTuple when a
+    // second tuple arrives, so after a single-tuple load getFieldData(0) is
+    // still null and the final list's btree entry is silently skipped --
+    // verify this case.
+    @Override
+    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
+        // Create entry in btree for last inverted list.
+        InvertedIndexBulkLoadContext ctx = (InvertedIndexBulkLoadContext) ictx;
+        if (ctx.lastTuple.getFieldData(0) != null) {
+            createAndInsertBTreeTuple(ctx);
+        }
+        btree.endBulkLoad(ctx.btreeBulkLoadCtx);
+        ctx.deinit();
+    }
+
+    public final class InvertedIndexBulkLoadContext implements IIndexBulkLoadContext {
+        // Scratch state for building btree entries (token -> list location).
+        private final ArrayTupleBuilder btreeTupleBuilder;
+        private final ArrayTupleReference btreeTupleReference;
+        private final float btreeFillFactor;
+        private IIndexBulkLoadContext btreeBulkLoadCtx;
+
+        // Location (page id, offset) where the list currently being built began.
+        private int currentInvListStartPageId;
+        private int currentInvListStartOffset;
+        // Copy of the most recently added tuple, used to detect token changes
+        // and duplicate elements.
+        private final ArrayTupleBuilder lastTupleBuilder;
+        private final ArrayTupleReference lastTuple;
+
+        // Currently pinned (write-latched) inverted-lists page being written.
+        private int currentPageId;
+        private ICachedPage currentPage;
+        // Comparators over the token fields and the list-element key fields.
+        private final MultiComparator tokenCmp;
+        private final MultiComparator invListCmp;
+
+        public InvertedIndexBulkLoadContext(float btreeFillFactor) {
+            this.tokenCmp = MultiComparator.create(btree.getComparatorFactories());
+            this.invListCmp = MultiComparator.create(invListCmpFactories);
+            this.btreeTupleBuilder = new ArrayTupleBuilder(btree.getFieldCount());
+            this.btreeTupleReference = new ArrayTupleReference();
+            this.btreeFillFactor = btreeFillFactor;
+            this.lastTupleBuilder = new ArrayTupleBuilder(numTokenFields + numInvListKeys);
+            this.lastTuple = new ArrayTupleReference();
+        }
+
+        // Pins (and write-latches) the first inverted-lists page and starts
+        // the btree's bulk load.
+        public void init(int startPageId, int fileId) throws HyracksDataException, TreeIndexException {
+            btreeBulkLoadCtx = btree.beginBulkLoad(btreeFillFactor);
+            currentPageId = startPageId;
+            currentPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
+            currentPage.acquireWriteLatch();
+            invListBuilder.setTargetBuffer(currentPage.getBuffer().array(), 0);
+        }
+
+        // Releases and unpins the last page held by the load.
+        public void deinit() throws HyracksDataException {
+            if (currentPage != null) {
+                currentPage.releaseWriteLatch();
+                bufferCache.unpin(currentPage);
+            }
+        }
+
+        // Moves writing to the next page, releasing the previous one first.
+        public void pinNextPage() throws HyracksDataException {
+            currentPage.releaseWriteLatch();
+            bufferCache.unpin(currentPage);
+            currentPageId++;
+            currentPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
+            currentPage.acquireWriteLatch();
+        }
+    }
+
+    /** @return the buffer cache backing the inverted-lists file. */
+    @Override
+    public IBufferCache getBufferCache() {
+        return this.bufferCache;
+    }
+
+    /** @return the file id of the on-disk inverted-lists file. */
+    public int getInvListsFileId() {
+        return this.fileId;
+    }
+
+    /** @return the comparator factories for inverted-list elements. */
+    public IBinaryComparatorFactory[] getInvListElementCmpFactories() {
+        return this.invListCmpFactories;
+    }
+
+    /** @return the type traits of the inverted-list element fields. */
+    public ITypeTraits[] getTypeTraits() {
+        return this.invListTypeTraits;
+    }
+
+    /** @return the BTree mapping tokens to inverted-list locations. */
+    public BTree getBTree() {
+        return this.btree;
+    }
+
+    /**
+     * Accessor for searching this bulk-loaded inverted index. Only search is
+     * supported; the mutation operations fail fast instead of silently doing
+     * nothing (the original TODO stubs were no-ops, which would mask caller
+     * bugs by appearing to succeed).
+     */
+    public class InvertedIndexAccessor implements IIndexAccessor {
+        private final IInvertedIndexSearcher searcher;
+
+        public InvertedIndexAccessor(InvertedIndex index) {
+            // Each accessor owns its own searcher (and its working buffers),
+            // so separate accessors do not share mutable search state.
+            this.searcher = new TOccurrenceSearcher(ctx, index);
+        }
+
+        @Override
+        public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+            throw new UnsupportedOperationException("Insert not supported by inverted index.");
+        }
+
+        @Override
+        public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+            throw new UnsupportedOperationException("Update not supported by inverted index.");
+        }
+
+        @Override
+        public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+            throw new UnsupportedOperationException("Delete not supported by inverted index.");
+        }
+
+        @Override
+        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            throw new UnsupportedOperationException("Upsert not supported by inverted index.");
+        }
+
+        @Override
+        public IIndexCursor createSearchCursor() {
+            return new InvertedIndexSearchCursor(searcher);
+        }
+
+        @Override
+        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
+                IndexException {
+            searcher.search((InvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred);
+        }
+
+        public IInvertedIndexSearcher getSearcher() {
+            return searcher;
+        }
+    }
+
+    /** Creates a new, independent accessor for searching this index. */
+    @Override
+    public IIndexAccessor createAccessor() {
+        return new InvertedIndexAccessor(this);
+    }
+
+    /** @return always {@link IndexType#INVERTED}. */
+    @Override
+    public IndexType getIndexType() {
+        return IndexType.INVERTED;
+    }
+
+    // This is just a dummy hyracks context for allocating frames for temporary
+    // results during inverted index searches.
+    // TODO: In the future we should use the real HyracksTaskContext to track
+    // frame usage.
+    private class DefaultHyracksCommonContext implements IHyracksCommonContext {
+        // Hard-coded frame size for temporary result frames.
+        private final int FRAME_SIZE = 32768;
+
+        /** @return the fixed frame size used for temporary result frames. */
+        @Override
+        public int getFrameSize() {
+            return FRAME_SIZE;
+        }
+
+        /** No I/O manager is available in this dummy context. */
+        @Override
+        public IIOManager getIOManager() {
+            return null;
+        }
+
+        /** Allocates a fresh, untracked heap frame of {@code FRAME_SIZE} bytes. */
+        @Override
+        public ByteBuffer allocateFrame() {
+            return ByteBuffer.allocate(FRAME_SIZE);
+        }
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexException.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexException.java
new file mode 100644
index 0000000..9762a74
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexException.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+
+/**
+ * Base exception for inverted-index-specific failures.
+ */
+public class InvertedIndexException extends IndexException {
+    private static final long serialVersionUID = 1L;
+
+    public InvertedIndexException(String msg) {
+        super(msg);
+    }
+
+    /**
+     * Preserves the underlying cause instead of dropping it.
+     * Uses initCause() since the superclass may not expose a cause constructor.
+     */
+    public InvertedIndexException(String msg, Throwable cause) {
+        super(msg);
+        initCause(cause);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexSearchCursor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexSearchCursor.java
new file mode 100644
index 0000000..1eeb576
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexSearchCursor.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearcher;
+
+/**
+ * Cursor over the result frames materialized by an {@link IInvertedIndexSearcher}.
+ * The searcher fills a list of frames with result tuples; this cursor walks
+ * those frames tuple by tuple via a reusable frame accessor.
+ */
+public class InvertedIndexSearchCursor implements IIndexCursor {
+
+    private List<ByteBuffer> resultBuffers;
+    private int numResultBuffers;
+    private int currentBufferIndex = 0;
+    private int tupleIndex = 0;
+    private final IInvertedIndexSearcher invIndexSearcher;
+    private final IFrameTupleAccessor fta;
+    private final FixedSizeTupleReference resultTuple;
+
+    public InvertedIndexSearchCursor(IInvertedIndexSearcher invIndexSearcher) {
+        this.invIndexSearcher = invIndexSearcher;
+        this.fta = invIndexSearcher.createResultFrameTupleAccessor();
+        this.resultTuple = (FixedSizeTupleReference) invIndexSearcher.createResultTupleReference();
+    }
+
+    @Override
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
+        currentBufferIndex = 0;
+        tupleIndex = 0;
+        resultBuffers = invIndexSearcher.getResultBuffers();
+        numResultBuffers = invIndexSearcher.getNumValidResultBuffers();
+        if (numResultBuffers > 0) {
+            fta.reset(resultBuffers.get(0));
+        }
+    }
+
+    @Override
+    public boolean hasNext() {
+        // Tuples remain while we are on a valid buffer and have not consumed
+        // every tuple of the frame the accessor currently wraps.
+        return currentBufferIndex < numResultBuffers && tupleIndex < fta.getTupleCount();
+    }
+
+    @Override
+    public void next() {
+        resultTuple.reset(fta.getBuffer().array(), fta.getTupleStartOffset(tupleIndex));
+        tupleIndex++;
+        // Advance to the next result frame once the current one is exhausted.
+        if (tupleIndex >= fta.getTupleCount()) {
+            if (currentBufferIndex + 1 < numResultBuffers) {
+                currentBufferIndex++;
+                fta.reset(resultBuffers.get(currentBufferIndex));
+                tupleIndex = 0;
+            }
+        }
+    }
+
+    @Override
+    public ITupleReference getTuple() {
+        return resultTuple;
+    }
+
+    @Override
+    public void reset() {
+        currentBufferIndex = 0;
+        tupleIndex = 0;
+        invIndexSearcher.reset();
+        resultBuffers = invIndexSearcher.getResultBuffers();
+        numResultBuffers = invIndexSearcher.getNumValidResultBuffers();
+        // BUGFIX: re-point the frame accessor at the refreshed buffers.
+        // Previously the accessor kept wrapping a stale frame, so hasNext()
+        // consulted the old frame's tuple count after a reset.
+        if (numResultBuffers > 0) {
+            fta.reset(resultBuffers.get(0));
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        currentBufferIndex = 0;
+        tupleIndex = 0;
+        resultBuffers = null;
+        numResultBuffers = 0;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexSearchPredicate.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexSearchPredicate.java
new file mode 100644
index 0000000..9e9a2c3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexSearchPredicate.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+
+/**
+ * Search predicate for inverted-index queries: carries the query tuple, the
+ * index of the queried field, the tokenizer used to split the query into
+ * tokens, and the search modifier that drives the merge strategy.
+ */
+public class InvertedIndexSearchPredicate implements ISearchPredicate {
+    private static final long serialVersionUID = 1L;
+
+    private ITupleReference queryTuple;
+    private int queryFieldIndex;
+    private final IBinaryTokenizer queryTokenizer;
+    private final IInvertedIndexSearchModifier searchModifier;
+
+    public InvertedIndexSearchPredicate(IBinaryTokenizer queryTokenizer, IInvertedIndexSearchModifier searchModifier) {
+        this.queryTokenizer = queryTokenizer;
+        this.searchModifier = searchModifier;
+    }
+
+    /** Sets the tuple containing the query text. */
+    public void setQueryTuple(ITupleReference queryTuple) {
+        this.queryTuple = queryTuple;
+    }
+
+    public ITupleReference getQueryTuple() {
+        return this.queryTuple;
+    }
+
+    /** Sets the index of the field (within the query tuple) to tokenize. */
+    public void setQueryFieldIndex(int queryFieldIndex) {
+        this.queryFieldIndex = queryFieldIndex;
+    }
+
+    public int getQueryFieldIndex() {
+        return this.queryFieldIndex;
+    }
+
+    public IInvertedIndexSearchModifier getSearchModifier() {
+        return this.searchModifier;
+    }
+
+    public IBinaryTokenizer getQueryTokenizer() {
+        return this.queryTokenizer;
+    }
+
+    /** Not meaningful for an inverted index; always {@code null}. */
+    @Override
+    public MultiComparator getLowKeyComparator() {
+        // TODO: This doesn't make sense for an inverted index. Change ISearchPredicate interface.
+        return null;
+    }
+
+    /** Not meaningful for an inverted index; always {@code null}. */
+    @Override
+    public MultiComparator getHighKeyComparator() {
+        // TODO: This doesn't make sense for an inverted index. Change ISearchPredicate interface.
+        return null;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/OccurrenceThresholdPanicException.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/OccurrenceThresholdPanicException.java
new file mode 100644
index 0000000..b0e737c
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/OccurrenceThresholdPanicException.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+/**
+ * Thrown when the computed occurrence threshold is not positive, meaning the
+ * T-occurrence merge cannot prune anything and the search cannot proceed.
+ */
+public class OccurrenceThresholdPanicException extends InvertedIndexException {
+    private static final long serialVersionUID = 1L;
+
+    public OccurrenceThresholdPanicException(String msg) {
+        super(msg);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
new file mode 100644
index 0000000..af5dad3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
@@ -0,0 +1,547 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+
+// TODO: The search procedure is rather confusing regarding cursor positions, hasNext() calls etc.
+// Needs an overhaul some time.
+public class TOccurrenceSearcher implements IInvertedIndexSearcher {
+
+    protected final IHyracksCommonContext ctx;
+    protected final FixedSizeFrameTupleAppender resultFrameTupleApp;
+    protected final FixedSizeFrameTupleAccessor resultFrameTupleAcc;
+    protected final FixedSizeTupleReference resultTuple;
+    protected final int invListKeyLength;
+    protected int currentNumResults;
+
+    protected List<ByteBuffer> newResultBuffers = new ArrayList<ByteBuffer>();
+    protected List<ByteBuffer> prevResultBuffers = new ArrayList<ByteBuffer>();
+    protected List<ByteBuffer> swap = null;
+    protected int maxResultBufIdx = 0;
+
+    protected final ITreeIndexFrame leafFrame;
+    protected final ITreeIndexFrame interiorFrame;
+    protected final ITreeIndexCursor btreeCursor;
+    protected final FrameTupleReference searchKey = new FrameTupleReference();
+    protected final RangePredicate btreePred = new RangePredicate(null, null, true, true, null, null);
+    protected final ITreeIndexAccessor btreeAccessor;
+
+    protected RecordDescriptor queryTokenRecDesc = new RecordDescriptor(
+            new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+    protected ArrayTupleBuilder queryTokenBuilder = new ArrayTupleBuilder(queryTokenRecDesc.getFieldCount());
+    protected GrowableArray queryTokenFieldData = queryTokenBuilder.getFieldData();
+    protected FrameTupleAppender queryTokenAppender;
+    protected ByteBuffer queryTokenFrame;
+
+    protected final InvertedIndex invIndex;
+    protected final MultiComparator invListCmp;
+    protected final ITypeTraits[] invListFieldsWithCount;
+    protected int occurrenceThreshold;
+
+    protected final int cursorCacheSize = 10;
+    protected List<IInvertedListCursor> invListCursorCache = new ArrayList<IInvertedListCursor>(cursorCacheSize);
+    protected List<IInvertedListCursor> invListCursors = new ArrayList<IInvertedListCursor>(cursorCacheSize);
+
+    /**
+     * Sets up all reusable search state: BTree frames/cursor/accessor for
+     * token lookups, result-frame accessors whose tuple layout is the
+     * inverted-list element fields plus a trailing integer occurrence count,
+     * and a pre-allocated cache of inverted-list cursors.
+     */
+    public TOccurrenceSearcher(IHyracksCommonContext ctx, InvertedIndex invIndex) {
+        this.ctx = ctx;
+        this.invIndex = invIndex;
+        this.invListCmp = MultiComparator.create(invIndex.getInvListElementCmpFactories());
+
+        leafFrame = invIndex.getBTree().getLeafFrameFactory().createFrame();
+        interiorFrame = invIndex.getBTree().getInteriorFrameFactory().createFrame();
+
+        btreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame, false);
+        ITypeTraits[] invListFields = invIndex.getTypeTraits();
+        // Result tuples carry one extra field beyond the list element fields.
+        invListFieldsWithCount = new ITypeTraits[invListFields.length + 1];
+        int tmp = 0;
+        for (int i = 0; i < invListFields.length; i++) {
+            invListFieldsWithCount[i] = invListFields[i];
+            tmp += invListFields[i].getFixedLength();
+        }
+        // using an integer for counting occurrences
+        invListFieldsWithCount[invListFields.length] = IntegerPointable.TYPE_TRAITS;
+        invListKeyLength = tmp;
+
+        resultFrameTupleApp = new FixedSizeFrameTupleAppender(ctx.getFrameSize(), invListFieldsWithCount);
+        resultFrameTupleAcc = new FixedSizeFrameTupleAccessor(ctx.getFrameSize(), invListFieldsWithCount);
+        resultTuple = new FixedSizeTupleReference(invListFieldsWithCount);
+        newResultBuffers.add(ctx.allocateFrame());
+        prevResultBuffers.add(ctx.allocateFrame());
+
+        // The search key serves as both low and high key (inclusive), i.e. the
+        // BTree predicate is a point lookup on a token.
+        MultiComparator searchCmp = MultiComparator.create(invIndex.getBTree().getComparatorFactories());
+        btreePred.setLowKeyComparator(searchCmp);
+        btreePred.setHighKeyComparator(searchCmp);
+        btreePred.setLowKey(searchKey, true);
+        btreePred.setHighKey(searchKey, true);
+
+        // pre-create cursor objects
+        for (int i = 0; i < cursorCacheSize; i++) {
+            invListCursorCache.add(new FixedSizeElementInvertedListCursor(invIndex.getBufferCache(), invIndex
+                    .getInvListsFileId(), invIndex.getTypeTraits()));
+        }
+
+        queryTokenAppender = new FrameTupleAppender(ctx.getFrameSize());
+        queryTokenFrame = ctx.allocateFrame();
+
+        btreeAccessor = invIndex.getBTree().createAccessor();
+        currentNumResults = 0;
+    }
+
+    /** Clears both rolling result-frame lists so the next search starts fresh. */
+    public void reset() {
+        for (ByteBuffer buffer : newResultBuffers) {
+            resultFrameTupleApp.reset(buffer, true);
+        }
+        for (ByteBuffer buffer : prevResultBuffers) {
+            resultFrameTupleApp.reset(buffer, true);
+        }
+        currentNumResults = 0;
+    }
+
+    /**
+     * T-occurrence search: tokenizes the query field, positions one
+     * inverted-list cursor per token via BTree point lookups, then merges the
+     * lists (prefix lists by union, suffix lists with threshold pruning) so
+     * that only elements meeting the occurrence threshold survive. The result
+     * cursor is opened over the final merged frames.
+     *
+     * @throws OccurrenceThresholdPanicException if the modifier yields a
+     *             non-positive threshold (panic case, not yet handled).
+     */
+    public void search(InvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred) throws HyracksDataException, IndexException {
+        ITupleReference queryTuple = searchPred.getQueryTuple();
+        int queryFieldIndex = searchPred.getQueryFieldIndex();
+        IInvertedIndexSearchModifier searchModifier = searchPred.getSearchModifier();
+        IBinaryTokenizer queryTokenizer = searchPred.getQueryTokenizer();
+
+        // Tokenize the query field into one tuple per token in queryTokenFrame.
+        queryTokenAppender.reset(queryTokenFrame, true);
+        queryTokenizer.reset(queryTuple.getFieldData(queryFieldIndex), queryTuple.getFieldStart(queryFieldIndex),
+                queryTuple.getFieldLength(queryFieldIndex));
+
+        while (queryTokenizer.hasNext()) {
+            queryTokenizer.next();
+            queryTokenBuilder.reset();
+            try {
+                IToken token = queryTokenizer.getToken();
+                token.serializeToken(queryTokenFieldData);
+                queryTokenBuilder.addFieldEndOffset();
+                // WARNING: assuming one frame is big enough to hold all tokens
+                queryTokenAppender.append(queryTokenBuilder.getFieldEndOffsets(), queryTokenBuilder.getByteArray(), 0,
+                        queryTokenBuilder.getSize());
+            } catch (IOException e) {
+                throw new HyracksDataException(e);
+            }
+        }
+
+        FrameTupleAccessor queryTokenAccessor = new FrameTupleAccessor(ctx.getFrameSize(), queryTokenRecDesc);
+        queryTokenAccessor.reset(queryTokenFrame);
+        int numQueryTokens = queryTokenAccessor.getTupleCount();
+
+        // expand cursor cache if necessary
+        if (numQueryTokens > invListCursorCache.size()) {
+            int diff = numQueryTokens - invListCursorCache.size();
+            for (int i = 0; i < diff; i++) {
+                invListCursorCache.add(new FixedSizeElementInvertedListCursor(invIndex.getBufferCache(), invIndex
+                        .getInvListsFileId(), invIndex.getTypeTraits()));
+            }
+        }
+
+        // Point-look-up each token in the BTree to position its list cursor.
+        invListCursors.clear();
+        for (int i = 0; i < numQueryTokens; i++) {
+            searchKey.reset(queryTokenAccessor, i);
+            invIndex.openCursor(btreeCursor, btreePred, btreeAccessor, invListCursorCache.get(i));
+            invListCursors.add(invListCursorCache.get(i));
+        }
+
+        occurrenceThreshold = searchModifier.getOccurrenceThreshold(invListCursors);
+
+        // TODO: deal with panic cases properly
+        if (occurrenceThreshold <= 0) {
+            throw new OccurrenceThresholdPanicException("Merge Threshold is <= 0. Failing Search.");
+        }
+
+        // The modifier splits the cursors into prefix lists (merged fully)
+        // and suffix lists (merged with threshold-based pruning).
+        int numPrefixLists = searchModifier.getPrefixLists(invListCursors);
+        maxResultBufIdx = mergePrefixLists(numPrefixLists, numQueryTokens);
+        maxResultBufIdx = mergeSuffixLists(numPrefixLists, numQueryTokens, maxResultBufIdx);
+
+        resultCursor.open(null, searchPred);
+    }
+
+    /**
+     * Merges each prefix list in turn into the running result. Before each
+     * round the prev/new buffer lists are swapped so the previous round's
+     * output becomes this round's input.
+     *
+     * @return index of the last valid buffer in the final result list
+     */
+    protected int mergePrefixLists(int numPrefixTokens, int numQueryTokens) throws HyracksDataException {
+        int maxPrevBufIdx = 0;
+        for (int i = 0; i < numPrefixTokens; i++) {
+            swap = prevResultBuffers;
+            prevResultBuffers = newResultBuffers;
+            newResultBuffers = swap;
+            currentNumResults = 0;
+
+            // Pages are pinned for the duration of one list's merge only.
+            invListCursors.get(i).pinPagesSync();
+            maxPrevBufIdx = mergePrefixList(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx, newResultBuffers);
+            invListCursors.get(i).unpinPages();
+        }
+        return maxPrevBufIdx;
+    }
+
+    /**
+     * Merges each suffix list into the running result, choosing per list
+     * between a probe-based merge and a scan-based merge using a simple cost
+     * model: probing costs ~results*log(listSize), scanning ~results+listSize.
+     *
+     * @return index of the last valid buffer in the final result list
+     */
+    protected int mergeSuffixLists(int numPrefixTokens, int numQueryTokens, int maxPrevBufIdx) throws HyracksDataException {
+        for (int i = numPrefixTokens; i < numQueryTokens; i++) {
+            swap = prevResultBuffers;
+            prevResultBuffers = newResultBuffers;
+            newResultBuffers = swap;
+
+            invListCursors.get(i).pinPagesSync();
+            int numInvListElements = invListCursors.get(i).getNumElements();
+            // should we binary search the next list or should we sort-merge it?
+            if (currentNumResults * Math.log(numInvListElements) < currentNumResults + numInvListElements) {
+                maxPrevBufIdx = mergeSuffixListProbe(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+                        newResultBuffers, i, numQueryTokens);
+            } else {
+                maxPrevBufIdx = mergeSuffixListScan(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+                        newResultBuffers, i, numQueryTokens);
+            }
+            invListCursors.get(i).unpinPages();
+        }
+        return maxPrevBufIdx;
+    }
+
+    /**
+     * Merges one suffix list by probing it (containsKey) once per candidate in
+     * the previous result, instead of scanning the whole list. Candidates that
+     * hit get their occurrence count bumped; misses are kept only if they can
+     * still reach the occurrence threshold given the remaining lists.
+     *
+     * @return index of the last new-result buffer written
+     */
+    protected int mergeSuffixListProbe(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+            int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) {
+
+        int newBufIdx = 0;
+        ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+        int prevBufIdx = 0;
+        ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+        int resultTidx = 0;
+
+        currentNumResults = 0;
+
+        resultFrameTupleAcc.reset(prevCurrentBuffer);
+        resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+            // The occurrence count is stored as the last field of the result tuple.
+            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+
+            if (invListCursor.containsKey(resultTuple, invListCmp)) {
+                count++;
+                newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+            } else {
+                // Keep a missed candidate only if matching all remaining lists
+                // could still get it past the occurrence threshold.
+                if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+                    newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+                }
+            }
+
+            resultTidx++;
+            // Move on to the next previous-result frame when this one is exhausted.
+            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+                prevBufIdx++;
+                if (prevBufIdx <= maxPrevBufIdx) {
+                    prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+                    resultFrameTupleAcc.reset(prevCurrentBuffer);
+                    resultTidx = 0;
+                }
+            }
+        }
+
+        return newBufIdx;
+    }
+
+    /**
+     * Merges one suffix list into the previous result by a two-cursor
+     * sort-merge over the list and the candidate frames. Matches get their
+     * occurrence count bumped; unmatched candidates are kept only while they
+     * can still reach the occurrence threshold given the remaining lists.
+     *
+     * @return index of the last new-result buffer written
+     */
+    protected int mergeSuffixListScan(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+            int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) {
+        int newBufIdx = 0;
+        ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+        int prevBufIdx = 0;
+        ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+        boolean advanceCursor = true;
+        boolean advancePrevResult = false;
+        int resultTidx = 0;
+        currentNumResults = 0;
+
+        resultFrameTupleAcc.reset(prevCurrentBuffer);
+        resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+        int invListTidx = 0;
+        int invListNumTuples = invListCursor.getNumElements();
+
+        // Position the list cursor on its first element before the merge loop.
+        if (invListCursor.hasNext())
+            invListCursor.next();
+
+        while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+            ITupleReference invListTuple = invListCursor.getTuple();
+
+            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+            int cmp = invListCmp.compare(invListTuple, resultTuple);
+            if (cmp == 0) {
+                // Same element in both: increment its occurrence count
+                // (stored as the last field of the result tuple).
+                int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                        resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+                newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+                advanceCursor = true;
+                advancePrevResult = true;
+            } else {
+                if (cmp < 0) {
+                    // List element smaller: it is not a candidate; skip it.
+                    advanceCursor = true;
+                    advancePrevResult = false;
+                } else {
+                    // Candidate not in this list: keep it only if it can still
+                    // reach the threshold via the remaining lists.
+                    int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                            resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+                    if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+                        newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+                    }
+                    advanceCursor = false;
+                    advancePrevResult = true;
+                }
+            }
+
+            if (advancePrevResult) {
+                resultTidx++;
+                if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+                    prevBufIdx++;
+                    if (prevBufIdx <= maxPrevBufIdx) {
+                        prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+                        resultFrameTupleAcc.reset(prevCurrentBuffer);
+                        resultTidx = 0;
+                    }
+                }
+            }
+
+            if (advanceCursor) {
+                invListTidx++;
+                if (invListCursor.hasNext()) {
+                	invListCursor.next();
+                }
+            }
+        }
+
+        // append remaining elements from previous result set
+        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+            if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+                newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+            }
+
+            resultTidx++;
+            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+                prevBufIdx++;
+                if (prevBufIdx <= maxPrevBufIdx) {
+                    prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+                    resultFrameTupleAcc.reset(prevCurrentBuffer);
+                    resultTidx = 0;
+                }
+            }
+        }
+
+        return newBufIdx;
+    }
+
    /**
     * Merges one inverted list (for a prefix token) with the candidate results
     * accumulated so far, producing a new candidate set in newResultBuffers.
     * The merge assumes both the inverted list and the previous results are sorted
     * consistently with invListCmp (standard sort-merge; elements present in both
     * get their occurrence count incremented, unmatched elements are kept).
     *
     * @param invListCursor     cursor over the inverted list to merge in
     * @param prevResultBuffers frames holding the previous (key, count) candidates
     * @param maxPrevBufIdx     index of the last valid frame in prevResultBuffers
     * @param newResultBuffers  frames receiving the merged candidates
     * @return index of the last frame written in newResultBuffers
     */
    protected int mergePrefixList(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
            int maxPrevBufIdx, List<ByteBuffer> newResultBuffers) {
        int newBufIdx = 0;
        ByteBuffer newCurrentBuffer = newResultBuffers.get(0);

        int prevBufIdx = 0;
        ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);

        boolean advanceCursor = true;
        boolean advancePrevResult = false;
        int resultTidx = 0;

        resultFrameTupleAcc.reset(prevCurrentBuffer);
        resultFrameTupleApp.reset(newCurrentBuffer, true);

        int invListTidx = 0;
        int invListNumTuples = invListCursor.getNumElements();

        // Position the cursor on the first inverted-list element.
        if (invListCursor.hasNext())
            invListCursor.next();

        while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {

            ITupleReference invListTuple = invListCursor.getTuple();
            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));

            int cmp = invListCmp.compare(invListTuple, resultTuple);
            if (cmp == 0) {
                // Element occurs in both streams: carry it over with count + 1.
                // The count is stored in the last field of the result tuple.
                int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
                        resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
                newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
                advanceCursor = true;
                advancePrevResult = true;
            } else {
                if (cmp < 0) {
                    // Element only in the inverted list: new candidate with count 1.
                    int count = 1;
                    newBufIdx = appendTupleToNewResults(invListTuple, count, newBufIdx);
                    advanceCursor = true;
                    advancePrevResult = false;
                } else {
                    // Element only in the previous results: keep it with its old count.
                    int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
                            resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
                    newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
                    advanceCursor = false;
                    advancePrevResult = true;
                }
            }

            if (advancePrevResult) {
                // Step to the next previous-result tuple, rolling over to the next
                // frame when the current one is exhausted.
                resultTidx++;
                if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
                    prevBufIdx++;
                    if (prevBufIdx <= maxPrevBufIdx) {
                        prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
                        resultFrameTupleAcc.reset(prevCurrentBuffer);
                        resultTidx = 0;
                    }
                }
            }

            if (advanceCursor) {
                invListTidx++;
                if (invListCursor.hasNext()) {
                    invListCursor.next();
                }
            }
        }

        // append remaining new elements from inverted list
        while (invListTidx < invListNumTuples) {
            ITupleReference invListTuple = invListCursor.getTuple();
            newBufIdx = appendTupleToNewResults(invListTuple, 1, newBufIdx);
            invListTidx++;
            if (invListCursor.hasNext()) {
                invListCursor.next();
            }
        }

        // append remaining elements from previous result set
        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {

            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));

            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
            newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);

            resultTidx++;
            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
                prevBufIdx++;
                if (prevBufIdx <= maxPrevBufIdx) {
                    prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
                    resultFrameTupleAcc.reset(prevCurrentBuffer);
                    resultTidx = 0;
                }
            }
        }

        return newBufIdx;
    }
+
    /**
     * Appends a (key, newCount) pair to the new result frames, advancing to the
     * next frame (allocating one if necessary) when the current frame is full.
     * Relies on resultFrameTupleApp already being positioned on frame newBufIdx.
     *
     * @param tuple    tuple whose first field holds the key to copy
     * @param newCount occurrence count to store alongside the key
     * @param newBufIdx index of the frame currently being appended to
     * @return index of the frame the pair was actually written to
     */
    protected int appendTupleToNewResults(ITupleReference tuple, int newCount, int newBufIdx) {
        ByteBuffer newCurrentBuffer = newResultBuffers.get(newBufIdx);

        if (!resultFrameTupleApp.hasSpace()) {
            // Current frame is full: move to the next frame, growing the frame list on demand.
            newBufIdx++;
            if (newBufIdx >= newResultBuffers.size()) {
                newResultBuffers.add(ctx.allocateFrame());
            }
            newCurrentBuffer = newResultBuffers.get(newBufIdx);
            resultFrameTupleApp.reset(newCurrentBuffer, true);
        }

        // append key
        if (!resultFrameTupleApp.append(tuple.getFieldData(0), tuple.getFieldStart(0), invListKeyLength)) {
            throw new IllegalStateException();
        }

        // append new count
        if (!resultFrameTupleApp.append(newCount)) {
            throw new IllegalStateException();
        }

        resultFrameTupleApp.incrementTupleCount(1);

        currentNumResults++;

        return newBufIdx;
    }
+
    // Accessor over result frames laid out as the inverted-list fields plus a trailing count field.
    public IFrameTupleAccessor createResultFrameTupleAccessor() {
        return new FixedSizeFrameTupleAccessor(ctx.getFrameSize(), invListFieldsWithCount);
    }
+
    // Reusable tuple reference matching the (inverted-list fields + count) result layout.
    public ITupleReference createResultTupleReference() {
        return new FixedSizeTupleReference(invListFieldsWithCount);
    }
+
    // Frames holding the current (key, count) results; only the first
    // getNumValidResultBuffers() entries are meaningful.
    @Override
    public List<ByteBuffer> getResultBuffers() {
        return newResultBuffers;
    }
+
    // Number of frames in getResultBuffers() that actually contain results.
    @Override
    public int getNumValidResultBuffers() {
        return maxResultBufIdx + 1;
    }
+
    // Minimum number of token occurrences a candidate needs to qualify as a result.
    public int getOccurrenceThreshold() {
        return occurrenceThreshold;
    }
+    
+    public void printNewResults(int maxResultBufIdx) {
+        StringBuffer strBuffer = new StringBuffer();
+        for (int i = 0; i <= maxResultBufIdx; i++) {
+            ByteBuffer testBuf = newResultBuffers.get(i);
+            resultFrameTupleAcc.reset(testBuf);
+            for (int j = 0; j < resultFrameTupleAcc.getTupleCount(); j++) {
+                strBuffer.append(IntegerSerializerDeserializer.getInt(resultFrameTupleAcc.getBuffer().array(),
+                        resultFrameTupleAcc.getFieldStartOffset(j, 0)) + ",");
+                strBuffer.append(IntegerSerializerDeserializer.getInt(resultFrameTupleAcc.getBuffer().array(),
+                        resultFrameTupleAcc.getFieldStartOffset(j, 1)) + " ");
+            }
+        }
+        System.out.println(strBuffer.toString());
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixProbeOnly.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixProbeOnly.java
new file mode 100644
index 0000000..957ecc0
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixProbeOnly.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+
+public class TOccurrenceSearcherSuffixProbeOnly extends TOccurrenceSearcher {
+
+	protected final MultiComparator invListCmp;
+	
+    public TOccurrenceSearcherSuffixProbeOnly(IHyracksTaskContext ctx, InvertedIndex invIndex) {
+        super(ctx, invIndex);
+        this.invListCmp = MultiComparator.create(invIndex.getInvListElementCmpFactories());
+    }
+
+    protected int mergeSuffixLists(int numPrefixTokens, int numQueryTokens, int maxPrevBufIdx) throws HyracksDataException {
+        for (int i = numPrefixTokens; i < numQueryTokens; i++) {
+            swap = prevResultBuffers;
+            prevResultBuffers = newResultBuffers;
+            newResultBuffers = swap;
+            currentNumResults = 0;
+
+            invListCursors.get(i).pinPagesSync();
+            maxPrevBufIdx = mergeSuffixListProbe(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+                    newResultBuffers, i, numQueryTokens);
+            invListCursors.get(i).unpinPages();
+        }
+        return maxPrevBufIdx;
+    }
+
+    protected int mergeSuffixListProbe(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+            int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) {
+
+        int newBufIdx = 0;
+        ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+        int prevBufIdx = 0;
+        ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+        int resultTidx = 0;
+
+        resultFrameTupleAcc.reset(prevCurrentBuffer);
+        resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+
+            if (invListCursor.containsKey(resultTuple, invListCmp)) {
+                count++;
+                newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+            } else {
+                if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+                    newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+                }
+            }
+
+            resultTidx++;
+            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+                prevBufIdx++;
+                if (prevBufIdx <= maxPrevBufIdx) {
+                    prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+                    resultFrameTupleAcc.reset(prevCurrentBuffer);
+                    resultTidx = 0;
+                }
+            }
+        }
+
+        return newBufIdx;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixScanOnly.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixScanOnly.java
new file mode 100644
index 0000000..bd9bd60
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixScanOnly.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+
+public class TOccurrenceSearcherSuffixScanOnly extends TOccurrenceSearcher {
+
+	protected final MultiComparator invListCmp;
+	
+    public TOccurrenceSearcherSuffixScanOnly(IHyracksTaskContext ctx, InvertedIndex invIndex) {
+        super(ctx, invIndex);
+        this.invListCmp = MultiComparator.create(invIndex.getInvListElementCmpFactories());
+    }
+
+    protected int mergeSuffixLists(int numPrefixTokens, int numQueryTokens, int maxPrevBufIdx) throws HyracksDataException {
+        for (int i = numPrefixTokens; i < numQueryTokens; i++) {
+            swap = prevResultBuffers;
+            prevResultBuffers = newResultBuffers;
+            newResultBuffers = swap;
+            currentNumResults = 0;
+
+            invListCursors.get(i).pinPagesSync();
+            maxPrevBufIdx = mergeSuffixListScan(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+                    newResultBuffers, i, numQueryTokens);
+            invListCursors.get(i).unpinPages();
+        }
+        return maxPrevBufIdx;
+    }
+
+    protected int mergeSuffixListScan(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+            int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) {
+
+        int newBufIdx = 0;
+        ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+        int prevBufIdx = 0;
+        ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+        boolean advanceCursor = true;
+        boolean advancePrevResult = false;
+        int resultTidx = 0;
+
+        resultFrameTupleAcc.reset(prevCurrentBuffer);
+        resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+        while (invListCursor.hasNext() && resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+            if (advanceCursor)
+                invListCursor.next();
+
+            ITupleReference invListTuple = invListCursor.getTuple();
+
+            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+            int cmp = invListCmp.compare(invListTuple, resultTuple);
+            if (cmp == 0) {
+                int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                        resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+                newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+                advanceCursor = true;
+                advancePrevResult = true;
+            } else {
+                if (cmp < 0) {
+                    advanceCursor = true;
+                    advancePrevResult = false;
+                } else {
+                    int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                            resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+                    if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+                        newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+                    }
+                    advanceCursor = false;
+                    advancePrevResult = true;
+                }
+            }
+
+            if (advancePrevResult) {
+                resultTidx++;
+                if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+                    prevBufIdx++;
+                    if (prevBufIdx <= maxPrevBufIdx) {
+                        prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+                        resultFrameTupleAcc.reset(prevCurrentBuffer);
+                        resultTidx = 0;
+                    }
+                }
+            }
+        }
+
+        // append remaining elements from previous result set
+        while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+            newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+
+            resultTidx++;
+            if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+                prevBufIdx++;
+                if (prevBufIdx <= maxPrevBufIdx) {
+                    prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+                    resultFrameTupleAcc.reset(prevCurrentBuffer);
+                    resultTidx = 0;
+                }
+            }
+        }
+
+        return newBufIdx;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifier.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifier.java
new file mode 100644
index 0000000..55945be
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifier.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import java.util.Collections;
+import java.util.List;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+
+public class ConjunctiveSearchModifier implements IInvertedIndexSearchModifier {
+
+    @Override
+    public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors) {
+        return invListCursors.size();
+    }
+
+    @Override
+    public int getPrefixLists(List<IInvertedListCursor> invListCursors) {
+        Collections.sort(invListCursors);
+        return 1;
+    }
+
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifierFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifierFactory.java
new file mode 100644
index 0000000..0db6008
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifierFactory.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifierFactory;
+
+public class ConjunctiveSearchModifierFactory implements IInvertedIndexSearchModifierFactory {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public IInvertedIndexSearchModifier createSearchModifier() {
+        return new ConjunctiveSearchModifier();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifier.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifier.java
new file mode 100644
index 0000000..ac109b6
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifier.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import java.util.Collections;
+import java.util.List;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+
+public class EditDistanceSearchModifier implements IInvertedIndexSearchModifier {
+
+    private int gramLength;
+    private int edThresh;
+
+    public EditDistanceSearchModifier(int gramLength, int edThresh) {
+        this.gramLength = gramLength;
+        this.edThresh = edThresh;
+    }
+
+    @Override
+    public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors) {
+        return invListCursors.size() - edThresh * gramLength;
+    }
+
+    @Override
+    public int getPrefixLists(List<IInvertedListCursor> invListCursors) {
+        Collections.sort(invListCursors);
+        return invListCursors.size() - getOccurrenceThreshold(invListCursors) + 1;
+    }
+
+    public int getGramLength() {
+        return gramLength;
+    }
+
+    public void setGramLength(int gramLength) {
+        this.gramLength = gramLength;
+    }
+
+    public int getEdThresh() {
+        return edThresh;
+    }
+
+    public void setEdThresh(int edThresh) {
+        this.edThresh = edThresh;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifierFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifierFactory.java
new file mode 100644
index 0000000..128d9db
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifierFactory.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifierFactory;
+
+public class EditDistanceSearchModifierFactory implements IInvertedIndexSearchModifierFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private final int gramLength;
+    private final int edThresh;
+    
+    public EditDistanceSearchModifierFactory(int gramLength, int edThresh) {
+        this.gramLength = gramLength;
+        this.edThresh = edThresh;
+    }
+    
+    @Override
+    public IInvertedIndexSearchModifier createSearchModifier() {
+        return new EditDistanceSearchModifier(gramLength, edThresh);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifier.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifier.java
new file mode 100644
index 0000000..0a4961d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifier.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import java.util.Collections;
+import java.util.List;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+
+public class JaccardSearchModifier implements IInvertedIndexSearchModifier {
+
+    private float jaccThresh;
+
+    public JaccardSearchModifier(float jaccThresh) {
+        this.jaccThresh = jaccThresh;
+    }
+
+    @Override
+    public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors) {
+        return Math.max((int) Math.floor((float) invListCursors.size() * jaccThresh), 1);
+    }
+
+    @Override
+    public int getPrefixLists(List<IInvertedListCursor> invListCursors) {
+        Collections.sort(invListCursors);
+        if (invListCursors.size() == 0) {
+            return 0;
+        }
+        return invListCursors.size() - getOccurrenceThreshold(invListCursors) + 1;
+    }
+
+    public float getJaccThresh() {
+        return jaccThresh;
+    }
+
+    public void setJaccThresh(float jaccThresh) {
+        this.jaccThresh = jaccThresh;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifierFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifierFactory.java
new file mode 100644
index 0000000..bd27c03
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifierFactory.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifierFactory;
+
+public class JaccardSearchModifierFactory implements IInvertedIndexSearchModifierFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private final float jaccThresh;
+
+    public JaccardSearchModifierFactory(float jaccThresh) {
+        this.jaccThresh = jaccThresh;
+    }
+
+    @Override
+    public IInvertedIndexSearchModifier createSearchModifier() {
+        return new JaccardSearchModifier(jaccThresh);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java
new file mode 100644
index 0000000..c2992f5
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java
@@ -0,0 +1,77 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+
+public abstract class AbstractUTF8StringBinaryTokenizer implements IBinaryTokenizer {
+
+    protected byte[] data;
+    protected int start;
+    protected int length;
+    protected int tokenLength;
+    protected int index;
+    protected int utf8Length;
+
+    protected final IntArray tokensStart;
+    protected final IntArray tokensLength;
+    protected final IToken token;
+
+    protected final boolean ignoreTokenCount;
+    protected final boolean sourceHasTypeTag;
+
+    public AbstractUTF8StringBinaryTokenizer(boolean ignoreTokenCount, boolean sourceHasTypeTag,
+            ITokenFactory tokenFactory) {
+        this.ignoreTokenCount = ignoreTokenCount;
+        this.sourceHasTypeTag = sourceHasTypeTag;
+        if (!ignoreTokenCount) {
+            tokensStart = new IntArray();
+            tokensLength = new IntArray();
+        } else {
+            tokensStart = null;
+            tokensLength = null;
+        }
+        token = tokenFactory.createToken();
+    }
+
+    @Override
+    public IToken getToken() {
+        return token;
+    }
+
+    @Override
+    public void reset(byte[] data, int start, int length) {
+        this.start = start;
+        index = this.start;
+        if (sourceHasTypeTag) {
+            index++; // skip type tag
+        }
+        utf8Length = UTF8StringPointable.getUTFLength(data, index);
+        index += 2; // skip utf8 length indicator
+        this.data = data;
+        this.length = length + start;
+
+        tokenLength = 0;
+        if (!ignoreTokenCount) {
+            tokensStart.reset();
+            tokensLength.reset();
+        }
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java
new file mode 100644
index 0000000..2f60952
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java
@@ -0,0 +1,105 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+
+public abstract class AbstractUTF8Token implements IToken {
+    public static final int GOLDEN_RATIO_32 = 0x09e3779b9;
+
+    protected int length;
+    protected int tokenLength;
+    protected int start;
+    protected int tokenCount;
+    protected byte[] data;
+    protected final byte tokenTypeTag;
+    protected final byte countTypeTag;
+
+    public AbstractUTF8Token() {
+        tokenTypeTag = -1;
+        countTypeTag = -1;
+    }
+
+    public AbstractUTF8Token(byte tokenTypeTag, byte countTypeTag) {
+        this.tokenTypeTag = tokenTypeTag;
+        this.countTypeTag = countTypeTag;
+    }
+
+    @Override
+    public byte[] getData() {
+        return data;
+    }
+
+    @Override
+    public int getLength() {
+        return length;
+    }
+
+    public int getLowerCaseUTF8Len(int size) {
+        int lowerCaseUTF8Len = 0;
+        int pos = start;
+        for (int i = 0; i < size; i++) {
+            char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
+            lowerCaseUTF8Len += UTF8StringPointable.getModifiedUTF8Len(c);
+            pos += UTF8StringPointable.charSize(data, pos);
+        }
+        return lowerCaseUTF8Len;
+    }
+
+    @Override
+    public int getStart() {
+        return start;
+    }
+
+    @Override
+    public int getTokenLength() {
+        return tokenLength;
+    }
+
+    public void handleCountTypeTag(DataOutput dos) throws IOException {
+        if (countTypeTag > 0) {
+            dos.write(countTypeTag);
+        }
+    }
+
+    public void handleTokenTypeTag(DataOutput dos) throws IOException {
+        if (tokenTypeTag > 0) {
+            dos.write(tokenTypeTag);
+        }
+    }
+
+    @Override
+    public void reset(byte[] data, int start, int length, int tokenLength, int tokenCount) {
+        this.data = data;
+        this.start = start;
+        this.length = length;
+        this.tokenLength = tokenLength;
+        this.tokenCount = tokenCount;
+    }
+
+    @Override
+    public void serializeTokenCount(GrowableArray out) throws IOException {
+        handleCountTypeTag(out.getDataOutput());
+        out.getDataOutput().writeInt(tokenCount);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8TokenFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8TokenFactory.java
new file mode 100644
index 0000000..3b0b82d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8TokenFactory.java
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public abstract class AbstractUTF8TokenFactory implements ITokenFactory {
+	private static final long serialVersionUID = 1L;
+	protected final byte tokenTypeTag;
+	protected final byte countTypeTag;
+
+	public AbstractUTF8TokenFactory() {
+		tokenTypeTag = -1;
+		countTypeTag = -1;
+	}
+
+	public AbstractUTF8TokenFactory(byte tokenTypeTag, byte countTypeTag) {
+		this.tokenTypeTag = tokenTypeTag;
+		this.countTypeTag = countTypeTag;
+	}
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
new file mode 100644
index 0000000..9dacde6
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
@@ -0,0 +1,81 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+
/**
 * Tokenizer that splits a UTF-8 string into word tokens at separator
 * characters. When token counting is enabled, each produced token carries the
 * number of case-insensitive occurrences of that word seen so far (1-based).
 */
public class DelimitedUTF8StringBinaryTokenizer extends AbstractUTF8StringBinaryTokenizer {

    public DelimitedUTF8StringBinaryTokenizer(boolean ignoreTokenCount, boolean sourceHasTypeTag,
            ITokenFactory tokenFactory) {
        super(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
    }

    /**
     * Advances the cursor past any separators; returns true iff another token
     * begins before the end of the string. Note: mutates {@code index}.
     */
    @Override
    public boolean hasNext() {
        // skip delimiters
        while (index < length && isSeparator(UTF8StringPointable.charAt(data, index))) {
            index += UTF8StringPointable.charSize(data, index);
        }
        return index < length;
    }

    // A separator is anything that is not a letter or digit.
    // NOTE(review): isLetterOrDigit already accepts OTHER_LETTER/OTHER_NUMBER
    // categories, so the extra getType checks look redundant — confirm.
    private boolean isSeparator(char c) {
        return !(Character.isLetterOrDigit(c) || Character.getType(c) == Character.OTHER_LETTER || Character.getType(c) == Character.OTHER_NUMBER);
    }

    /**
     * Scans the next token (cursor must be positioned by hasNext()) and resets
     * the reusable token with its start offset, end offset, character length,
     * and occurrence count.
     */
    @Override
    public void next() {
        tokenLength = 0;
        int currentTokenStart = index;
        // consume characters until the next separator or end of string
        while (index < length && !isSeparator(UTF8StringPointable.charAt(data, index))) {
            index += UTF8StringPointable.charSize(data, index);
            tokenLength++;
        }
        int tokenCount = 1;
        if (tokenLength > 0 && !ignoreTokenCount) {
            // search if we got the same token before
            for (int i = 0; i < tokensStart.length(); ++i) {
                if (tokenLength == tokensLength.get(i)) {
                    int tokenStart = tokensStart.get(i);
                    tokenCount++; // assume we found it
                    int offset = 0;
                    int currLength = 0;
                    while (currLength < tokenLength) {
                        // case insensitive comparison
                        if (Character.toLowerCase(UTF8StringPointable.charAt(data, currentTokenStart + offset)) != Character
                                .toLowerCase(UTF8StringPointable.charAt(data, tokenStart + offset))) {
                            tokenCount--;
                            break;
                        }
                        // NOTE(review): offset advances by the char size at the
                        // *current* token only; this assumes both occurrences
                        // use the same byte width at each position — confirm
                        // for non-ASCII input.
                        offset += UTF8StringPointable.charSize(data, currentTokenStart + offset);
                        currLength++;
                    }
                }
            }
            // add the new token to the list of seen tokens
            tokensStart.add(currentTokenStart);
            tokensLength.add(tokenLength);
        }

        // set token; the third argument is the token's exclusive end offset
        token.reset(data, currentTokenStart, index, tokenLength, tokenCount);
    }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
new file mode 100644
index 0000000..4a350b3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
@@ -0,0 +1,42 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public class DelimitedUTF8StringBinaryTokenizerFactory implements
+		IBinaryTokenizerFactory {
+
+	private static final long serialVersionUID = 1L;
+	private final boolean ignoreTokenCount;
+	private final boolean sourceHasTypeTag;
+	private final ITokenFactory tokenFactory;
+
+	public DelimitedUTF8StringBinaryTokenizerFactory(boolean ignoreTokenCount,
+			boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
+		this.ignoreTokenCount = ignoreTokenCount;
+		this.sourceHasTypeTag = sourceHasTypeTag;
+		this.tokenFactory = tokenFactory;
+	}
+
+	@Override
+	public IBinaryTokenizer createTokenizer() {
+		return new DelimitedUTF8StringBinaryTokenizer(ignoreTokenCount,
+				sourceHasTypeTag, tokenFactory);
+	}
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java
new file mode 100644
index 0000000..a1a4354
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java
@@ -0,0 +1,64 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+
+public class HashedUTF8NGramToken extends UTF8NGramToken {
+    public HashedUTF8NGramToken(byte tokenTypeTag, byte countTypeTag) {
+        super(tokenTypeTag, countTypeTag);
+    }
+
+    @Override
+    public void serializeToken(GrowableArray out) throws IOException {
+        handleTokenTypeTag(out.getDataOutput());
+
+        int hash = GOLDEN_RATIO_32;
+
+        // pre chars
+        for (int i = 0; i < numPreChars; i++) {
+            hash ^= PRECHAR;
+            hash *= GOLDEN_RATIO_32;
+        }
+
+        // regular chars
+        int numRegGrams = tokenLength - numPreChars - numPostChars;
+        int pos = start;
+        for (int i = 0; i < numRegGrams; i++) {
+            hash ^= Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
+            hash *= GOLDEN_RATIO_32;
+            pos += UTF8StringPointable.charSize(data, pos);
+        }
+
+        // post chars
+        for (int i = 0; i < numPostChars; i++) {
+            hash ^= POSTCHAR;
+            hash *= GOLDEN_RATIO_32;
+        }
+
+        // token count
+        hash += tokenCount;
+
+        out.getDataOutput().writeInt(hash);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java
new file mode 100644
index 0000000..4a87793
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java
@@ -0,0 +1,38 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public class HashedUTF8NGramTokenFactory extends AbstractUTF8TokenFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	public HashedUTF8NGramTokenFactory() {
+		super();
+	}
+
+	public HashedUTF8NGramTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+		super(tokenTypeTag, countTypeTag);
+	}
+
+	@Override
+	public IToken createToken() {
+		return new HashedUTF8NGramToken(tokenTypeTag, countTypeTag);
+	}
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java
new file mode 100644
index 0000000..20405c6
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java
@@ -0,0 +1,87 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+
+public class HashedUTF8WordToken extends UTF8WordToken {
+
+    private int hash = 0;
+
+    public HashedUTF8WordToken(byte tokenTypeTag, byte countTypeTag) {
+        super(tokenTypeTag, countTypeTag);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null) {
+            return false;
+        }
+        if (!(o instanceof IToken)) {
+            return false;
+        }
+        IToken t = (IToken) o;
+        if (t.getTokenLength() != tokenLength) {
+            return false;
+        }
+        int offset = 0;
+        for (int i = 0; i < tokenLength; i++) {
+            if (UTF8StringPointable.charAt(t.getData(), t.getStart() + offset) != UTF8StringPointable.charAt(data,
+                    start + offset)) {
+                return false;
+            }
+            offset += UTF8StringPointable.charSize(data, start + offset);
+        }
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        return hash;
+    }
+
+    @Override
+    public void reset(byte[] data, int start, int length, int tokenLength, int tokenCount) {
+        super.reset(data, start, length, tokenLength, tokenCount);
+
+        // pre-compute hash value using JAQL-like string hashing
+        int pos = start;
+        hash = GOLDEN_RATIO_32;
+        for (int i = 0; i < tokenLength; i++) {
+            hash ^= Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
+            hash *= GOLDEN_RATIO_32;
+            pos += UTF8StringPointable.charSize(data, pos);
+        }
+        hash += tokenCount;
+    }
+
+    @Override
+    public void serializeToken(GrowableArray out) throws IOException {
+        if (tokenTypeTag > 0) {
+            out.getDataOutput().write(tokenTypeTag);
+        }
+
+        // serialize hash value
+        out.getDataOutput().writeInt(hash);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java
new file mode 100644
index 0000000..318f041
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java
@@ -0,0 +1,38 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public class HashedUTF8WordTokenFactory extends AbstractUTF8TokenFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	public HashedUTF8WordTokenFactory() {
+		super();
+	}
+
+	public HashedUTF8WordTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+		super(tokenTypeTag, countTypeTag);
+	}
+
+	@Override
+	public IToken createToken() {
+		return new HashedUTF8WordToken(tokenTypeTag, countTypeTag);
+	}
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizer.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizer.java
new file mode 100644
index 0000000..05c6d0b
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizer.java
@@ -0,0 +1,30 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public interface IBinaryTokenizer {
+	public IToken getToken();
+
+	public boolean hasNext();
+
+	public void next();
+
+	public void reset(byte[] data, int start, int length);
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizerFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizerFactory.java
new file mode 100644
index 0000000..bfe78ee
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizerFactory.java
@@ -0,0 +1,26 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.Serializable;
+
+public interface IBinaryTokenizerFactory extends Serializable {
+	public IBinaryTokenizer createTokenizer();
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/INGramToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/INGramToken.java
new file mode 100644
index 0000000..befc6d2
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/INGramToken.java
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
/**
 * Extension interface for n-gram tokens that track how many synthetic
 * pre-/post-padding characters the gram contains.
 */
public interface INGramToken {

    int getNumPostChars();

    int getNumPreChars();

    /** Sets the number of synthetic padding characters at each end of the gram. */
    void setNumPrePostChars(int numPreChars, int numPostChars);
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java
new file mode 100644
index 0000000..47467a1
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+
+public interface IToken {
+	public byte[] getData();
+
+	public int getLength();
+
+	public int getStart();
+
+	public int getTokenLength();
+
+	public void reset(byte[] data, int start, int length, int tokenLength,
+			int tokenCount);
+
+	public void serializeToken(GrowableArray out) throws IOException;
+
+	public void serializeTokenCount(GrowableArray out) throws IOException;
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/ITokenFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/ITokenFactory.java
new file mode 100644
index 0000000..8b5d71d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/ITokenFactory.java
@@ -0,0 +1,26 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.Serializable;
+
+public interface ITokenFactory extends Serializable {
+    public IToken createToken();
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IntArray.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IntArray.java
new file mode 100644
index 0000000..2eb9ff4
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IntArray.java
@@ -0,0 +1,80 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Rares Vernica <rares (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.util.Arrays;
+
+/**
+ * Simple growable array of primitive ints (avoids Integer boxing).
+ * Not thread-safe.
+ */
+public class IntArray {
+    // Initial capacity of the backing array.
+    private static final int SIZE = 128;
+
+    private int[] data; // backing storage; grows by doubling
+    private int length; // number of valid elements in data
+
+    public IntArray() {
+        data = new int[SIZE];
+        length = 0;
+    }
+
+    // Appends d, doubling the backing array first if it is full.
+    public void add(int d) {
+        if (length == data.length) {
+            data = Arrays.copyOf(data, data.length << 1);
+        }
+        data[length++] = d;
+    }
+
+    // Returns the raw backing array; only the first length() entries are valid.
+    public int[] get() {
+        return data;
+    }
+
+    // Returns the element at index i (no bounds check beyond the array's own).
+    public int get(int i) {
+        return data[i];
+    }
+
+    // Number of valid elements (not the backing array's capacity).
+    public int length() {
+        return length;
+    }
+
+    // Logically clears the array; backing storage is retained for reuse.
+    public void reset() {
+        length = 0;
+    }
+
+    // Sorts the valid prefix [0, length) in ascending order.
+    public void sort() {
+        sort(0, length);
+    }
+
+    // Sorts the range [start, end) in ascending order; end is exclusive.
+    public void sort(int start, int end) {
+        Arrays.sort(data, start, end);
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder out = new StringBuilder();
+        out.append('[');
+        for (int i = 0; i < length; ++i) {
+            out.append(data[i]);
+            if (i < length - 1) {
+                out.append(',');
+                out.append(' ');
+            }
+        }
+        out.append(']');
+        return out.toString();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
new file mode 100644
index 0000000..fdfc02f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
@@ -0,0 +1,118 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+
+/**
+ * Tokenizer that emits the n-grams of a UTF8 string field. Optionally emits
+ * boundary ("pre"/"post") grams padded with special characters, and can
+ * compute a per-gram occurrence count over the preceding grams.
+ */
+public class NGramUTF8StringBinaryTokenizer extends AbstractUTF8StringBinaryTokenizer {
+
+    private int gramLength;     // n, the number of characters per gram
+    private boolean usePrePost; // whether boundary (padded) grams are produced
+
+    private int gramNum;        // index of the next gram to emit
+    private int totalGrams;     // total number of grams for the current field
+
+    // Same object as 'token', downcast once so pre/post char counts can be set.
+    private final INGramToken concreteToken;
+
+    public NGramUTF8StringBinaryTokenizer(int gramLength, boolean usePrePost, boolean ignoreTokenCount,
+            boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
+        super(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
+        this.gramLength = gramLength;
+        this.usePrePost = usePrePost;
+        concreteToken = (INGramToken) token;
+    }
+
+    @Override
+    public boolean hasNext() {
+        if (gramNum < totalGrams) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    @Override
+    public void next() {
+        int currentTokenStart = index;
+        int tokenCount = 1;
+        int numPreChars = 0;
+        int numPostChars = 0;
+        if (usePrePost) {
+            // Padding chars needed when the gram window overlaps the string's
+            // start (pre) or end (post).
+            numPreChars = Math.max(gramLength - gramNum - 1, 0);
+            numPostChars = (gramNum > totalGrams - gramLength) ? gramLength - totalGrams + gramNum : 0;
+        }
+        gramNum++;
+
+        concreteToken.setNumPrePostChars(numPreChars, numPostChars);
+        if (numPreChars == 0) {
+            // Only advance once the gram window has fully entered the string.
+            index += UTF8StringPointable.charSize(data, index);
+        }
+
+        // compute token count
+        // ignore pre and post grams for duplicate detection
+        if (!ignoreTokenCount && numPreChars == 0 && numPostChars == 0) {
+            // Case-insensitively count earlier occurrences of this gram by
+            // comparing it against every gram starting before currentTokenStart.
+            int tmpIndex = start;
+            while (tmpIndex < currentTokenStart) {
+                tokenCount++; // assume found
+                int offset = 0;
+                for (int j = 0; j < gramLength; j++) {
+                    if (Character.toLowerCase(UTF8StringPointable.charAt(data, currentTokenStart + offset)) != Character
+                            .toLowerCase(UTF8StringPointable.charAt(data, tmpIndex + offset))) {
+                        tokenCount--;
+                        break;
+                    }
+                    offset += UTF8StringPointable.charSize(data, tmpIndex + offset);
+                }
+                tmpIndex += UTF8StringPointable.charSize(data, tmpIndex);
+            }
+        }
+
+        // set token
+        token.reset(data, currentTokenStart, length, gramLength, tokenCount);
+    }
+
+    @Override
+    public void reset(byte[] data, int start, int length) {
+        super.reset(data, start, length);
+        gramNum = 0;
+
+        // Count the UTF8 characters in the field to derive the gram count.
+        int numChars = 0;
+        int pos = index;
+        int end = pos + utf8Length;
+        while (pos < end) {
+            numChars++;
+            pos += UTF8StringPointable.charSize(data, pos);
+        }
+
+        if (usePrePost) {
+            // With padding: one gram per character plus gramLength - 1 boundary grams.
+            totalGrams = numChars + gramLength - 1;
+        } else {
+            totalGrams = numChars - gramLength + 1;
+        }
+    }
+
+    // NOTE(review): setters take effect on the next reset(); changing them
+    // mid-iteration would desynchronize totalGrams — confirm intended usage.
+    public void setGramlength(int gramLength) {
+        this.gramLength = gramLength;
+    }
+
+    public void setPrePost(boolean usePrePost) {
+        this.usePrePost = usePrePost;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java
new file mode 100644
index 0000000..8cb9818
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java
@@ -0,0 +1,91 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+/**
+ * N-gram token over a UTF8 string field. Grams overlapping the string
+ * boundaries contain padding characters: {@link #PRECHAR} before the first
+ * character and {@link #POSTCHAR} after the last.
+ */
+public class UTF8NGramToken extends AbstractUTF8Token implements INGramToken {
+
+    public final static char PRECHAR = '#';
+
+    public final static char POSTCHAR = '$';
+
+    protected int numPreChars;
+    protected int numPostChars;
+
+    public UTF8NGramToken(byte tokenTypeTag, byte countTypeTag) {
+        super(tokenTypeTag, countTypeTag);
+    }
+
+    // Bug fix: the original getters returned the opposite field
+    // (getNumPostChars returned numPreChars and vice versa).
+    @Override
+    public int getNumPostChars() {
+        return numPostChars;
+    }
+
+    @Override
+    public int getNumPreChars() {
+        return numPreChars;
+    }
+
+    /**
+     * Serializes this gram into out as: [optional type tag][2-byte UTF8
+     * length][PRECHARs][lowercased regular chars][POSTCHARs]. The length
+     * field is back-patched once the actual encoded length is known.
+     */
+    @Override
+    public void serializeToken(GrowableArray out) throws IOException {
+        handleTokenTypeTag(out.getDataOutput());
+        int tokenUTF8LenOff = out.getLength();
+
+        // regular chars
+        int numRegChars = tokenLength - numPreChars - numPostChars;
+
+        // assuming pre and post char need 1-byte each in utf8
+        int tokenUTF8Len = numPreChars + numPostChars;
+
+        // Write dummy UTF length which will be correctly set later.
+        out.getDataOutput().writeShort(0);
+
+        // pre chars
+        for (int i = 0; i < numPreChars; i++) {
+            StringUtils.writeCharAsModifiedUTF8(PRECHAR, out.getDataOutput());
+        }
+
+        // Lowercase each regular character while accumulating its encoded byte length.
+        int pos = start;
+        for (int i = 0; i < numRegChars; i++) {
+            char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
+            tokenUTF8Len += StringUtils.writeCharAsModifiedUTF8(c, out.getDataOutput());
+            pos += UTF8StringPointable.charSize(data, pos);
+        }
+
+        // post chars
+        for (int i = 0; i < numPostChars; i++) {
+            StringUtils.writeCharAsModifiedUTF8(POSTCHAR, out.getDataOutput());
+        }
+
+        // Set UTF length of token.
+        out.getByteArray()[tokenUTF8LenOff] = (byte) ((tokenUTF8Len >>> 8) & 0xFF);
+        out.getByteArray()[tokenUTF8LenOff + 1] = (byte) ((tokenUTF8Len >>> 0) & 0xFF);
+    }
+
+    // Sets the number of boundary padding characters for the current gram.
+    public void setNumPrePostChars(int numPreChars, int numPostChars) {
+        this.numPreChars = numPreChars;
+        this.numPostChars = numPostChars;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramTokenFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramTokenFactory.java
new file mode 100644
index 0000000..968d8e1
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramTokenFactory.java
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+/**
+ * Serializable factory producing {@link UTF8NGramToken} instances carrying
+ * the configured token and count type tags.
+ */
+public class UTF8NGramTokenFactory extends AbstractUTF8TokenFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	// Delegates to the no-arg superclass constructor (superclass defaults apply).
+	public UTF8NGramTokenFactory() {
+		super();
+	}
+
+	public UTF8NGramTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+		super(tokenTypeTag, countTypeTag);
+	}
+
+	@Override
+	public IToken createToken() {
+		return new UTF8NGramToken(tokenTypeTag, countTypeTag);
+	}
+
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java
new file mode 100644
index 0000000..9d7fe7c
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java
@@ -0,0 +1,51 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+/**
+ * Word token over a UTF8 string field. Serializes as a lowercased
+ * modified-UTF8 string: [optional type tag][2-byte length][chars], with the
+ * length field back-patched once the encoded length is known.
+ */
+public class UTF8WordToken extends AbstractUTF8Token {
+
+    public UTF8WordToken(byte tokenTypeTag, byte countTypeTag) {
+        super(tokenTypeTag, countTypeTag);
+    }
+
+    @Override
+    public void serializeToken(GrowableArray out) throws IOException {
+        handleTokenTypeTag(out.getDataOutput());
+        int tokenUTF8LenOff = out.getLength();
+        int tokenUTF8Len = 0;
+        // Write dummy UTF length which will be correctly set later.
+        out.getDataOutput().writeShort(0);
+        int pos = start;
+        for (int i = 0; i < tokenLength; i++) {
+            // Lowercase each character while accumulating its encoded byte length.
+            char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
+            tokenUTF8Len += StringUtils.writeCharAsModifiedUTF8(c, out.getDataOutput());
+            pos += UTF8StringPointable.charSize(data, pos);
+        }
+        // Set UTF length of token.
+        out.getByteArray()[tokenUTF8LenOff] = (byte) ((tokenUTF8Len >>> 8) & 0xFF);
+        out.getByteArray()[tokenUTF8LenOff + 1] = (byte) ((tokenUTF8Len >>> 0) & 0xFF);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordTokenFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordTokenFactory.java
new file mode 100644
index 0000000..4358254
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordTokenFactory.java
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+/**
+ * Serializable factory producing {@link UTF8WordToken} instances carrying
+ * the configured token and count type tags.
+ */
+public class UTF8WordTokenFactory extends AbstractUTF8TokenFactory {
+
+	private static final long serialVersionUID = 1L;
+
+	// Delegates to the no-arg superclass constructor (superclass defaults apply).
+	public UTF8WordTokenFactory() {
+		super();
+	}
+
+	public UTF8WordTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+		super(tokenTypeTag, countTypeTag);
+	}
+
+	@Override
+	public IToken createToken() {
+		return new UTF8WordToken(tokenTypeTag, countTypeTag);
+	}
+
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/util/InvertedIndexUtils.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/util/InvertedIndexUtils.java
new file mode 100644
index 0000000..a1d1f06
--- /dev/null
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/util/InvertedIndexUtils.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+
+public class InvertedIndexUtils {
+    // Type traits to be appended to the token type trait which finally form the BTree field type traits.
+    private static final ITypeTraits[] btreeValueTypeTraits = new ITypeTraits[4];
+    static {
+        // startPageId
+        btreeValueTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        // endPageId
+        btreeValueTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        // startOff
+        btreeValueTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        // numElements
+        btreeValueTypeTraits[3] = IntegerPointable.TYPE_TRAITS;
+    }
+
+    /**
+     * Builds the full BTree field type traits by appending the fixed
+     * inverted-list value traits (startPageId, endPageId, startOff,
+     * numElements) after the given token (key) type traits.
+     */
+    public static ITypeTraits[] getBTreeTypeTraits(ITypeTraits[] tokenTypeTraits) {
+        ITypeTraits[] btreeTypeTraits = new ITypeTraits[tokenTypeTraits.length + btreeValueTypeTraits.length];
+        // Key traits first, then the fixed value traits (arraycopy over manual loops).
+        System.arraycopy(tokenTypeTraits, 0, btreeTypeTraits, 0, tokenTypeTraits.length);
+        System.arraycopy(btreeValueTypeTraits, 0, btreeTypeTraits, tokenTypeTraits.length, btreeValueTypeTraits.length);
+        return btreeTypeTraits;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-rtree/pom.xml b/hyracks/hyracks-storage-am-rtree/pom.xml
new file mode 100644
index 0000000..ade69ef
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/pom.xml
@@ -0,0 +1,57 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-rtree</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-am-rtree</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-dataflow-std</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>  	
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>  	  		
+  </dependencies>
+</project>
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
new file mode 100644
index 0000000..5f333f3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
+
+/**
+ * Operations of an interior (non-leaf) RTree frame: choosing and following
+ * child pointers, locating entries by child pointer, and maintaining keys.
+ */
+public interface IRTreeInteriorFrame extends IRTreeFrame {
+
+    // Selects a child for inserting tuple; retrieve it via getBestChildPageId().
+    public boolean findBestChild(ITupleReference tuple, MultiComparator cmp);
+
+    // Page id of the child chosen by the last findBestChild() call.
+    public int getBestChildPageId();
+
+    public int getChildPageId(int tupleIndex);
+
+    // Child page id for the entry at tupleIndex if it intersects tuple;
+    // NOTE(review): sentinel value for the non-intersecting case is defined
+    // by implementations — confirm there.
+    public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+
+    public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
+
+    public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp);
+
+    // Adjusts the key of the entry at tupleIndex to account for tuple.
+    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException;
+
+    public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+
+    public void enlarge(ITupleReference tuple, MultiComparator cmp);
+
+    boolean checkEnlargement(ITupleReference tuple, MultiComparator cmp);
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
new file mode 100644
index 0000000..3005785
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Operations of a leaf RTree frame used during search and deletion.
+ */
+public interface IRTreeLeafFrame extends IRTreeFrame {
+
+	// Index of tuple within this frame (per the implementation's contract).
+	public int findTupleIndex(ITupleReference tuple, MultiComparator cmp);
+
+	// True if the entry at tupleIndex intersects the given tuple under cmp.
+	public boolean intersect(ITupleReference tuple, int tupleIndex,
+			MultiComparator cmp);
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelper.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelper.java
new file mode 100644
index 0000000..0470da9
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelper.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+/**
+ * TreeIndexDataflowHelper specialization that creates RTree instances using
+ * the configured primitive value provider factories.
+ */
+public class RTreeDataflowHelper extends TreeIndexDataflowHelper {
+
+    private final IPrimitiveValueProviderFactory[] valueProviderFactories;
+
+    public RTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
+            IPrimitiveValueProviderFactory[] valueProviderFactories) {
+        super(opDesc, ctx, partition);
+        this.valueProviderFactories = valueProviderFactories;
+    }
+
+    @Override
+    public ITreeIndex createIndexInstance() throws HyracksDataException {
+        // Delegate RTree construction to RTreeUtils, wiring in the operator's
+        // buffer cache, type traits, and comparator factories.
+        return RTreeUtils.createRTree(treeOpDesc.getStorageManager().getBufferCache(ctx),
+                treeOpDesc.getTreeIndexTypeTraits(), valueProviderFactories,
+                treeOpDesc.getTreeIndexComparatorFactories());
+    }
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelperFactory.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelperFactory.java
new file mode 100644
index 0000000..6b9fd4c
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeDataflowHelperFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
+
+/**
+ * Serializable factory creating {@link RTreeDataflowHelper} instances for a
+ * given operator descriptor, task context, and partition.
+ */
+public class RTreeDataflowHelperFactory implements IIndexDataflowHelperFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    // Shipped with the job spec; must be serializable along with this factory.
+    private final IPrimitiveValueProviderFactory[] valueProviderFactories;
+
+    public RTreeDataflowHelperFactory(IPrimitiveValueProviderFactory[] valueProviderFactories) {
+        this.valueProviderFactories = valueProviderFactories;
+    }
+
+    @Override
+    public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition) {
+        return new RTreeDataflowHelper(opDesc, ctx, partition, valueProviderFactories);
+    }
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
new file mode 100644
index 0000000..d9b7b97
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor for an R-tree range/containment search. Declares a single input
+ * (the search-key tuples) and a single output (matching index tuples); the actual work
+ * happens in the {@link RTreeSearchOperatorNodePushable} created per partition.
+ */
+public class RTreeSearchOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    protected int[] keyFields; // fields in input tuple to be used as keys
+
+    public RTreeSearchOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, int[] keyFields,
+            IIndexDataflowHelperFactory dataflowHelperFactory, boolean retainInput,
+            IOperationCallbackProvider opCallbackProvider) {
+        // 1 input arity, 1 output arity; null = no tuple filter factory.
+        super(spec, 1, 1, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, dataflowHelperFactory, null, retainInput, opCallbackProvider);
+        this.keyFields = keyFields;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new RTreeSearchOperatorNodePushable(this, ctx, opCallbackProvider, partition, recordDescProvider,
+                keyFields);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
new file mode 100644
index 0000000..3781037
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexSearchOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+/**
+ * Per-partition push runtime for R-tree search: builds an R-tree {@link SearchPredicate}
+ * from designated fields of each incoming tuple and delegates the cursor machinery to
+ * {@link TreeIndexSearchOperatorNodePushable}.
+ */
+public class RTreeSearchOperatorNodePushable extends TreeIndexSearchOperatorNodePushable {
+    // Re-pointed at the key fields of the current input tuple; stays null when no
+    // key fields were configured (see constructor), which makes the search a full scan
+    // as far as this class is concerned.
+    protected PermutingFrameTupleReference searchKey;
+    protected MultiComparator cmp;
+
+    public RTreeSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            IOperationCallbackProvider opCallbackProvider, int partition, IRecordDescriptorProvider recordDescProvider,
+            int[] keyFields) {
+        super(opDesc, ctx, partition, recordDescProvider);
+        if (keyFields != null && keyFields.length > 0) {
+            searchKey = new PermutingFrameTupleReference();
+            searchKey.setFieldPermutation(keyFields);
+        }
+    }
+
+    @Override
+    protected ISearchPredicate createSearchPredicate() {
+        // NOTE(review): searchKey may be null here when no key fields were given;
+        // presumably RTreeUtils.getSearchMultiComparator and SearchPredicate accept
+        // a null key — confirm against their implementations.
+        cmp = RTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), searchKey);
+        return new SearchPredicate(searchKey, cmp);
+    }
+
+    @Override
+    protected void resetSearchPredicate(int tupleIndex) {
+        if (searchKey != null) {
+            searchKey.reset(accessor, tupleIndex);
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
new file mode 100644
index 0000000..84e66ef
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -0,0 +1,333 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
+
+/**
+ * Base frame for R-tree pages in N-ary Storage Model layout. Extends the generic NSM
+ * frame header with an 8-byte page NSN (node sequence number, used by concurrent R-tree
+ * traversal to detect splits) and a 4-byte right-sibling page id. Implements the
+ * R*-tree split algorithm shared by interior and leaf frames.
+ */
+public abstract class RTreeNSMFrame extends TreeIndexNSMFrame implements IRTreeFrame {
+    // Header layout: NSN directly after the parent's sm-flag byte, right page after NSN.
+    protected static final int pageNsnOff = smFlagOff + 1;
+    protected static final int rightPageOff = pageNsnOff + 8;
+
+    // One reusable tuple reference per key field; re-pointed at MBR extrema in adjustMBR().
+    protected ITreeIndexTupleReference[] tuples;
+    protected ITreeIndexTupleReference cmpFrameTuple;
+    protected TupleEntryArrayList tupleEntries1; // used for split and checking
+                                                 // enlargement
+    protected TupleEntryArrayList tupleEntries2; // used for split
+
+    // Four scratch rectangles for the candidate split distributions (lower/upper x left/right).
+    protected Rectangle[] rec;
+
+    // R*-tree paper constants: m = splitFactor * (N+1); candidate set size for overlap test.
+    protected static final double splitFactor = 0.4;
+    protected static final int nearMinimumOverlapFactor = 32;
+    private static final double doubleEpsilon = computeDoubleEpsilon();
+    private static final int numTuplesEntries = 100;
+    protected final IPrimitiveValueProvider[] keyValueProviders;
+
+    public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders) {
+        super(tupleWriter, new UnorderedSlotManager());
+        this.tuples = new ITreeIndexTupleReference[keyValueProviders.length];
+        for (int i = 0; i < keyValueProviders.length; i++) {
+            this.tuples[i] = tupleWriter.createTupleReference();
+        }
+        cmpFrameTuple = tupleWriter.createTupleReference();
+
+        tupleEntries1 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
+        tupleEntries2 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
+        rec = new Rectangle[4];
+        for (int i = 0; i < 4; i++) {
+            // Key fields come in lower/upper pairs, so dimension = fieldCount / 2.
+            rec[i] = new Rectangle(keyValueProviders.length / 2);
+        }
+        this.keyValueProviders = keyValueProviders;
+    }
+
+    // Computes machine epsilon for double by halving until 1.0 + eps/2 == 1.0.
+    private static double computeDoubleEpsilon() {
+        double doubleEpsilon = 1.0;
+
+        do {
+            doubleEpsilon /= 2.0;
+        } while (1.0 + (doubleEpsilon / 2.0) != 1.0);
+        return doubleEpsilon;
+    }
+
+    public static double doubleEpsilon() {
+        return doubleEpsilon;
+    }
+
+    @Override
+    public void initBuffer(byte level) {
+        super.initBuffer(level);
+        // Fresh page: NSN 0, no right sibling.
+        buf.putLong(pageNsnOff, 0);
+        buf.putInt(rightPageOff, -1);
+    }
+
+    public void setTupleCount(int tupleCount) {
+        buf.putInt(tupleCountOff, tupleCount);
+    }
+
+    @Override
+    public void setPageNsn(long pageNsn) {
+        buf.putLong(pageNsnOff, pageNsn);
+    }
+
+    @Override
+    public long getPageNsn() {
+        return buf.getLong(pageNsnOff);
+    }
+
+    @Override
+    protected void resetSpaceParams() {
+        // First free byte is just past the right-page pointer (header end).
+        buf.putInt(freeSpaceOff, rightPageOff + 4);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - (rightPageOff + 4));
+    }
+
+    @Override
+    public int getRightPage() {
+        return buf.getInt(rightPageOff);
+    }
+
+    @Override
+    public void setRightPage(int rightPage) {
+        buf.putInt(rightPageOff, rightPage);
+    }
+
+    protected ITreeIndexTupleReference[] getTuples() {
+        return tuples;
+    }
+
+    /**
+     * R*-tree split: choose the split axis by minimum total margin over all candidate
+     * distributions, then the split point by minimum overlap (area as tie-breaker),
+     * move one side's tuples to {@code rightFrame}, and write both pages' adjusted
+     * MBRs into {@code splitKey}. The new {@code tuple} (index -1 in the entry lists)
+     * participates in the distribution and ends up on whichever side it sorted into.
+     */
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
+        RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+        RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+        RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+
+        // calculations are based on the R*-tree paper
+        int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+        int splitDistribution = getTupleCount() - (2 * m) + 2;
+
+        // to calculate the minimum margin in order to pick the split axis
+        double minMargin = Double.MAX_VALUE;
+        int splitAxis = 0, sortOrder = 0;
+
+        int maxFieldPos = keyValueProviders.length / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            // Collect every tuple's lower bound (axis i) and upper bound (axis j).
+            for (int k = 0; k < getTupleCount(); ++k) {
+
+                frameTuple.resetByTupleIndex(this, k);
+                double LowerKey = keyValueProviders[i]
+                        .getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
+                double UpperKey = keyValueProviders[j]
+                        .getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
+
+                tupleEntries1.add(k, LowerKey);
+                tupleEntries2.add(k, UpperKey);
+            }
+            // The incoming tuple is tagged with index -1 so it can be told apart later.
+            double LowerKey = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+            double UpperKey = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
+
+            tupleEntries1.add(-1, LowerKey);
+            tupleEntries2.add(-1, UpperKey);
+
+            tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+            tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+            double lowerMargin = 0.0, upperMargin = 0.0;
+            // generate distribution
+            for (int k = 1; k <= splitDistribution; ++k) {
+                int d = m - 1 + k;
+
+                generateDist(tuple, tupleEntries1, rec[0], 0, d);
+                generateDist(tuple, tupleEntries2, rec[1], 0, d);
+                generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+                generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
+
+                // calculate the margin of the distributions
+                lowerMargin += rec[0].margin() + rec[2].margin();
+                upperMargin += rec[1].margin() + rec[3].margin();
+            }
+            double margin = Math.min(lowerMargin, upperMargin);
+
+            // store minimum margin as split axis
+            if (margin < minMargin) {
+                minMargin = margin;
+                splitAxis = i;
+                // NOTE(review): sortOrder == 2 is meant to select the upper-bound field
+                // (splitAxis + maxFieldPos); that only matches when maxFieldPos == 2,
+                // i.e. 2-dimensional keys — confirm whether this class is 2-D only.
+                sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+            }
+
+            tupleEntries1.clear();
+            tupleEntries2.clear();
+        }
+
+        // Re-sort all entries (plus the new tuple) along the chosen axis/bound.
+        for (int i = 0; i < getTupleCount(); ++i) {
+            frameTuple.resetByTupleIndex(this, i);
+            double key = keyValueProviders[splitAxis + sortOrder].getValue(
+                    frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
+            tupleEntries1.add(i, key);
+        }
+        double key = keyValueProviders[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
+                tuple.getFieldStart(splitAxis + sortOrder));
+        tupleEntries1.add(-1, key);
+        tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+        // Pick the split point with minimum overlap; break ties by minimum total area.
+        double minArea = Double.MAX_VALUE;
+        double minOverlap = Double.MAX_VALUE;
+        int splitPoint = 0;
+        for (int i = 1; i <= splitDistribution; ++i) {
+            int d = m - 1 + i;
+
+            generateDist(tuple, tupleEntries1, rec[0], 0, d);
+            generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+
+            double overlap = rec[0].overlappedArea(rec[2]);
+            if (overlap < minOverlap) {
+                splitPoint = d;
+                minOverlap = overlap;
+                minArea = rec[0].area() + rec[2].area();
+            } else if (overlap == minOverlap) {
+                double area = rec[0].area() + rec[2].area();
+                if (area < minArea) {
+                    splitPoint = d;
+                    minArea = area;
+                }
+            }
+        }
+        // Move the smaller side to the right frame.
+        int startIndex, endIndex;
+        if (splitPoint < (getTupleCount() + 1) / 2) {
+            startIndex = 0;
+            endIndex = splitPoint;
+        } else {
+            startIndex = splitPoint;
+            endIndex = (getTupleCount() + 1);
+        }
+        boolean tupleInserted = false;
+        int totalBytes = 0, numOfDeletedTuples = 0;
+        for (int i = startIndex; i < endIndex; i++) {
+            if (tupleEntries1.get(i).getTupleIndex() != -1) {
+                // Copy to the right frame and mark this page's slot deleted (-1).
+                frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+                rightFrame.insert(frameTuple, -1);
+                ((UnorderedSlotManager) slotManager).modifySlot(
+                        slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+                totalBytes += getTupleSize(frameTuple);
+                numOfDeletedTuples++;
+            } else {
+                // The incoming tuple sorted into the right side; insert it there directly.
+                rightFrame.insert(tuple, -1);
+                tupleInserted = true;
+            }
+        }
+
+        ((UnorderedSlotManager) slotManager).deleteEmptySlots();
+
+        // maintain space information
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+                + (slotManager.getSlotSize() * numOfDeletedTuples));
+
+        // compact both pages
+        rightFrame.compact();
+        compact();
+
+        if (!tupleInserted) {
+            insert(tuple, -1);
+        }
+
+        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
+
+        // Recompute both MBRs and serialize them into the split key buffers.
+        splitKey.initData(splitKeySize);
+        this.adjustMBR();
+        rTreeTupleWriterLeftFrame.writeTupleFields(getTuples(), 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+        rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
+
+        ((IRTreeFrame) rightFrame).adjustMBR();
+        rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+                rTreeSplitKey.getRightPageBuffer(), 0);
+        rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
+
+        tupleEntries1.clear();
+        tupleEntries2.clear();
+    }
+
+    // Tuple's on-page byte cost; interior frames add the child pointer, leaves do not.
+    abstract public int getTupleSize(ITupleReference tuple);
+
+    /**
+     * Computes into {@code rec} the MBR of entries [start, end) of {@code entries},
+     * substituting the new {@code tuple} for the entry tagged with index -1.
+     * Seeds the rectangle from the first real (index != -1) entry, then enlarges.
+     */
+    public void generateDist(ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end) {
+        int j = 0;
+        while (entries.get(j).getTupleIndex() == -1) {
+            j++;
+        }
+        frameTuple.resetByTupleIndex(this, entries.get(j).getTupleIndex());
+        rec.set(frameTuple, keyValueProviders);
+        for (int i = start; i < end; ++i) {
+            if (i != j) {
+                if (entries.get(i).getTupleIndex() != -1) {
+                    frameTuple.resetByTupleIndex(this, entries.get(i).getTupleIndex());
+                    rec.enlarge(frameTuple, keyValueProviders);
+                } else {
+                    rec.enlarge(tuple, keyValueProviders);
+                }
+            }
+        }
+    }
+
+    /**
+     * Re-points tuples[j] at the page tuple holding the minimum lower bound of
+     * dimension j, and tuples[maxFieldPos + j] at the maximum upper bound.
+     * Assumes the caller already reset every entry of {@code tuples} to tuple 0
+     * (see adjustMBR), hence the loop starting at index 1.
+     */
+    public void adjustMBRImpl(ITreeIndexTupleReference[] tuples) {
+        int maxFieldPos = keyValueProviders.length / 2;
+        for (int i = 1; i < getTupleCount(); i++) {
+            frameTuple.resetByTupleIndex(this, i);
+            for (int j = 0; j < maxFieldPos; j++) {
+                int k = maxFieldPos + j;
+                double valA = keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
+                double valB = keyValueProviders[j].getValue(tuples[j].getFieldData(j), tuples[j].getFieldStart(j));
+                if (valA < valB) {
+                    tuples[j].resetByTupleIndex(this, i);
+                }
+                valA = keyValueProviders[k].getValue(frameTuple.getFieldData(k), frameTuple.getFieldStart(k));
+                valB = keyValueProviders[k].getValue(tuples[k].getFieldData(k), tuples[k].getFieldStart(k));
+                if (valA > valB) {
+                    tuples[k].resetByTupleIndex(this, i);
+                }
+            }
+        }
+    }
+
+    @Override
+    public void adjustMBR() {
+        // Seed every per-field reference with tuple 0 before scanning the rest.
+        for (int i = 0; i < tuples.length; i++) {
+            tuples[i].setFieldCount(getFieldCount());
+            tuples[i].resetByTupleIndex(this, 0);
+        }
+
+        adjustMBRImpl(tuples);
+    }
+
+    public abstract int getFieldCount();
+
+    @Override
+    public int getPageHeaderSize() {
+        // NOTE(review): resetSpaceParams() treats rightPageOff + 4 as the first free
+        // byte, so this appears to under-report the header by the 4-byte right-page
+        // field — confirm how callers use this value.
+        return rightPageOff;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
new file mode 100644
index 0000000..63387ef
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -0,0 +1,512 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import java.util.ArrayList;
+import java.util.Collections;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
+
+public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
+
+    // Each interior entry carries a 4-byte child page id appended after its last key field.
+    private static final int childPtrSize = 4;
+    // Integer comparator used to compare the raw child-pointer bytes of two entries.
+    private IBinaryComparator childPtrCmp = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
+            .createBinaryComparator();
+    private final int keyFieldCount;
+
+    public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders) {
+        super(tupleWriter, keyValueProviders);
+        keyFieldCount = keyValueProviders.length;
+        frameTuple.setFieldCount(keyFieldCount);
+    }
+
+    /**
+     * Selects the child entry to descend into for inserting {@code tuple}, following the
+     * R*-tree ChooseSubtree heuristics: at level 1 (children are leaves) minimize overlap
+     * enlargement; above that minimize area enlargement, with area as the tie-breaker.
+     * Leaves {@code frameTuple} positioned on the chosen entry and returns true iff the
+     * chosen MBR must be enlarged to cover the new tuple.
+     */
+    @Override
+    public boolean findBestChild(ITupleReference tuple, MultiComparator cmp) {
+        cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+
+        int bestChild = 0;
+        double minEnlargedArea = Double.MAX_VALUE;
+
+        // the children pointers in the node point to leaves
+        if (getLevel() == 1) {
+            // find least overlap enlargement, use minimum enlarged area to
+            // break tie, if tie still exists use minimum area to break it
+            for (int i = 0; i < getTupleCount(); ++i) {
+                frameTuple.resetByTupleIndex(this, i);
+                double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+                tupleEntries1.add(i, enlargedArea);
+                if (enlargedArea < minEnlargedArea) {
+                    minEnlargedArea = enlargedArea;
+                    bestChild = i;
+                }
+            }
+            // NOTE(review): this condition is equivalent to minEnlargedArea != doubleEpsilon(),
+            // which holds for almost any value; presumably the intent was to test
+            // minEnlargedArea > 0 within epsilon — confirm before relying on it.
+            if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon() || minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
+                minEnlargedArea = Double.MAX_VALUE;
+                int k;
+                if (getTupleCount() > nearMinimumOverlapFactor) {
+                    // sort the entries based on their area enlargement needed
+                    // to include the object
+                    tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount());
+                    k = nearMinimumOverlapFactor;
+                } else {
+                    k = getTupleCount();
+                }
+
+                double minOverlap = Double.MAX_VALUE;
+                int id = 0;
+                for (int i = 0; i < k; ++i) {
+                    double difference = 0.0;
+                    for (int j = 0; j < getTupleCount(); ++j) {
+                        frameTuple.resetByTupleIndex(this, j);
+                        cmpFrameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+
+                        int c = pointerCmp(frameTuple, cmpFrameTuple, cmp);
+                        if (c != 0) {
+                            // Overlap growth of candidate's MBR (enlarged by tuple) vs. sibling j.
+                            double intersection = overlappedArea(frameTuple, tuple, cmpFrameTuple, cmp);
+                            if (intersection != 0.0) {
+                                difference += intersection - overlappedArea(frameTuple, null, cmpFrameTuple, cmp);
+                            }
+                        } else {
+                            // Same child pointer: remember the candidate's slot index in this page.
+                            id = j;
+                        }
+                    }
+
+                    double enlargedArea = enlargedArea(cmpFrameTuple, tuple, cmp);
+                    if (difference < minOverlap) {
+                        minOverlap = difference;
+                        minEnlargedArea = enlargedArea;
+                        bestChild = id;
+                    } else if (difference == minOverlap) {
+                        if (enlargedArea < minEnlargedArea) {
+                            minEnlargedArea = enlargedArea;
+                            bestChild = id;
+                        } else if (enlargedArea == minEnlargedArea) {
+                            double area = area(cmpFrameTuple, cmp);
+                            frameTuple.resetByTupleIndex(this, bestChild);
+                            double minArea = area(frameTuple, cmp);
+                            if (area < minArea) {
+                                bestChild = id;
+                            }
+                        }
+                    }
+                }
+            }
+        } else { // find minimum enlarged area, use minimum area to break tie
+            for (int i = 0; i < getTupleCount(); i++) {
+                frameTuple.resetByTupleIndex(this, i);
+                double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+                if (enlargedArea < minEnlargedArea) {
+                    minEnlargedArea = enlargedArea;
+                    bestChild = i;
+                } else if (enlargedArea == minEnlargedArea) {
+                    double area = area(frameTuple, cmp);
+                    frameTuple.resetByTupleIndex(this, bestChild);
+                    double minArea = area(frameTuple, cmp);
+                    if (area < minArea) {
+                        bestChild = i;
+                    }
+                }
+            }
+        }
+        tupleEntries1.clear();
+
+        // Position frameTuple on the winner so getBestChildPageId() can read its pointer.
+        frameTuple.resetByTupleIndex(this, bestChild);
+        if (minEnlargedArea > 0.0) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    /** Creates a tuple reference pre-sized to this frame's key field count. */
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        ITreeIndexTupleReference tuple = tupleWriter.createTupleReference();
+        tuple.setFieldCount(keyFieldCount);
+        return tuple;
+    }
+
+    /** Child page id of the entry frameTuple currently points at (set by findBestChild). */
+    @Override
+    public int getBestChildPageId() {
+        return buf.getInt(getChildPointerOff(frameTuple));
+    }
+
+    /**
+     * Linear scan for the entry whose child pointer equals {@code tuple}'s.
+     * Returns its tuple index, or -1 if no entry matches.
+     */
+    @Override
+    public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        for (int i = 0; i < getTupleCount(); i++) {
+            frameTuple.resetByTupleIndex(this, i);
+            int c = pointerCmp(frameTuple, tuple, cmp);
+            if (c == 0) {
+                return i;
+            }
+        }
+        return -1;
+    }
+
+    /** Child page id stored in the entry at {@code tupleIndex}. */
+    @Override
+    public int getChildPageId(int tupleIndex) {
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        return buf.getInt(getChildPointerOff(frameTuple));
+    }
+
+    /**
+     * Returns the child page id of the entry at {@code tupleIndex} if its MBR
+     * intersects the query rectangle in {@code tuple}, otherwise -1. Intersection
+     * test per dimension: query.lower <= entry.upper AND query.upper >= entry.lower.
+     */
+    @Override
+    public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j));
+            if (c > 0) {
+                return -1;
+            }
+            c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+                    frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+            if (c < 0) {
+                return -1;
+            }
+        }
+        return buf.getInt(getChildPointerOff(frameTuple));
+    }
+
+    /**
+     * Like {@link #findTupleByPointer(ITupleReference, MultiComparator)}, but while
+     * scanning it also records every non-matching child's page id into
+     * {@code traverseList} (tagged with {@code parentIndex}) for later traversal.
+     * Returns the matching tuple index, or -1 after queuing all children.
+     */
+    @Override
+    public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        for (int i = 0; i < getTupleCount(); i++) {
+            frameTuple.resetByTupleIndex(this, i);
+
+            int c = pointerCmp(frameTuple, tuple, cmp);
+            if (c == 0) {
+                return i;
+            } else {
+                int pageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
+                        getChildPointerOff(frameTuple));
+                traverseList.add(pageId, -1, parentIndex);
+            }
+        }
+        return -1;
+    }
+
+    /**
+     * Defragments the page: slides all live tuples (key fields plus the trailing
+     * child pointer) down to a contiguous region in offset order and rewrites the
+     * slots and free-space bookkeeping. Always returns false.
+     */
+    @Override
+    public boolean compact() {
+        resetSpaceParams();
+
+        int tupleCount = buf.getInt(tupleCountOff);
+        int freeSpace = buf.getInt(freeSpaceOff);
+
+        // Sort live tuples by their current byte offset so copies never overlap forward.
+        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+        sortedTupleOffs.ensureCapacity(tupleCount);
+        for (int i = 0; i < tupleCount; i++) {
+            int slotOff = slotManager.getSlotOff(i);
+            int tupleOff = slotManager.getTupleOff(slotOff);
+            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+        }
+        Collections.sort(sortedTupleOffs);
+
+        for (int i = 0; i < sortedTupleOffs.size(); i++) {
+            int tupleOff = sortedTupleOffs.get(i).tupleOff;
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+
+            // Tuple length = end of last key field + the 4-byte child pointer.
+            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+            int tupleLength = tupleEndOff - tupleOff + childPtrSize;
+            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
+
+            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+            freeSpace += tupleLength;
+        }
+
+        buf.putInt(freeSpaceOff, freeSpace);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+
+        return false;
+    }
+
+    @Override
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
+        int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize;
+        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
+                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+        else
+            return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+    }
+
+    @Override
+    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        if (tupleIndex == -1) {
+            tupleIndex = findTupleByPointer(tuple, cmp);
+        }
+        if (tupleIndex != -1) {
+            tupleWriter.writeTuple(tuple, buf.array(), getTupleOffset(tupleIndex));
+        } else {
+            throw new TreeIndexException("Error: Faild to find a tuple in a page");
+
+        }
+
+    }
+
+    /**
+     * Compares only the child pointers of two interior tuples, i.e. the
+     * childPtrSize bytes stored just past each tuple's last key field.
+     */
+    private int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp) {
+        int lastKeyField = cmp.getKeyFieldCount() - 1;
+        byte[] bytesA = tupleA.getFieldData(lastKeyField);
+        byte[] bytesB = tupleB.getFieldData(lastKeyField);
+        return childPtrCmp.compare(bytesA, getChildPointerOff(tupleA), childPtrSize,
+                bytesB, getChildPointerOff(tupleB), childPtrSize);
+    }
+
+    public int getTupleSize(ITupleReference tuple) {
+        return tupleWriter.bytesRequired(tuple) + childPtrSize;
+    }
+
+    /**
+     * Offset of a tuple's child pointer, which is stored immediately after
+     * its last field.
+     */
+    private int getChildPointerOff(ITupleReference tuple) {
+        int lastField = tuple.getFieldCount() - 1;
+        return tuple.getFieldStart(lastField) + tuple.getFieldLength(lastField);
+    }
+
+    /**
+     * Appends {@code tuple} plus its child pointer at the current free-space
+     * offset and registers a new slot for it. The caller is expected to have
+     * checked space first (see hasSpaceInsert). {@code tupleIndex} is unused:
+     * new entries always go at the end of the data area.
+     */
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        frameTuple.setFieldCount(tuple.getFieldCount());
+        // -1 presumably tells the slot manager to append -- confirm against
+        // the slot manager's insertSlot contract.
+        slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+        int freeSpace = buf.getInt(freeSpaceOff);
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, tuple.getFieldCount(), buf.array(), freeSpace);
+        // Copy the child pointer (bytes just past the tuple's last field)
+        // directly behind the serialized key fields.
+        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getChildPointerOff(tuple), buf.array(),
+                freeSpace + bytesWritten, childPtrSize);
+        int tupleSize = bytesWritten + childPtrSize;
+
+        // Maintain tuple count and both free-space counters.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+
+    }
+
+    /**
+     * Removes the entry at {@code tupleIndex} by sliding the slot array over
+     * the deleted slot. The tuple's data bytes stay in place and are only
+     * reclaimed on the next compact().
+     */
+    @Override
+    public void delete(int tupleIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        int slotOff = slotManager.getSlotOff(tupleIndex);
+
+        int tupleOff = slotManager.getTupleOff(slotOff);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int tupleSize = tupleWriter.bytesRequired(frameTuple);
+
+        // perform deletion (we just do a memcpy to overwrite the slot)
+        int slotStartOff = slotManager.getSlotEndOff();
+        int length = slotOff - slotStartOff;
+        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+
+        // maintain space information; childPtrSize is added back separately
+        // because bytesRequired() covers only the key fields.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff,
+                buf.getInt(totalFreeSpaceOff) + tupleSize + childPtrSize + slotManager.getSlotSize());
+    }
+
+    /**
+     * Compares the stored entry at {@code tupleIndex} against {@code tuple}
+     * coordinate by coordinate (fields [0, dim) are the low coordinates,
+     * fields [dim, 2*dim) the high ones). Returns true as soon as any
+     * coordinate differs, i.e. the stored MBR does not match and presumably
+     * must be recomputed by the caller.
+     */
+    @Override
+    public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            // Low coordinate of dimension i.
+            int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+                    frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            if (c != 0) {
+                return true;
+            }
+            // High coordinate of dimension i.
+            c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+                    tuple.getFieldLength(j));
+
+            if (c != 0) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Area of the intersection between tuple2's MBR and tuple1's MBR, where
+     * tuple1 is first (conceptually) enlarged to also cover
+     * {@code tupleToBeInserted} when that argument is non-null. Returns 0.0
+     * as soon as the two boxes are disjoint in any dimension.
+     * Fields [0, dim) hold the low coordinates, [dim, 2*dim) the high ones.
+     */
+    private double overlappedArea(ITupleReference tuple1, ITupleReference tupleToBeInserted, ITupleReference tuple2,
+            MultiComparator cmp) {
+        double area = 1.0;
+        double f1, f2;
+
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            double pHigh1, pLow1;
+            if (tupleToBeInserted != null) {
+                // pLow1 = min(tuple1.low, insert.low) for this dimension.
+                int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
+                        tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i),
+                        tupleToBeInserted.getFieldStart(i), tupleToBeInserted.getFieldLength(i));
+                if (c < 0) {
+                    pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+                } else {
+                    pLow1 = keyValueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
+                            tupleToBeInserted.getFieldStart(i));
+                }
+
+                // pHigh1 = max(tuple1.high, insert.high) for this dimension.
+                c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
+                        tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j),
+                        tupleToBeInserted.getFieldStart(j), tupleToBeInserted.getFieldLength(j));
+                if (c > 0) {
+                    pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+                } else {
+                    pHigh1 = keyValueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
+                            tupleToBeInserted.getFieldStart(j));
+                }
+            } else {
+                // No insertion candidate: use tuple1's own bounds.
+                pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+                pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+            }
+
+            double pLow2 = keyValueProviders[i].getValue(tuple2.getFieldData(i), tuple2.getFieldStart(i));
+            double pHigh2 = keyValueProviders[j].getValue(tuple2.getFieldData(j), tuple2.getFieldStart(j));
+
+            // Disjoint in this dimension => no overlap at all.
+            if (pLow1 > pHigh2 || pHigh1 < pLow2) {
+                return 0.0;
+            }
+
+            // Accumulate the overlap extent of this dimension.
+            f1 = Math.max(pLow1, pLow2);
+            f2 = Math.min(pHigh1, pHigh2);
+            area *= f2 - f1;
+        }
+        return area;
+    }
+
+    /**
+     * Area growth that {@code tuple}'s MBR would undergo if enlarged to also
+     * cover {@code tupleToBeInserted}: area(union bounds) - area(tuple).
+     * Fields [0, dim) hold the low coordinates, [dim, 2*dim) the high ones.
+     */
+    private double enlargedArea(ITupleReference tuple, ITupleReference tupleToBeInserted, MultiComparator cmp) {
+        double areaBeforeEnlarge = area(tuple, cmp);
+        double areaAfterEnlarge = 1.0;
+
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            double pHigh, pLow;
+            // pLow = min(tuple.low, insert.low) for this dimension.
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i),
+                    tupleToBeInserted.getFieldLength(i));
+            if (c < 0) {
+                pLow = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+            } else {
+                pLow = keyValueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
+                        tupleToBeInserted.getFieldStart(i));
+            }
+
+            // pHigh = max(tuple.high, insert.high) for this dimension.
+            c = cmp.getComparators()[j].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+                    tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j),
+                    tupleToBeInserted.getFieldLength(j));
+            if (c > 0) {
+                pHigh = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
+            } else {
+                pHigh = keyValueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
+                        tupleToBeInserted.getFieldStart(j));
+            }
+            areaAfterEnlarge *= pHigh - pLow;
+        }
+        return areaAfterEnlarge - areaBeforeEnlarge;
+    }
+
+    /**
+     * Volume of the tuple's MBR: the product over all dimensions of
+     * (high coordinate - low coordinate). Fields [0, dim) hold the low
+     * coordinates, [dim, 2*dim) the high ones.
+     */
+    private double area(ITupleReference tuple, MultiComparator cmp) {
+        int dim = cmp.getKeyFieldCount() / 2;
+        double result = 1.0;
+        for (int lo = 0; lo < dim; lo++) {
+            int hi = dim + lo;
+            double high = keyValueProviders[hi].getValue(tuple.getFieldData(hi), tuple.getFieldStart(hi));
+            double low = keyValueProviders[lo].getValue(tuple.getFieldData(lo), tuple.getFieldStart(lo));
+            result *= high - low;
+        }
+        return result;
+    }
+
+    /**
+     * Returns true if frameTuple's MBR would have to grow in at least one
+     * dimension to contain {@code tuple}: either a low coordinate of
+     * frameTuple exceeds tuple's, or a high coordinate falls short of it.
+     * NOTE(review): reads frameTuple without repositioning it, so the caller
+     * must have reset frameTuple to the entry of interest first -- confirm at
+     * call sites.
+     */
+    @Override
+    public boolean checkEnlargement(ITupleReference tuple, MultiComparator cmp) {
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            // Low bound too high => must grow downward.
+            int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+                    frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            if (c > 0) {
+                return true;
+            }
+            // High bound too low => must grow upward.
+            c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+                    tuple.getFieldLength(j));
+            if (c < 0) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Widens frameTuple's MBR in place so it contains {@code tuple}: each low
+     * coordinate is lowered and each high coordinate raised as needed. The
+     * arraycopy writes into the byte array backing frameTuple, so the page
+     * itself is mutated. Like checkEnlargement, this assumes the caller has
+     * already positioned frameTuple on the entry to enlarge.
+     */
+    @Override
+    public void enlarge(ITupleReference tuple, MultiComparator cmp) {
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            // Lower the low coordinate of dimension i if tuple's is smaller.
+            int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+                    frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            if (c > 0) {
+                System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), frameTuple.getFieldData(i),
+                        frameTuple.getFieldStart(i), tuple.getFieldLength(i));
+            }
+            // Raise the high coordinate of dimension i if tuple's is larger.
+            c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+                    tuple.getFieldLength(j));
+            if (c < 0) {
+                System.arraycopy(tuple.getFieldData(j), tuple.getFieldStart(j), frameTuple.getFieldData(j),
+                        frameTuple.getFieldStart(j), tuple.getFieldLength(j));
+            }
+        }
+    }
+
+    /**
+     * For debugging only: collects the child page id stored after the last
+     * key field of every entry in this interior frame, in slot order.
+     */
+    public ArrayList<Integer> getChildren(MultiComparator cmp) {
+        ArrayList<Integer> ret = new ArrayList<Integer>();
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        int tupleCount = buf.getInt(tupleCountOff);
+        for (int i = 0; i < tupleCount; i++) {
+            int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            // The child pointer is the int immediately after the last field.
+            int intVal = IntegerSerializerDeserializer.getInt(
+                    buf.array(),
+                    frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                            + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+            ret.add(intVal);
+        }
+        return ret;
+    }
+
+    /**
+     * Number of key fields this interior frame handles; there is exactly one
+     * primitive value provider per key field.
+     */
+    @Override
+    public int getFieldCount() {
+        final int keyFieldCount = keyValueProviders.length;
+        return keyFieldCount;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
new file mode 100644
index 0000000..943a179
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+
+/**
+ * Factory for R-tree interior (non-leaf) NSM frames. Each created frame gets
+ * a fresh tuple writer and a fresh set of primitive value providers.
+ */
+public class RTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+    private final IPrimitiveValueProviderFactory[] keyValueProviderFactories;
+
+    /**
+     * @param tupleWriterFactory creates the tuple writer used by each frame
+     * @param keyValueProviderFactories one factory per key field; the count
+     *            must be even since every dimension contributes a low and a
+     *            high coordinate
+     * @throws IllegalArgumentException if the number of factories is odd
+     */
+    public RTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, IPrimitiveValueProviderFactory[] keyValueProviderFactories) {
+        this.tupleWriterFactory = tupleWriterFactory;
+        if (keyValueProviderFactories.length % 2 != 0) {
+            // Clarified the message: the old text claimed "different number of
+            // dimensions", but the actual invariant is an even field count.
+            throw new IllegalArgumentException("The number of key fields must be even (a low and a high value per dimension).");
+        }
+        this.keyValueProviderFactories = keyValueProviderFactories;
+    }
+
+    @Override
+    public IRTreeInteriorFrame createFrame() {
+        IPrimitiveValueProvider[] keyValueProviders = new IPrimitiveValueProvider[keyValueProviderFactories.length];
+        for (int i = 0; i < keyValueProviders.length; i++) {
+            keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
+        }
+        return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders);
+    }
+
+    @Override
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+        return tupleWriterFactory;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
new file mode 100644
index 0000000..f1d71ff
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+
+/**
+ * R-tree leaf NSM frame. Unlike the interior frame, leaf tuples carry no
+ * trailing child pointer, so sizes and free-space accounting use only the
+ * serialized tuple bytes.
+ */
+public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
+
+    public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders) {
+        super(tupleWriter, keyValueProviders);
+    }
+
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        return tupleWriter.createTupleReference();
+    }
+
+    @Override
+    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, null, null);
+    }
+
+    /**
+     * Returns true if the query MBR {@code tuple} intersects the entry at
+     * {@code tupleIndex}: disjoint iff the query's low exceeds the entry's
+     * high, or the query's high falls below the entry's low, in any
+     * dimension. Fields [0, dim) are low coordinates, [dim, 2*dim) high ones;
+     * comparator i is used for both coordinates of dimension i (presumably
+     * both share the same type -- confirm).
+     */
+    @Override
+    public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            // Query low vs. entry high.
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j));
+            if (c > 0) {
+                return false;
+            }
+            // Query high vs. entry low.
+            c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+                    frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+
+            if (c < 0) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /** Bytes needed to store {@code tuple}; leaf tuples have no child pointer. */
+    public int getTupleSize(ITupleReference tuple) {
+        return tupleWriter.bytesRequired(tuple);
+    }
+
+    /**
+     * Appends {@code tuple} at the current free-space offset and registers a
+     * new slot; {@code tupleIndex} is unused.
+     */
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
+
+        // Maintain tuple count and both free-space counters.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    }
+
+    /**
+     * Removes the entry at {@code tupleIndex} by sliding the slot array over
+     * its slot; the tuple's data bytes are reclaimed only on compaction.
+     */
+    @Override
+    public void delete(int tupleIndex, MultiComparator cmp) {
+        int slotOff = slotManager.getSlotOff(tupleIndex);
+
+        int tupleOff = slotManager.getTupleOff(slotOff);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int tupleSize = tupleWriter.bytesRequired(frameTuple);
+
+        // perform deletion (we just do a memcpy to overwrite the slot)
+        int slotStartOff = slotManager.getSlotEndOff();
+        int length = slotOff - slotStartOff;
+        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+
+        // maintain space information
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+    }
+
+    @Override
+    public int getFieldCount() {
+        return frameTuple.getFieldCount();
+    }
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
new file mode 100644
index 0000000..e31148f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+
+/**
+ * Factory for R-tree leaf NSM frames. Each created frame gets a fresh tuple
+ * writer and a fresh set of primitive value providers.
+ */
+public class RTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+    private final IPrimitiveValueProviderFactory[] keyValueProviderFactories;
+
+    /**
+     * @param tupleWriterFactory creates the tuple writer used by each frame
+     * @param keyValueProviderFactories one factory per key field; the count
+     *            must be even since every dimension contributes a low and a
+     *            high coordinate
+     * @throws IllegalArgumentException if the number of factories is odd
+     */
+    public RTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, IPrimitiveValueProviderFactory[] keyValueProviderFactories) {
+        this.tupleWriterFactory = tupleWriterFactory;
+        if (keyValueProviderFactories.length % 2 != 0) {
+            // Clarified the message: the old text claimed "different number of
+            // dimensions", but the actual invariant is an even field count.
+            throw new IllegalArgumentException("The number of key fields must be even (a low and a high value per dimension).");
+        }
+        this.keyValueProviderFactories = keyValueProviderFactories;
+    }
+
+    @Override
+    public IRTreeLeafFrame createFrame() {
+        IPrimitiveValueProvider[] keyValueProviders = new IPrimitiveValueProvider[keyValueProviderFactories.length];
+        for (int i = 0; i < keyValueProviders.length; i++) {
+            keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
+        }
+        return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders);
+    }
+
+    @Override
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+        return tupleWriterFactory;
+    }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoublePrimitiveValueProviderFactory.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoublePrimitiveValueProviderFactory.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoublePrimitiveValueProviderFactory.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoublePrimitiveValueProviderFactory.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatPrimitiveValueProviderFactory.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatPrimitiveValueProviderFactory.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatPrimitiveValueProviderFactory.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatPrimitiveValueProviderFactory.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerPrimitiveValueProviderFactory.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerPrimitiveValueProviderFactory.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerPrimitiveValueProviderFactory.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerPrimitiveValueProviderFactory.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
new file mode 100644
index 0000000..cc3cf5b
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -0,0 +1,996 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import java.util.ArrayList;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class RTree implements ITreeIndex {
+
+    private final int rootPage = 1;
+
+    // Global node sequence number used for the concurrency control protocol
+    private final AtomicLong globalNsn;
+    private final ReadWriteLock treeLatch;
+
+    private final IFreePageManager freePageManager;
+    private final IBufferCache bufferCache;
+    private int fileId;
+
+    private final ITreeIndexFrameFactory interiorFrameFactory;
+    private final ITreeIndexFrameFactory leafFrameFactory;
+    private final int fieldCount;
+    private final IBinaryComparatorFactory[] cmpFactories;
+
+    public RTree(IBufferCache bufferCache, int fieldCount, IBinaryComparatorFactory[] cmpFactories,
+            IFreePageManager freePageManager, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexFrameFactory leafFrameFactory) {
+        this.bufferCache = bufferCache;
+        this.fieldCount = fieldCount;
+        this.cmpFactories = cmpFactories;
+        this.freePageManager = freePageManager;
+        this.interiorFrameFactory = interiorFrameFactory;
+        this.leafFrameFactory = leafFrameFactory;
+        globalNsn = new AtomicLong();
+        this.treeLatch = new ReentrantReadWriteLock(true);
+    }
+
+    private long incrementGlobalNsn() {
+        return globalNsn.incrementAndGet();
+    }
+
+    public byte getTreeHeight(IRTreeLeafFrame leafFrame) throws HyracksDataException {
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+        rootNode.acquireReadLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            return leafFrame.getLevel();
+        } finally {
+            rootNode.releaseReadLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+
+    @SuppressWarnings("rawtypes")
+    public String printTree(IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
+            ISerializerDeserializer[] keySerdes) throws Exception {
+        MultiComparator cmp = MultiComparator.create(cmpFactories);
+        byte treeHeight = getTreeHeight(leafFrame);
+        StringBuilder strBuilder = new StringBuilder();
+        printTree(rootPage, null, false, leafFrame, interiorFrame, treeHeight, keySerdes, strBuilder, cmp);
+        return strBuilder.toString();
+    }
+
+    @SuppressWarnings("rawtypes")
+    public void printTree(int pageId, ICachedPage parent, boolean unpin, IRTreeLeafFrame leafFrame,
+            IRTreeInteriorFrame interiorFrame, byte treeHeight, ISerializerDeserializer[] keySerdes,
+            StringBuilder strBuilder, MultiComparator cmp) throws Exception {
+        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+        node.acquireReadLatch();
+        try {
+            if (parent != null && unpin == true) {
+                parent.releaseReadLatch();
+                bufferCache.unpin(parent);
+            }
+            interiorFrame.setPage(node);
+            int level = interiorFrame.getLevel();
+            strBuilder.append(String.format("%1d ", level));
+            strBuilder.append(String.format("%3d ", pageId) + ": ");
+            for (int i = 0; i < treeHeight - level; i++) {
+                strBuilder.append("    ");
+            }
+
+            String keyString;
+            long LSN, NSN;
+            int rightPage;
+            if (interiorFrame.isLeaf()) {
+                leafFrame.setPage(node);
+                keyString = TreeIndexUtils.printFrameTuples(leafFrame, keySerdes);
+                LSN = leafFrame.getPageLsn();
+                NSN = leafFrame.getPageNsn();
+                rightPage = leafFrame.getRightPage();
+
+            } else {
+                keyString = TreeIndexUtils.printFrameTuples(interiorFrame, keySerdes);
+                LSN = interiorFrame.getPageLsn();
+                NSN = interiorFrame.getPageNsn();
+                rightPage = interiorFrame.getRightPage();
+            }
+
+            strBuilder.append(keyString + "\n" + "pageId: " + pageId + " LSN: " + LSN + " NSN: " + NSN + " rightPage: "
+                    + rightPage + "\n");
+            if (!interiorFrame.isLeaf()) {
+                ArrayList<Integer> children = ((RTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
+                for (int i = 0; i < children.size(); i++) {
+                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, treeHeight,
+                            keySerdes, strBuilder, cmp);
+                }
+            } else {
+                node.releaseReadLatch();
+                bufferCache.unpin(node);
+            }
+        } catch (Exception e) {
+            node.releaseReadLatch();
+            bufferCache.unpin(node);
+            e.printStackTrace();
+        }
+    }
+
+    @Override
+    public void create(int fileId) throws HyracksDataException {
+        treeLatch.writeLock().lock();
+        try {
+            ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+            ITreeIndexMetaDataFrame metaFrame = freePageManager.getMetaDataFrameFactory().createFrame();
+            freePageManager.open(fileId);
+            freePageManager.init(metaFrame, rootPage);
+
+            // initialize root page
+            ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
+
+            rootNode.acquireWriteLatch();
+            try {
+                leafFrame.setPage(rootNode);
+                leafFrame.initBuffer((byte) 0);
+            } finally {
+                rootNode.releaseWriteLatch();
+                bufferCache.unpin(rootNode);
+            }
+        } finally {
+            treeLatch.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public void open(int fileId) {
+        this.fileId = fileId;
+        freePageManager.open(fileId);
+    }
+
+    @Override
+    public void close() {
+        fileId = -1;
+        freePageManager.close();
+    }
+
+    @Override
+    public int getFileId() {
+        return fileId;
+    }
+
+    @Override
+    public IBufferCache getBufferCache() {
+        return bufferCache;
+    }
+
+    private RTreeOpContext createOpContext() {
+        return new RTreeOpContext((IRTreeLeafFrame) leafFrameFactory.createFrame(),
+                (IRTreeInteriorFrame) interiorFrameFactory.createFrame(), freePageManager.getMetaDataFrameFactory()
+                        .createFrame(), cmpFactories, 8);
+    }
+
+    private void insert(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException, TreeIndexException {
+        RTreeOpContext ctx = (RTreeOpContext) ictx;
+        ctx.reset();
+        ctx.setTuple(tuple);
+        ctx.splitKey.reset();
+        ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
+        ctx.splitKey.getRightTuple().setFieldCount(cmpFactories.length);
+
+        int maxFieldPos = cmpFactories.length / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = ctx.cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+            if (c > 0) {
+                throw new IllegalArgumentException("The low key point has larger coordinates than the high key point.");
+            }
+        }
+        try {
+            ICachedPage leafNode = findLeaf(ctx);
+
+            int pageId = ctx.pathList.getLastPageId();
+            ctx.pathList.moveLast();
+            insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+
+            while (true) {
+                if (ctx.splitKey.getLeftPageBuffer() != null) {
+                    updateParentForInsert(ctx);
+                } else {
+                    break;
+                }
+            }
+        } finally {
+            for (int i = ctx.NSNUpdates.size() - 1; i >= 0; i--) {
+                ICachedPage node = ctx.NSNUpdates.get(i);
+                ctx.interiorFrame.setPage(node);
+                ctx.interiorFrame.setPageNsn(incrementGlobalNsn());
+            }
+
+            for (int i = ctx.LSNUpdates.size() - 1; i >= 0; i--) {
+                ICachedPage node = ctx.LSNUpdates.get(i);
+                ctx.interiorFrame.setPage(node);
+                ctx.interiorFrame.setPageLsn(incrementGlobalNsn());
+                node.releaseWriteLatch();
+                bufferCache.unpin(node);
+            }
+        }
+    }
+
+    private ICachedPage findLeaf(RTreeOpContext ctx) throws HyracksDataException {
+        int pageId = rootPage;
+        boolean writeLatched = false;
+        boolean readLatched = false;
+        boolean succeeded = false;
+        ICachedPage node = null;
+        boolean isLeaf = false;
+        long pageLsn = 0, parentLsn = 0;
+
+        try {
+
+            while (true) {
+                if (!writeLatched) {
+                    node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                    ctx.interiorFrame.setPage(node);
+                    isLeaf = ctx.interiorFrame.isLeaf();
+                    if (isLeaf) {
+                        node.acquireWriteLatch();
+                        writeLatched = true;
+
+                        if (!ctx.interiorFrame.isLeaf()) {
+                            node.releaseWriteLatch();
+                            writeLatched = false;
+                            bufferCache.unpin(node);
+                            continue;
+                        }
+                    } else {
+                        // Be optimistic and grab read latch first. We will swap
+                        // it to write latch if we need to enlarge the best
+                        // child tuple.
+                        node.acquireReadLatch();
+                        readLatched = true;
+                    }
+                }
+
+                if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+                    // Concurrent split detected, go back to parent and
+                    // re-choose the best child
+                    if (writeLatched) {
+                        node.releaseWriteLatch();
+                        writeLatched = false;
+                        bufferCache.unpin(node);
+                    } else {
+                        node.releaseReadLatch();
+                        readLatched = false;
+                        bufferCache.unpin(node);
+                    }
+
+                    pageId = ctx.pathList.getLastPageId();
+                    if (pageId != rootPage) {
+                        parentLsn = ctx.pathList.getPageLsn(ctx.pathList.size() - 2);
+                    }
+                    ctx.pathList.moveLast();
+                    continue;
+                }
+
+                pageLsn = ctx.interiorFrame.getPageLsn();
+                ctx.pathList.add(pageId, pageLsn, -1);
+
+                if (!isLeaf) {
+                    // findBestChild must be called *before* getBestChildPageId
+                    boolean enlarementIsNeeded = ctx.interiorFrame.findBestChild(ctx.getTuple(), ctx.cmp);
+                    int childPageId = ctx.interiorFrame.getBestChildPageId();
+
+                    if (enlarementIsNeeded) {
+                        if (!writeLatched) {
+                            node.releaseReadLatch();
+                            readLatched = false;
+                            bufferCache.unpin(node);
+
+                            node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                            node.acquireWriteLatch();
+                            writeLatched = true;
+                            ctx.interiorFrame.setPage(node);
+
+                            if (ctx.interiorFrame.getPageLsn() != pageLsn) {
+                                // The page was changed while we unlocked it;
+                                // thus, retry (re-choose best child)
+
+                                ctx.pathList.moveLast();
+                                continue;
+                            }
+                        }
+                        // We don't need to reset the frameTuple because it is
+                        // already pointing to the best child
+                        ctx.interiorFrame.enlarge(ctx.getTuple(), ctx.cmp);
+
+                        node.releaseWriteLatch();
+                        writeLatched = false;
+                        bufferCache.unpin(node);
+                    } else {
+                        if (readLatched) {
+                            node.releaseReadLatch();
+                            readLatched = false;
+                            bufferCache.unpin(node);
+                        } else if (writeLatched) {
+                            node.releaseWriteLatch();
+                            writeLatched = false;
+                            bufferCache.unpin(node);
+                        }
+                    }
+
+                    pageId = childPageId;
+                    parentLsn = pageLsn;
+                } else {
+                    ctx.leafFrame.setPage(node);
+                    succeeded = true;
+                    return node;
+                }
+            }
+        } finally {
+            if (!succeeded) {
+                if (readLatched) {
+                    node.releaseReadLatch();
+                    readLatched = false;
+                    bufferCache.unpin(node);
+                } else if (writeLatched) {
+                    node.releaseWriteLatch();
+                    writeLatched = false;
+                    bufferCache.unpin(node);
+                }
+            }
+        }
+    }
+
+    private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
+            throws HyracksDataException, TreeIndexException {
+        boolean succeeded = false;
+        FrameOpSpaceStatus spaceStatus;
+        if (!isLeaf) {
+            spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple);
+        } else {
+            spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
+        }
+
+        switch (spaceStatus) {
+            case SUFFICIENT_CONTIGUOUS_SPACE: {
+                try {
+                    if (!isLeaf) {
+                        ctx.interiorFrame.insert(tuple, -1);
+                    } else {
+                        ctx.leafFrame.insert(tuple, -1);
+                    }
+                    succeeded = true;
+                } finally {
+                    if (succeeded) {
+                        ctx.LSNUpdates.add(node);
+                        ctx.splitKey.reset();
+                    } else if (isLeaf) {
+                        // In case of a crash, we un-latch the interior node
+                        // inside updateParentForInsert.
+                        node.releaseWriteLatch();
+                        bufferCache.unpin(node);
+                    }
+                }
+                break;
+            }
+
+            case SUFFICIENT_SPACE: {
+                try {
+                    if (!isLeaf) {
+                        ctx.interiorFrame.compact();
+                        ctx.interiorFrame.insert(tuple, -1);
+                    } else {
+                        ctx.leafFrame.compact();
+                        ctx.leafFrame.insert(tuple, -1);
+                    }
+                    succeeded = true;
+                } finally {
+                    if (succeeded) {
+                        ctx.LSNUpdates.add(node);
+                        ctx.splitKey.reset();
+                    } else if (isLeaf) {
+                        // In case of a crash, we un-latch the interior node
+                        // inside updateParentForInsert.
+                        node.releaseWriteLatch();
+                        bufferCache.unpin(node);
+                    }
+                }
+                break;
+            }
+
+            case INSUFFICIENT_SPACE: {
+                int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+                ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
+                rightNode.acquireWriteLatch();
+
+                try {
+                    IRTreeFrame rightFrame;
+                    if (!isLeaf) {
+                        rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+                        rightFrame.setPage(rightNode);
+                        rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
+                        rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
+                        ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey);
+                        ctx.interiorFrame.setRightPage(rightPageId);
+                    } else {
+                        rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+                        rightFrame.setPage(rightNode);
+                        rightFrame.initBuffer((byte) 0);
+                        rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
+                        ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
+                        ctx.leafFrame.setRightPage(rightPageId);
+                    }
+                    succeeded = true;
+                } finally {
+                    if (succeeded) {
+                        ctx.NSNUpdates.add(rightNode);
+                        ctx.LSNUpdates.add(rightNode);
+                        ctx.NSNUpdates.add(node);
+                        ctx.LSNUpdates.add(node);
+                    } else if (isLeaf) {
+                        // In case of a crash, we un-latch the interior node
+                        // inside updateParentForInsert.
+                        node.releaseWriteLatch();
+                        bufferCache.unpin(node);
+                        rightNode.releaseWriteLatch();
+                        bufferCache.unpin(rightNode);
+                    } else {
+                        rightNode.releaseWriteLatch();
+                        bufferCache.unpin(rightNode);
+                    }
+
+                }
+                ctx.splitKey.setPages(pageId, rightPageId);
+                if (pageId == rootPage) {
+                    int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+                    ICachedPage newLeftNode = bufferCache
+                            .pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
+                    newLeftNode.acquireWriteLatch();
+                    succeeded = false;
+                    try {
+                        // copy left child to new left child
+                        System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0, newLeftNode
+                                .getBuffer().capacity());
+
+                        // initialize new root (leftNode becomes new root)
+                        ctx.interiorFrame.setPage(node);
+                        ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
+
+                        ctx.splitKey.setLeftPage(newLeftId);
+                        ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), -1);
+                        ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), -1);
+
+                        succeeded = true;
+                    } finally {
+                        if (succeeded) {
+                            ctx.NSNUpdates.remove(ctx.NSNUpdates.size() - 1);
+                            ctx.LSNUpdates.remove(ctx.LSNUpdates.size() - 1);
+
+                            ctx.NSNUpdates.add(newLeftNode);
+                            ctx.LSNUpdates.add(newLeftNode);
+
+                            ctx.NSNUpdates.add(node);
+                            ctx.LSNUpdates.add(node);
+                            ctx.splitKey.reset();
+                        } else if (isLeaf) {
+                            // In case of a crash, we un-latch the interior node
+                            // inside updateParentForInsert.
+                            node.releaseWriteLatch();
+                            bufferCache.unpin(node);
+                            rightNode.releaseWriteLatch();
+                            bufferCache.unpin(rightNode);
+                            newLeftNode.releaseWriteLatch();
+                            bufferCache.unpin(newLeftNode);
+                        } else {
+                            rightNode.releaseWriteLatch();
+                            bufferCache.unpin(rightNode);
+                            newLeftNode.releaseWriteLatch();
+                            bufferCache.unpin(newLeftNode);
+                        }
+                    }
+                }
+                break;
+            }
+        }
+    }
+
+    private void updateParentForInsert(RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        boolean succeeded = false;
+        boolean writeLatched = false;
+        int parentId = ctx.pathList.getLastPageId();
+        ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+        parentNode.acquireWriteLatch();
+        writeLatched = true;
+        ctx.interiorFrame.setPage(parentNode);
+        boolean foundParent = true;
+
+        try {
+            if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+                foundParent = false;
+                while (true) {
+                    if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.cmp) != -1) {
+                        // found the parent
+                        foundParent = true;
+                        break;
+                    }
+                    int rightPage = ctx.interiorFrame.getRightPage();
+                    parentNode.releaseWriteLatch();
+                    writeLatched = false;
+                    bufferCache.unpin(parentNode);
+
+                    if (rightPage == -1) {
+                        break;
+                    }
+
+                    parentId = rightPage;
+                    parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+                    parentNode.acquireWriteLatch();
+                    writeLatched = true;
+                    ctx.interiorFrame.setPage(parentNode);
+                }
+            }
+
+            if (foundParent) {
+                try {
+                    ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, ctx.cmp);
+                } catch (TreeIndexException e) {
+                    if (writeLatched) {
+                        parentNode.releaseWriteLatch();
+                        writeLatched = false;
+                        bufferCache.unpin(parentNode);
+                    }
+                    throw e;
+                }
+                insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
+                ctx.pathList.moveLast();
+                succeeded = true;
+                return;
+
+            }
+        } finally {
+            if (!succeeded) {
+                if (writeLatched) {
+                    parentNode.releaseWriteLatch();
+                    writeLatched = false;
+                    bufferCache.unpin(parentNode);
+                }
+            }
+        }
+
+        ctx.traverseList.clear();
+        findPath(ctx);
+        updateParentForInsert(ctx);
+    }
+
+    private void findPath(RTreeOpContext ctx) throws TreeIndexException, HyracksDataException {
+        boolean readLatched = false;
+        int pageId = rootPage;
+        int parentIndex = -1;
+        long parentLsn = 0;
+        long pageLsn;
+        int pageIndex;
+        ICachedPage node = null;
+        ctx.traverseList.add(pageId, -1, parentIndex);
+        try {
+            while (!ctx.traverseList.isLast()) {
+                pageId = ctx.traverseList.getFirstPageId();
+                parentIndex = ctx.traverseList.getFirstPageIndex();
+
+                node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                node.acquireReadLatch();
+                readLatched = true;
+                ctx.interiorFrame.setPage(node);
+                pageLsn = ctx.interiorFrame.getPageLsn();
+                pageIndex = ctx.traverseList.first();
+                ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+
+                ctx.traverseList.moveFirst();
+
+                if (ctx.interiorFrame.isLeaf()) {
+                    throw new TreeIndexException("Error: Failed to re-find parent of a page in the tree.");
+                }
+
+                if (pageId != rootPage) {
+                    parentLsn = ctx.traverseList.getPageLsn(ctx.traverseList.getPageIndex(pageIndex));
+                }
+                if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+                    int rightPage = ctx.interiorFrame.getRightPage();
+                    if (rightPage != -1) {
+                        ctx.traverseList.addFirst(rightPage, -1, parentIndex);
+                    }
+                }
+
+                if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex,
+                        ctx.cmp) != -1) {
+                    ctx.pathList.clear();
+                    fillPath(ctx, pageIndex);
+                    return;
+                }
+                node.releaseReadLatch();
+                readLatched = false;
+                bufferCache.unpin(node);
+            }
+        } finally {
+            if (readLatched) {
+                node.releaseReadLatch();
+                readLatched = false;
+                bufferCache.unpin(node);
+            }
+        }
+    }
+
+    private void fillPath(RTreeOpContext ctx, int pageIndex) {
+        if (pageIndex != -1) {
+            fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
+            ctx.pathList.add(ctx.traverseList.getPageId(pageIndex), ctx.traverseList.getPageLsn(pageIndex), -1);
+        }
+    }
+
+    private void delete(ITupleReference tuple, RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        ctx.reset();
+        ctx.setTuple(tuple);
+        ctx.splitKey.reset();
+        ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
+
+        // We delete the first matching tuple (including the payload data).
+        // We don't update the MBRs of the parents after deleting the record.
+        int tupleIndex = findTupleToDelete(ctx);
+
+        if (tupleIndex != -1) {
+            try {
+                deleteTuple(tupleIndex, ctx);
+            } finally {
+                ctx.leafFrame.getPage().releaseWriteLatch();
+                bufferCache.unpin(ctx.leafFrame.getPage());
+            }
+        }
+    }
+
+    /**
+     * Traverses the tree looking for the leaf copy of ctx.tuple so it can be
+     * deleted. Interior pages are scanned for children whose MBR intersects
+     * the tuple; all candidates are pushed on ctx.pathList and visited in
+     * turn. Concurrent splits are detected via the page NSN (R-link style):
+     * if the parent's LSN is older than the page's NSN, the right sibling is
+     * also scheduled for a visit.
+     *
+     * @return the tuple's index within ctx.leafFrame, or -1 if not found. On
+     *         a non-negative return the leaf page is left pinned and
+     *         WRITE-latched with ctx.leafFrame set to it; the caller performs
+     *         the delete and releases the latch and pin.
+     */
+    private int findTupleToDelete(RTreeOpContext ctx) throws HyracksDataException {
+        boolean writeLatched = false;
+        boolean readLatched = false;
+        boolean succeeded = false;
+        ICachedPage node = null;
+        ctx.pathList.add(rootPage, -1, -1);
+
+        try {
+            while (!ctx.pathList.isEmpty()) {
+                int pageId = ctx.pathList.getLastPageId();
+                long parentLsn = ctx.pathList.getLastPageLsn();
+                ctx.pathList.moveLast();
+                node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                node.acquireReadLatch();
+                readLatched = true;
+                ctx.interiorFrame.setPage(node);
+                boolean isLeaf = ctx.interiorFrame.isLeaf();
+                long pageLsn = ctx.interiorFrame.getPageLsn();
+
+                if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+                    // Concurrent split detected, we need to visit the right
+                    // page
+                    int rightPage = ctx.interiorFrame.getRightPage();
+                    if (rightPage != -1) {
+                        ctx.pathList.add(rightPage, parentLsn, -1);
+                    }
+                }
+
+                if (!isLeaf) {
+                    // Enqueue every child whose MBR intersects the tuple's MBR.
+                    for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
+                        int childPageId = ctx.interiorFrame.getChildPageIdIfIntersect(ctx.tuple, i, ctx.cmp);
+                        if (childPageId != -1) {
+                            ctx.pathList.add(childPageId, pageLsn, -1);
+                        }
+                    }
+                } else {
+                    ctx.leafFrame.setPage(node);
+                    int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, ctx.cmp);
+                    if (tupleIndex != -1) {
+
+                        // Candidate found under the read latch; "upgrade" by
+                        // releasing, re-pinning and write-latching. The page
+                        // may change in between, which is re-checked below via
+                        // the page LSN.
+                        node.releaseReadLatch();
+                        readLatched = false;
+                        bufferCache.unpin(node);
+
+                        node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                        node.acquireWriteLatch();
+                        writeLatched = true;
+                        ctx.leafFrame.setPage(node);
+
+                        // A rare case only happen when a root is no longer a
+                        // leaf page. Simply we restart the search.
+                        if (!ctx.leafFrame.isLeaf()) {
+                            ctx.pathList.add(pageId, -1, -1);
+
+                            node.releaseWriteLatch();
+                            writeLatched = false;
+                            bufferCache.unpin(node);
+                            continue;
+                        }
+
+                        if (ctx.leafFrame.getPageLsn() != pageLsn) {
+                            // The page was changed while we unlocked it
+
+                            tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, ctx.cmp);
+                            if (tupleIndex == -1) {
+                                // Tuple moved away meanwhile; revisit this
+                                // page later.
+                                ctx.pathList.add(pageId, parentLsn, -1);
+
+                                node.releaseWriteLatch();
+                                writeLatched = false;
+                                bufferCache.unpin(node);
+                                continue;
+                            } else {
+                                // Return holding the write latch and pin
+                                // (released by the caller after the delete).
+                                succeeded = true;
+                                return tupleIndex;
+                            }
+                        } else {
+                            succeeded = true;
+                            return tupleIndex;
+                        }
+                    }
+                }
+                node.releaseReadLatch();
+                readLatched = false;
+                bufferCache.unpin(node);
+            }
+        } finally {
+            // On abnormal exit (exception before a successful return), make
+            // sure the last touched page is unlatched and unpinned.
+            if (!succeeded) {
+                if (readLatched) {
+                    node.releaseReadLatch();
+                    readLatched = false;
+                    bufferCache.unpin(node);
+                } else if (writeLatched) {
+                    node.releaseWriteLatch();
+                    writeLatched = false;
+                    bufferCache.unpin(node);
+                }
+            }
+        }
+        return -1;
+    }
+
+    /**
+     * Physically deletes the tuple at tupleIndex from the leaf frame currently
+     * set in ctx.leafFrame (caller holds the page's write latch; see
+     * findTupleToDelete), then stamps the page with a fresh LSN so concurrent
+     * traversals can detect the change.
+     */
+    private void deleteTuple(int tupleIndex, RTreeOpContext ctx) throws HyracksDataException {
+        ctx.leafFrame.delete(tupleIndex, ctx.cmp);
+        ctx.leafFrame.setPageLsn(incrementGlobalNsn());
+    }
+
+    /**
+     * Initializes the given cursor for a search over this tree. The actual
+     * traversal happens lazily inside the cursor's open()/hasNext(); this
+     * method only wires the buffer cache, file id and root page into the
+     * cursor's initial state.
+     */
+    private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, RTreeOpContext ctx)
+            throws HyracksDataException, TreeIndexException {
+        ctx.reset();
+        ctx.cursor = cursor;
+
+        cursor.setBufferCache(bufferCache);
+        cursor.setFileId(fileId);
+        ctx.cursorInitialState.setRootPage(rootPage);
+        ctx.cursor.open(ctx.cursorInitialState, (SearchPredicate) searchPred);
+    }
+
+    /** Factory for frames that interpret interior (non-leaf) pages. */
+    @Override
+    public ITreeIndexFrameFactory getInteriorFrameFactory() {
+        return interiorFrameFactory;
+    }
+
+    /** Factory for frames that interpret leaf pages. */
+    @Override
+    public ITreeIndexFrameFactory getLeafFrameFactory() {
+        return leafFrameFactory;
+    }
+
+    /** Comparator factories for the key fields of this tree. */
+    @Override
+    public IBinaryComparatorFactory[] getComparatorFactories() {
+        return cmpFactories;
+    }
+
+    /** Manager that tracks allocated/free pages of the underlying file. */
+    @Override
+    public IFreePageManager getFreePageManager() {
+        return freePageManager;
+    }
+
+    /** Unconditionally throws: in-place update is not supported by the RTree. */
+    private void update(ITupleReference tuple, RTreeOpContext ctx) {
+        throw new UnsupportedOperationException("RTree Update not implemented.");
+    }
+
+    /**
+     * Returns true iff the tree is empty, i.e. the root page is still a leaf
+     * (level 0) containing no tuples. The root page is pinned and
+     * read-latched for the duration of the check and released in the finally
+     * block.
+     */
+    public boolean isEmptyTree(IRTreeLeafFrame leafFrame) throws HyracksDataException {
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+        rootNode.acquireReadLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            if (leafFrame.getLevel() == 0 && leafFrame.getTupleCount() == 0) {
+                return true;
+            } else {
+                return false;
+            }
+        } finally {
+            rootNode.releaseReadLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+
+    /**
+     * Bulk-load state. This RTree implements bulk load as a sequence of
+     * regular inserts through an accessor, so the fill factor and frame
+     * constructor arguments are currently unused.
+     */
+    public final class BulkLoadContext implements IIndexBulkLoadContext {
+
+        // Accessor through which bulkLoadAddTuple performs plain inserts.
+        public ITreeIndexAccessor indexAccessor;
+
+        public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
+                ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+            indexAccessor = createAccessor();
+        }
+    }
+
+    /**
+     * Starts a bulk load. The tree must be empty; otherwise a
+     * HyracksDataException is thrown.
+     */
+    @Override
+    public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws HyracksDataException {
+        IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
+        if (!isEmptyTree(leafFrame)) {
+            throw new HyracksDataException("Trying to Bulk-load a non-empty RTree.");
+        }
+
+        BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IRTreeFrame) leafFrameFactory.createFrame(),
+                (IRTreeFrame) interiorFrameFactory.createFrame(), freePageManager.getMetaDataFrameFactory()
+                        .createFrame());
+        return ctx;
+    }
+
+    /**
+     * Adds one tuple to the ongoing bulk load by performing a regular insert;
+     * any failure is rewrapped as a HyracksDataException.
+     */
+    @Override
+    public void bulkLoadAddTuple(ITupleReference tuple, IIndexBulkLoadContext ictx) throws HyracksDataException {
+        try {
+            ((BulkLoadContext) ictx).indexAccessor.insert(tuple);
+        } catch (Exception e) {
+            throw new HyracksDataException("BulkLoad Error", e);
+        }
+    }
+
+    /** No-op: tuples were applied eagerly in bulkLoadAddTuple. */
+    @Override
+    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
+    }
+
+    /**
+     * Opens a sequential (disk-order) scan over all pages of the tree,
+     * starting at the page right after the root. On success the first page
+     * stays pinned and read-latched for the cursor to consume; on failure the
+     * latch and pin are released here and the cause is rethrown wrapped in a
+     * HyracksDataException.
+     */
+    private void diskOrderScan(ITreeIndexCursor icursor, RTreeOpContext ctx) throws HyracksDataException {
+        TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+        ctx.reset();
+
+        MultiComparator cmp = MultiComparator.create(cmpFactories);
+        SearchPredicate searchPred = new SearchPredicate(null, cmp);
+
+        int currentPageId = rootPage + 1;
+        int maxPageId = freePageManager.getMaxPage(ctx.metaFrame);
+
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+        page.acquireReadLatch();
+        try {
+            cursor.setBufferCache(bufferCache);
+            cursor.setFileId(fileId);
+            cursor.setCurrentPageId(currentPageId);
+            cursor.setMaxPageId(maxPageId);
+            ctx.cursorInitialState.setPage(page);
+            cursor.open(ctx.cursorInitialState, searchPred);
+        } catch (Exception e) {
+            page.releaseReadLatch();
+            bufferCache.unpin(page);
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /** Page id of the tree's root page. */
+    @Override
+    public int getRootPageId() {
+        return rootPage;
+    }
+
+    /** Total number of fields in the tuples stored in this tree. */
+    @Override
+    public int getFieldCount() {
+        return fieldCount;
+    }
+
+    /** Identifies this index as an R-tree. */
+    @Override
+    public IndexType getIndexType() {
+        return IndexType.RTREE;
+    }
+
+    /** Creates a new accessor (with its own operation context) for this tree. */
+    @Override
+    public ITreeIndexAccessor createAccessor() {
+        return new RTreeAccessor(this);
+    }
+
+    /**
+     * Accessor exposing the per-operation entry points (insert, delete,
+     * search, scans) of the enclosing {@link RTree}. Each accessor owns its
+     * own {@link RTreeOpContext}, so a single accessor instance must not be
+     * used by multiple threads concurrently.
+     */
+    public class RTreeAccessor implements ITreeIndexAccessor {
+        private RTree rtree;
+        private RTreeOpContext ctx;
+
+        public RTreeAccessor(RTree rtree) {
+            this.rtree = rtree;
+            this.ctx = rtree.createOpContext();
+        }
+
+        @Override
+        public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            ctx.reset(IndexOp.INSERT);
+            rtree.insert(tuple, ctx);
+        }
+
+        @Override
+        public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            // Delegates to RTree.update, which always throws
+            // UnsupportedOperationException.
+            ctx.reset(IndexOp.UPDATE);
+            rtree.update(tuple, ctx);
+        }
+
+        @Override
+        public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            ctx.reset(IndexOp.DELETE);
+            rtree.delete(tuple, ctx);
+        }
+
+        @Override
+        public ITreeIndexCursor createSearchCursor() {
+            return new RTreeSearchCursor((IRTreeInteriorFrame) interiorFrameFactory.createFrame(),
+                    (IRTreeLeafFrame) leafFrameFactory.createFrame());
+        }
+
+        @Override
+        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
+                IndexException {
+            ctx.reset(IndexOp.SEARCH);
+            rtree.search((ITreeIndexCursor) cursor, searchPred, ctx);
+        }
+
+        @Override
+        public ITreeIndexCursor createDiskOrderScanCursor() {
+            return new TreeDiskOrderScanCursor(leafFrameFactory.createFrame());
+        }
+
+        @Override
+        public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException {
+            ctx.reset(IndexOp.DISKORDERSCAN);
+            rtree.diskOrderScan(cursor, ctx);
+        }
+
+        public RTreeOpContext getOpContext() {
+            return ctx;
+        }
+
+        @Override
+        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+            // Fixed typo in the message ("suypport" -> "support").
+            throw new UnsupportedOperationException(
+                    "The RTree does not support the notion of keys, therefore upsert does not make sense.");
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
new file mode 100644
index 0000000..ac1eb7d
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+/**
+ * Initial state handed to an R-tree cursor on open(): the shared traversal
+ * page list and the root page id, plus (for disk-order scans only) the
+ * already-pinned starting page.
+ */
+public class RTreeCursorInitialState implements ICursorInitialState {
+
+	private PathList pathList;
+	private int rootPage;
+	private ICachedPage page; // for disk order scan
+
+	public RTreeCursorInitialState(PathList pathList, int rootPage) {
+		this.pathList = pathList;
+		this.rootPage = rootPage;
+	}
+
+	public PathList getPathList() {
+		return pathList;
+	}
+
+	public int getRootPage() {
+		return rootPage;
+	}
+
+	public void setRootPage(int rootPage) {
+		this.rootPage = rootPage;
+	}
+
+	// Starting page for a disk-order scan; unused by ordinary searches.
+	public ICachedPage getPage() {
+		return page;
+	}
+
+	public void setPage(ICachedPage page) {
+		this.page = page;
+	}
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
new file mode 100644
index 0000000..6683444
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+/**
+ * Mutable per-operation state owned by one {@link RTree} accessor: reusable
+ * frames, the key comparator, traversal page lists, and the split key used by
+ * structure-modifying operations. Not safe for concurrent use.
+ */
+public class RTreeOpContext implements IIndexOpContext {
+    private static final int INITIAL_TRAVERSE_LIST_SIZE = 100;
+    public final MultiComparator cmp;
+    public final IRTreeInteriorFrame interiorFrame;
+    public final IRTreeLeafFrame leafFrame;
+    public IndexOp op;
+    public ITreeIndexCursor cursor;
+    public RTreeCursorInitialState cursorInitialState;
+    public ITreeIndexMetaDataFrame metaFrame;
+    public RTreeSplitKey splitKey;
+    public ITupleReference tuple;
+    // Used to record the pageIds and pageLsns of the visited pages.
+    public PathList pathList;
+    // Used for traversing the tree.
+    public PathList traverseList;
+
+    // Pages whose NSN/LSN still need to be stamped by the operation in
+    // progress.
+    public ArrayList<ICachedPage> NSNUpdates;
+    public ArrayList<ICachedPage> LSNUpdates;
+
+    public RTreeOpContext(IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
+            ITreeIndexMetaDataFrame metaFrame, IBinaryComparatorFactory[] cmpFactories, int treeHeightHint) {
+        this.cmp = MultiComparator.create(cmpFactories);
+        this.interiorFrame = interiorFrame;
+        this.leafFrame = leafFrame;
+        this.metaFrame = metaFrame;
+        // The height hint presizes the path list to avoid early regrowth.
+        pathList = new PathList(treeHeightHint, treeHeightHint);
+        NSNUpdates = new ArrayList<ICachedPage>();
+        LSNUpdates = new ArrayList<ICachedPage>();
+    }
+
+    public ITupleReference getTuple() {
+        return tuple;
+    }
+
+    public void setTuple(ITupleReference tuple) {
+        this.tuple = tuple;
+    }
+
+    /** Clears all per-operation lists for reuse within the same operation kind. */
+    public void reset() {
+        if (pathList != null) {
+            pathList.clear();
+        }
+        if (traverseList != null) {
+            traverseList.clear();
+        }
+        NSNUpdates.clear();
+        LSNUpdates.clear();
+    }
+
+    /**
+     * Prepares this context for the given operation, lazily allocating the
+     * structures that only structure-modifying operations need.
+     */
+    @Override
+    public void reset(IndexOp newOp) {
+        if (op != null && newOp == op) {
+            // Same operation kind as last time; everything is already set up.
+            return;
+        }
+        // BUG FIX: the allocation guard must inspect the NEW operation, not
+        // the previous one. Previously, reset(SEARCH) followed by
+        // reset(INSERT) skipped allocating splitKey/traverseList, leaving
+        // them null for a mutating operation.
+        if (newOp != IndexOp.SEARCH && newOp != IndexOp.DISKORDERSCAN) {
+            if (splitKey == null) {
+                splitKey = new RTreeSplitKey(interiorFrame.getTupleWriter().createTupleReference(), interiorFrame
+                        .getTupleWriter().createTupleReference());
+            }
+            if (traverseList == null) {
+                traverseList = new PathList(INITIAL_TRAVERSE_LIST_SIZE, INITIAL_TRAVERSE_LIST_SIZE);
+            }
+        }
+        if (cursorInitialState == null) {
+            cursorInitialState = new RTreeCursorInitialState(pathList, 1);
+        }
+        this.op = newOp;
+    }
+}
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
new file mode 100644
index 0000000..ee7ec5f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
@@ -0,0 +1,255 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+/**
+ * Cursor returning all leaf tuples whose MBR intersects the search key (or
+ * every tuple when the predicate carries no key). Traversal uses a work list
+ * of candidate pages (pathList) instead of a recursion stack, and follows
+ * right siblings when a concurrent split is detected via the page NSN
+ * (R-link style). While positioned on a tuple, the current leaf page is
+ * pinned and read-latched.
+ */
+public class RTreeSearchCursor implements ITreeIndexCursor {
+
+    private int fileId = -1;
+    private ICachedPage page = null;
+    private IRTreeInteriorFrame interiorFrame = null;
+    private IRTreeLeafFrame leafFrame = null;
+    private IBufferCache bufferCache = null;
+
+    private SearchPredicate pred;
+    private PathList pathList;
+    private int rootPage;
+    private ITupleReference searchKey;
+
+    // Index of the next tuple to examine within the current leaf.
+    private int tupleIndex = 0;
+    // Position tupleIndex advances to in next() after a successful hasNext().
+    private int tupleIndexInc = 0;
+    private int currentTupleIndex = 0;
+    private int pageId = -1;
+
+    private MultiComparator cmp;
+
+    private ITreeIndexTupleReference frameTuple;
+    private boolean readLatched = false;
+
+    public RTreeSearchCursor(IRTreeInteriorFrame interiorFrame, IRTreeLeafFrame leafFrame) {
+        this.interiorFrame = interiorFrame;
+        this.leafFrame = leafFrame;
+        this.frameTuple = leafFrame.createTupleReference();
+    }
+
+    /** Releases the current page (if latched) and resets positional state. */
+    @Override
+    public void close() throws HyracksDataException {
+        if (readLatched) {
+            page.releaseReadLatch();
+            bufferCache.unpin(page);
+            readLatched = false;
+        }
+        tupleIndex = 0;
+        tupleIndexInc = 0;
+        page = null;
+        pathList = null;
+    }
+
+    /** Reference to the current tuple; only valid after a successful hasNext(). */
+    public ITupleReference getTuple() {
+        return frameTuple;
+    }
+
+    // Byte offset of the current tuple in its leaf page (used by LSMRTree flush).
+    public int getTupleOffset() {
+        return leafFrame.getTupleOffset(currentTupleIndex);
+    }
+
+    public int getPageId() {
+        return pageId;
+    }
+
+    @Override
+    public ICachedPage getPage() {
+        return page;
+    }
+
+    /**
+     * Pops candidate pages off pathList until a leaf is reached. On success
+     * the leaf is left pinned and read-latched in {@code page} and true is
+     * returned; returns false when the work list is exhausted. Interior pages
+     * visited on the way are unlatched/unpinned in the finally block.
+     */
+    private boolean fetchNextLeafPage() throws HyracksDataException {
+        boolean succeeded = false;
+        if (readLatched) {
+            page.releaseReadLatch();
+            bufferCache.unpin(page);
+            readLatched = false;
+        }
+
+        while (!pathList.isEmpty()) {
+            int pageId = pathList.getLastPageId();
+            long parentLsn = pathList.getLastPageLsn();
+            pathList.moveLast();
+            ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+            node.acquireReadLatch();
+            readLatched = true;
+            try {
+                interiorFrame.setPage(node);
+                boolean isLeaf = interiorFrame.isLeaf();
+                long pageLsn = interiorFrame.getPageLsn();
+
+                if (pageId != rootPage && parentLsn < interiorFrame.getPageNsn()) {
+                    // Concurrent split detected, we need to visit the right
+                    // page
+                    int rightPage = interiorFrame.getRightPage();
+                    if (rightPage != -1) {
+                        pathList.add(rightPage, parentLsn, -1);
+                    }
+                }
+
+                if (!isLeaf) {
+                    if (searchKey != null) {
+                        // Descend only into children whose MBR intersects the key.
+                        for (int i = 0; i < interiorFrame.getTupleCount(); i++) {
+                            int childPageId = interiorFrame.getChildPageIdIfIntersect(searchKey, i, cmp);
+                            if (childPageId != -1) {
+                                pathList.add(childPageId, pageLsn, -1);
+                            }
+                        }
+                    } else {
+                        // No search key: full scan, descend into every child.
+                        for (int i = 0; i < interiorFrame.getTupleCount(); i++) {
+                            int childPageId = interiorFrame.getChildPageId(i);
+                            pathList.add(childPageId, pageLsn, -1);
+                        }
+                    }
+
+                } else {
+                    page = node;
+                    this.pageId = pageId; // This is only needed for the
+                                          // LSMRTree flush operation
+                    leafFrame.setPage(page);
+                    tupleIndex = 0;
+                    succeeded = true;
+                    return true;
+                }
+            } finally {
+                if (!succeeded) {
+                    if (readLatched) {
+                        node.releaseReadLatch();
+                        readLatched = false;
+                        bufferCache.unpin(node);
+                    }
+                }
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Positions the cursor on the next matching tuple, fetching further leaf
+     * pages as needed. Does not advance past the tuple; next() does that via
+     * tupleIndexInc.
+     */
+    @Override
+    public boolean hasNext() throws HyracksDataException {
+        if (page == null) {
+            return false;
+        }
+
+        if (tupleIndex == leafFrame.getTupleCount()) {
+            if (!fetchNextLeafPage()) {
+                return false;
+            }
+        }
+
+        do {
+            for (int i = tupleIndex; i < leafFrame.getTupleCount(); i++) {
+                if (searchKey != null) {
+                    if (leafFrame.intersect(searchKey, i, cmp)) {
+                        frameTuple.resetByTupleIndex(leafFrame, i);
+                        currentTupleIndex = i; // This is only needed for the
+                                               // LSMRTree flush operation
+                        tupleIndexInc = i + 1;
+                        return true;
+                    }
+                } else {
+                    frameTuple.resetByTupleIndex(leafFrame, i);
+                    currentTupleIndex = i; // This is only needed for the
+                                           // LSMRTree
+                                           // flush operation
+                    tupleIndexInc = i + 1;
+                    return true;
+                }
+            }
+        } while (fetchNextLeafPage());
+        return false;
+    }
+
+    @Override
+    public void next() throws HyracksDataException {
+        tupleIndex = tupleIndexInc;
+    }
+
+    /**
+     * (Re)opens the cursor for the given predicate: validates that the low
+     * key point does not exceed the high key point in any dimension, seeds
+     * the work list with the root page and descends to the first leaf.
+     */
+    @Override
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
+        // in case open is called multiple times without closing
+        if (this.page != null) {
+            // NOTE(review): this releases the read latch whenever page is
+            // non-null without checking readLatched — confirm the invariant
+            // that a non-null page is always latched.
+            this.page.releaseReadLatch();
+            readLatched = false;
+            bufferCache.unpin(this.page);
+            pathList.clear();
+        }
+
+        pathList = ((RTreeCursorInitialState) initialState).getPathList();
+        rootPage = ((RTreeCursorInitialState) initialState).getRootPage();
+
+        pred = (SearchPredicate) searchPred;
+        cmp = pred.getLowKeyComparator();
+        searchKey = pred.getSearchKey();
+
+        if (searchKey != null) {
+            // Key layout: the first half of the fields is the low point, the
+            // second half the high point of the query rectangle.
+            int maxFieldPos = cmp.getKeyFieldCount() / 2;
+            for (int i = 0; i < maxFieldPos; i++) {
+                int j = maxFieldPos + i;
+                int c = cmp.getComparators()[i].compare(searchKey.getFieldData(i), searchKey.getFieldStart(i),
+                        searchKey.getFieldLength(i), searchKey.getFieldData(j), searchKey.getFieldStart(j),
+                        searchKey.getFieldLength(j));
+                if (c > 0) {
+                    throw new IllegalArgumentException(
+                            "The low key point has larger coordinates than the high key point.");
+                }
+            }
+        }
+
+        pathList.add(this.rootPage, -1, -1);
+        tupleIndex = 0;
+        fetchNextLeafPage();
+    }
+
+    /** Best-effort reset via close(); exceptions are only printed. */
+    @Override
+    public void reset() {
+        try {
+            close();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    @Override
+    public void setBufferCache(IBufferCache bufferCache) {
+        this.bufferCache = bufferCache;
+    }
+
+    @Override
+    public void setFileId(int fileId) {
+        this.fileId = fileId;
+    }
+
+    /** Search latches pages in shared (read) mode, never exclusively. */
+    @Override
+    public boolean exclusiveLatchNodes() {
+        return false;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
new file mode 100644
index 0000000..cb9b160
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+
+/**
+ * Mutable axis-aligned hyper-rectangle (MBR) in {@code dim} dimensions, with
+ * the geometric measures used by R-tree split heuristics (margin, area,
+ * overlap). Tuple field layout: fields [0, dim) hold the low coordinates and
+ * fields [dim, 2*dim) the high coordinates.
+ */
+public class Rectangle {
+	private int dim;
+	private double[] low;
+	private double[] high;
+
+	public Rectangle(int dim) {
+		this.dim = dim;
+		low = new double[this.dim];
+		high = new double[this.dim];
+	}
+
+	public int getDim() {
+		return dim;
+	}
+
+	public double getLow(int i) {
+		return low[i];
+	}
+
+	public double getHigh(int i) {
+		return high[i];
+	}
+
+	public void setLow(int i, double value) {
+		low[i] = value;
+	}
+
+	public void setHigh(int i, double value) {
+		high[i] = value;
+	}
+
+	/** Overwrites this rectangle with the MBR stored in the given tuple. */
+	public void set(ITupleReference tuple, IPrimitiveValueProvider[] valueProviders) {
+		for (int i = 0; i < getDim(); i++) {
+			int j = i + getDim();
+			setLow(i, valueProviders[i].getValue(
+					tuple.getFieldData(i), tuple.getFieldStart(i)));
+			setHigh(i, valueProviders[j].getValue(
+					tuple.getFieldData(j), tuple.getFieldStart(j)));
+		}
+	}
+
+	/**
+	 * Grows this rectangle in place so it also covers the MBR of the given
+	 * tuple.
+	 */
+	public void enlarge(ITupleReference tupleToBeInserted, IPrimitiveValueProvider[] valueProviders) {
+		for (int i = 0; i < getDim(); i++) {
+			int j = getDim() + i;
+			double low = valueProviders[i].getValue(
+					tupleToBeInserted.getFieldData(i),
+					tupleToBeInserted.getFieldStart(i));
+			if (getLow(i) > low) {
+				setLow(i, low);
+			}
+			double high = valueProviders[j].getValue(
+					tupleToBeInserted.getFieldData(j),
+					tupleToBeInserted.getFieldStart(j));
+			if (getHigh(i) < high) {
+				setHigh(i, high);
+			}
+		}
+	}
+
+	/**
+	 * Surface measure used by R*-tree split heuristics: the sum of edge
+	 * lengths, each counted 2^(dim-1) times.
+	 */
+	public double margin() {
+		double margin = 0.0;
+		double mul = Math.pow(2, (double) getDim() - 1.0);
+		for (int i = 0; i < getDim(); i++) {
+			margin += (getHigh(i) - getLow(i)) * mul;
+		}
+		return margin;
+	}
+
+	/** Area (hyper-volume) of the intersection with rec; 0.0 if disjoint. */
+	public double overlappedArea(Rectangle rec) {
+		double area = 1.0;
+		double f1, f2;
+
+		for (int i = 0; i < getDim(); i++) {
+			if (getLow(i) > rec.getHigh(i) || getHigh(i) < rec.getLow(i)) {
+				return 0.0;
+			}
+
+			f1 = Math.max(getLow(i), rec.getLow(i));
+			f2 = Math.min(getHigh(i), rec.getHigh(i));
+			area *= f2 - f1;
+		}
+		return area;
+	}
+
+	/** Area (hyper-volume) of this rectangle. */
+	public double area() {
+		double area = 1.0;
+		for (int i = 0; i < getDim(); i++) {
+			area *= getHigh(i) - getLow(i);
+		}
+		return area;
+	}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
similarity index 100%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
rename to hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java
new file mode 100644
index 0000000..6f5d36f
--- /dev/null
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.data.PointablePrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class RTreeUtils {
+    public static RTree createRTree(IBufferCache bufferCache, ITypeTraits[] typeTraits,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, IBinaryComparatorFactory[] cmpFactories) {
+
+        RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
+        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+        RTree rtree = new RTree(bufferCache, typeTraits.length, cmpFactories, freePageManager, interiorFrameFactory,
+                leafFrameFactory);
+        return rtree;
+    }
+
+    // Creates a new MultiComparator by constructing new IBinaryComparators.
+    public static MultiComparator getSearchMultiComparator(IBinaryComparatorFactory[] cmpFactories,
+            ITupleReference searchKey) {
+        if (searchKey == null || cmpFactories.length == searchKey.getFieldCount()) {
+            return MultiComparator.create(cmpFactories);
+        }
+        IBinaryComparator[] newCmps = new IBinaryComparator[searchKey.getFieldCount()];
+        for (int i = 0; i < searchKey.getFieldCount(); i++) {
+            newCmps[i] = cmpFactories[i].createBinaryComparator();
+        }
+        return new MultiComparator(newCmps);
+    }
+
+    public static IPrimitiveValueProviderFactory[] createPrimitiveValueProviderFactories(int len, IPointableFactory pf) {
+        IPrimitiveValueProviderFactory[] pvpfs = new IPrimitiveValueProviderFactory[len];
+        for (int i = 0; i < len; ++i) {
+            pvpfs[i] = new PointablePrimitiveValueProviderFactory(pf);
+        }
+        return pvpfs;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-common/pom.xml b/hyracks/hyracks-storage-common/pom.xml
new file mode 100644
index 0000000..ee507cc
--- /dev/null
+++ b/hyracks/hyracks-storage-common/pom.xml
@@ -0,0 +1,36 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-common</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-common</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-api</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java
new file mode 100644
index 0000000..562305e
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.common;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public interface IStorageManagerInterface extends Serializable {
+    public IBufferCache getBufferCache(IHyracksTaskContext ctx);
+
+    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx);
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
new file mode 100644
index 0000000..7f17f38
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
@@ -0,0 +1,784 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.common.buffercache;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.io.IFileHandle;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+
+public class BufferCache implements IBufferCacheInternal {
+    private static final Logger LOGGER = Logger.getLogger(BufferCache.class.getName());
+    private static final int MAP_FACTOR = 2;
+
+    private static final int MIN_CLEANED_COUNT_DIFF = 3;
+    private static final int PIN_MAX_WAIT_TIME = 50;
+
+    private final int maxOpenFiles;
+
+    private final IIOManager ioManager;
+    private final int pageSize;
+    private final int numPages;
+    private final CachedPage[] cachedPages;
+    private final CacheBucket[] pageMap;
+    private final IPageReplacementStrategy pageReplacementStrategy;
+    private final IPageCleanerPolicy pageCleanerPolicy;
+    private final IFileMapManager fileMapManager;
+    private final CleanerThread cleanerThread;
+    private final Map<Integer, BufferedFileHandle> fileInfoMap;
+
+    private boolean closed;
+
+    public BufferCache(IIOManager ioManager, ICacheMemoryAllocator allocator,
+            IPageReplacementStrategy pageReplacementStrategy, IPageCleanerPolicy pageCleanerPolicy,
+            IFileMapManager fileMapManager, int pageSize, int numPages, int maxOpenFiles) {
+        this.ioManager = ioManager;
+        this.pageSize = pageSize;
+        this.numPages = numPages;
+        this.maxOpenFiles = maxOpenFiles;
+        pageReplacementStrategy.setBufferCache(this);
+        ByteBuffer[] buffers = allocator.allocate(pageSize, numPages);
+        cachedPages = new CachedPage[buffers.length];
+        for (int i = 0; i < buffers.length; ++i) {
+            cachedPages[i] = new CachedPage(i, buffers[i], pageReplacementStrategy);
+        }
+        pageMap = new CacheBucket[numPages * MAP_FACTOR];
+        for (int i = 0; i < pageMap.length; ++i) {
+            pageMap[i] = new CacheBucket();
+        }
+        this.pageReplacementStrategy = pageReplacementStrategy;
+        this.pageCleanerPolicy = pageCleanerPolicy;
+        this.fileMapManager = fileMapManager;
+        fileInfoMap = new HashMap<Integer, BufferedFileHandle>();
+        cleanerThread = new CleanerThread();
+        cleanerThread.start();
+        closed = false;
+    }
+
+    @Override
+    public int getPageSize() {
+        return pageSize;
+    }
+
+    @Override
+    public int getNumPages() {
+        return numPages;
+    }
+
+    private void pinSanityCheck(long dpid) throws HyracksDataException {
+        if (closed) {
+            throw new HyracksDataException("pin called on a closed cache");
+        }
+
+        // check whether file has been created and opened
+        int fileId = BufferedFileHandle.getFileId(dpid);
+        BufferedFileHandle fInfo = null;
+        synchronized (fileInfoMap) {
+            fInfo = fileInfoMap.get(fileId);
+        }
+        if (fInfo == null) {
+            throw new HyracksDataException("pin called on a fileId " + fileId + " that has not been created.");
+        } else if (fInfo.getReferenceCount() <= 0) {
+            throw new HyracksDataException("pin called on a fileId " + fileId + " that has not been opened.");
+        }
+    }
+
+    @Override
+    public ICachedPage tryPin(long dpid) throws HyracksDataException {
+        pinSanityCheck(dpid);
+        CachedPage cPage = null;
+        int hash = hash(dpid);
+        CacheBucket bucket = pageMap[hash];
+        bucket.bucketLock.lock();
+        try {
+            cPage = bucket.cachedPage;
+            while (cPage != null) {
+                if (cPage.dpid == dpid) {
+                    cPage.pinCount.incrementAndGet();
+                    pageReplacementStrategy.notifyCachePageAccess(cPage);
+                    return cPage;
+                }
+                cPage = cPage.next;
+            }
+        } finally {
+            bucket.bucketLock.unlock();
+        }
+        return cPage;
+    }
+
+    @Override
+    public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
+        pinSanityCheck(dpid);
+        CachedPage cPage = findPage(dpid, newPage);
+        if (!newPage) {
+            if (!cPage.valid) {
+                /*
+                 * We got a buffer and we have pinned it. But its invalid. If its a new page, we just mark it as valid
+                 * and return. Or else, while we hold the page lock, we get a write latch on the data and start a read.
+                 */
+                cPage.acquireWriteLatch(false);
+                try {
+                    if (!cPage.valid) {
+                        read(cPage);
+                    }
+                    cPage.valid = true;
+                } finally {
+                    cPage.releaseWriteLatch();
+                }
+            }
+        } else {
+            cPage.valid = true;
+        }
+        pageReplacementStrategy.notifyCachePageAccess(cPage);
+        return cPage;
+    }
+
+    private CachedPage findPage(long dpid, boolean newPage) throws HyracksDataException {
+        while (true) {
+            int startCleanedCount = cleanerThread.cleanedCount;
+
+            CachedPage cPage = null;
+            /*
+             * Hash dpid to get a bucket and then check if the page exists in the bucket.
+             */
+            int hash = hash(dpid);
+            CacheBucket bucket = pageMap[hash];
+            bucket.bucketLock.lock();
+            try {
+                cPage = bucket.cachedPage;
+                while (cPage != null) {
+                    if (cPage.dpid == dpid) {
+                        cPage.pinCount.incrementAndGet();
+                        return cPage;
+                    }
+                    cPage = cPage.next;
+                }
+            } finally {
+                bucket.bucketLock.unlock();
+            }
+            /*
+             * If we got here, the page was not in the hash table. Now we ask the page replacement strategy to find us a victim.
+             */
+            CachedPage victim = (CachedPage) pageReplacementStrategy.findVictim();
+            if (victim != null) {
+                /*
+                 * We have a victim with the following invariants.
+                 * 1. The dpid on the CachedPage may or may not be valid.
+                 * 2. We have a pin on the CachedPage. We have to deal with three cases here.
+                 *  Case 1: The dpid on the CachedPage is invalid (-1). This indicates that this buffer has never been used.
+                 *  So we are the only ones holding it. Get a lock on the required dpid's hash bucket, check if someone inserted
+                 *  the page we want into the table. If so, decrement the pincount on the victim and return the winner page in the
+                 *  table. If such a winner does not exist, insert the victim and return it.
+                 *  Case 2: The dpid on the CachedPage is valid.
+                 *      Case 2a: The current dpid and required dpid hash to the same bucket.
+                 *      Get the bucket lock, check that the victim is still at pinCount == 1 If so check if there is a winning
+                 *      CachedPage with the required dpid. If so, decrement the pinCount on the victim and return the winner.
+                 *      If not, update the contents of the CachedPage to hold the required dpid and return it. If the picCount
+                 *      on the victim was != 1 or CachedPage was dirty someone used the victim for its old contents -- Decrement
+                 *      the pinCount and retry.
+                 *  Case 2b: The current dpid and required dpid hash to different buckets. Get the two bucket locks in the order
+                 *  of the bucket indexes (Ordering prevents deadlocks). Check for the existence of a winner in the new bucket
+                 *  and for potential use of the victim (pinCount != 1). If everything looks good, remove the CachedPage from
+                 *  the old bucket, and add it to the new bucket and update its header with the new dpid.
+                 */
+                if (victim.dpid < 0) {
+                    /*
+                     * Case 1.
+                     */
+                    bucket.bucketLock.lock();
+                    try {
+                        cPage = bucket.cachedPage;
+                        while (cPage != null) {
+                            if (cPage.dpid == dpid) {
+                                cPage.pinCount.incrementAndGet();
+                                victim.pinCount.decrementAndGet();
+                                return cPage;
+                            }
+                            cPage = cPage.next;
+                        }
+                        victim.reset(dpid);
+                        victim.next = bucket.cachedPage;
+                        bucket.cachedPage = victim;
+                    } finally {
+                        bucket.bucketLock.unlock();
+                    }
+                    return victim;
+                }
+                int victimHash = hash(victim.dpid);
+                if (victimHash == hash) {
+                    /*
+                     * Case 2a.
+                     */
+                    bucket.bucketLock.lock();
+                    try {
+                        if (victim.pinCount.get() != 1) {
+                            victim.pinCount.decrementAndGet();
+                            continue;
+                        }
+                        cPage = bucket.cachedPage;
+                        while (cPage != null) {
+                            if (cPage.dpid == dpid) {
+                                cPage.pinCount.incrementAndGet();
+                                victim.pinCount.decrementAndGet();
+                                return cPage;
+                            }
+                            cPage = cPage.next;
+                        }
+                        victim.reset(dpid);
+                    } finally {
+                        bucket.bucketLock.unlock();
+                    }
+                    return victim;
+                } else {
+                    /*
+                     * Case 2b.
+                     */
+                    CacheBucket victimBucket = pageMap[victimHash];
+                    if (victimHash < hash) {
+                        victimBucket.bucketLock.lock();
+                        bucket.bucketLock.lock();
+                    } else {
+                        bucket.bucketLock.lock();
+                        victimBucket.bucketLock.lock();
+                    }
+                    try {
+                        if (victim.pinCount.get() != 1) {
+                            victim.pinCount.decrementAndGet();
+                            continue;
+                        }
+                        cPage = bucket.cachedPage;
+                        while (cPage != null) {
+                            if (cPage.dpid == dpid) {
+                                cPage.pinCount.incrementAndGet();
+                                victim.pinCount.decrementAndGet();
+                                return cPage;
+                            }
+                            cPage = cPage.next;
+                        }
+                        if (victimBucket.cachedPage == victim) {
+                            victimBucket.cachedPage = victim.next;
+                        } else {
+                            CachedPage victimPrev = victimBucket.cachedPage;
+                            while (victimPrev != null && victimPrev.next != victim) {
+                                victimPrev = victimPrev.next;
+                            }
+                            assert victimPrev != null;
+                            victimPrev.next = victim.next;
+                        }
+                        victim.reset(dpid);
+                        victim.next = bucket.cachedPage;
+                        bucket.cachedPage = victim;
+                    } finally {
+                        victimBucket.bucketLock.unlock();
+                        bucket.bucketLock.unlock();
+                    }
+                    return victim;
+                }
+            }
+            synchronized (cleanerThread) {
+                pageCleanerPolicy.notifyVictimNotFound(cleanerThread);
+            }
+            // Heuristic optimization. Check whether the cleaner thread has
+            // cleaned pages since we did our last pin attempt.
+            if (cleanerThread.cleanedCount - startCleanedCount > MIN_CLEANED_COUNT_DIFF) {
+                // Don't go to sleep and wait for notification from the cleaner,
+                // just try to pin again immediately.
+                continue;
+            }
+            synchronized (cleanerThread.cleanNotification) {
+                try {
+                    cleanerThread.cleanNotification.wait(PIN_MAX_WAIT_TIME);
+                } catch (InterruptedException e) {
+                    // Do nothing
+                }
+            }
+        }
+    }
+
+    private String dumpState() {
+        StringBuilder buffer = new StringBuilder();
+        buffer.append("Buffer cache state\n");
+        buffer.append("Page Size: ").append(pageSize).append('\n');
+        buffer.append("Number of physical pages: ").append(numPages).append('\n');
+        buffer.append("Hash table size: ").append(pageMap.length).append('\n');
+        buffer.append("Page Map:\n");
+        int nCachedPages = 0;
+        for (int i = 0; i < pageMap.length; ++i) {
+            CacheBucket cb = pageMap[i];
+            cb.bucketLock.lock();
+            try {
+                CachedPage cp = cb.cachedPage;
+                if (cp != null) {
+                    buffer.append("   ").append(i).append('\n');
+                    while (cp != null) {
+                        buffer.append("      ").append(cp.cpid).append(" -> [")
+                                .append(BufferedFileHandle.getFileId(cp.dpid)).append(':')
+                                .append(BufferedFileHandle.getPageId(cp.dpid)).append(", ").append(cp.pinCount.get())
+                                .append(", ").append(cp.valid ? "valid" : "invalid").append(", ")
+                                .append(cp.dirty.get() ? "dirty" : "clean").append("]\n");
+                        cp = cp.next;
+                        ++nCachedPages;
+                    }
+                }
+            } finally {
+                cb.bucketLock.unlock();
+            }
+        }
+        buffer.append("Number of cached pages: ").append(nCachedPages).append('\n');
+        return buffer.toString();
+    }
+
+    private void read(CachedPage cPage) throws HyracksDataException {
+        BufferedFileHandle fInfo = getFileInfo(cPage);
+        cPage.buffer.clear();
+        ioManager.syncRead(fInfo.getFileHandle(), (long) BufferedFileHandle.getPageId(cPage.dpid) * pageSize,
+                cPage.buffer);
+    }
+
+    private BufferedFileHandle getFileInfo(CachedPage cPage) throws HyracksDataException {
+        synchronized (fileInfoMap) {
+            BufferedFileHandle fInfo = fileInfoMap.get(BufferedFileHandle.getFileId(cPage.dpid));
+            if (fInfo == null) {
+                throw new HyracksDataException("No such file mapped");
+            }
+            return fInfo;
+        }
+    }
+
+    private void write(CachedPage cPage) throws HyracksDataException {
+        BufferedFileHandle fInfo = getFileInfo(cPage);
+        if (fInfo.fileHasBeenDeleted()) {
+            return;
+        }
+        cPage.buffer.position(0);
+        cPage.buffer.limit(pageSize);
+        ioManager.syncWrite(fInfo.getFileHandle(), (long) BufferedFileHandle.getPageId(cPage.dpid) * pageSize,
+                cPage.buffer);
+    }
+
+    @Override
+    public void unpin(ICachedPage page) throws HyracksDataException {
+        if (closed) {
+            throw new HyracksDataException("unpin called on a closed cache");
+        }
+        ((CachedPage) page).pinCount.decrementAndGet();
+    }
+
+    private int hash(long dpid) {
+        return (int) (dpid % pageMap.length);
+    }
+
+    private static class CacheBucket {
+        private final Lock bucketLock;
+        private CachedPage cachedPage;
+
+        public CacheBucket() {
+            bucketLock = new ReentrantLock();
+        }
+    }
+
+    private class CachedPage implements ICachedPageInternal {
+        private final int cpid;
+        private final ByteBuffer buffer;
+        private final AtomicInteger pinCount;
+        private final AtomicBoolean dirty;
+        private final ReadWriteLock latch;
+        private final Object replacementStrategyObject;
+        volatile long dpid;
+        CachedPage next;
+        volatile boolean valid;
+
+        public CachedPage(int cpid, ByteBuffer buffer, IPageReplacementStrategy pageReplacementStrategy) {
+            this.cpid = cpid;
+            this.buffer = buffer;
+            pinCount = new AtomicInteger();
+            dirty = new AtomicBoolean();
+            latch = new ReentrantReadWriteLock(true);
+            replacementStrategyObject = pageReplacementStrategy.createPerPageStrategyObject(cpid);
+            dpid = -1;
+            valid = false;
+        }
+
+        public void reset(long dpid) {
+            this.dpid = dpid;
+            dirty.set(false);
+            valid = false;
+            pageReplacementStrategy.notifyCachePageReset(this);
+        }
+
+        public void invalidate() {
+            reset(-1);
+        }
+
+        @Override
+        public ByteBuffer getBuffer() {
+            return buffer;
+        }
+
+        @Override
+        public Object getReplacementStrategyObject() {
+            return replacementStrategyObject;
+        }
+
+        @Override
+        public boolean pinIfGoodVictim() {
+            return pinCount.compareAndSet(0, 1);
+        }
+
+        @Override
+        public int getCachedPageId() {
+            return cpid;
+        }
+
+        @Override
+        public void acquireReadLatch() {
+            latch.readLock().lock();
+        }
+
+        private void acquireWriteLatch(boolean markDirty) {
+            latch.writeLock().lock();
+            if (markDirty) {
+                if (dirty.compareAndSet(false, true)) {
+                    pinCount.incrementAndGet();
+                }
+            }
+        }
+
+        @Override
+        public void acquireWriteLatch() {
+            acquireWriteLatch(true);
+        }
+
+        @Override
+        public void releaseReadLatch() {
+            latch.readLock().unlock();
+        }
+
+        @Override
+        public void releaseWriteLatch() {
+            latch.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public ICachedPageInternal getPage(int cpid) {
+        return cachedPages[cpid];
+    }
+
+    private class CleanerThread extends Thread {
+        private boolean shutdownStart = false;
+        private boolean shutdownComplete = false;
+        private final Object cleanNotification = new Object();
+        // Simply keeps incrementing this counter when a page is cleaned.
+        // Used to implement wait-for-cleanerthread heuristic optimizations.
+        // A waiter can detect whether pages have been cleaned.
+        // No need to make this var volatile or synchronize it's access in any
+        // way because it is used for heuristics.
+        private int cleanedCount = 0;
+
+        public CleanerThread() {
+            setPriority(MAX_PRIORITY);
+            setDaemon(true);
+        }
+
+        public void cleanPage(CachedPage cPage, boolean force) {
+            if (cPage.dirty.get()) {
+                boolean proceed = false;
+                if (force) {
+                    cPage.latch.writeLock().lock();
+                    proceed = true;
+                } else {
+                    proceed = cPage.latch.readLock().tryLock();
+                }
+                if (proceed) {
+                    try {
+                        // Make sure page is still dirty.
+                        if (!cPage.dirty.get()) {
+                            return;
+                        }
+                        boolean cleaned = true;
+                        try {
+                            write(cPage);
+                        } catch (HyracksDataException e) {
+                            cleaned = false;
+                        }
+                        if (cleaned) {
+                            cPage.dirty.set(false);
+                            cPage.pinCount.decrementAndGet();
+                            cleanedCount++;
+                            synchronized (cleanNotification) {
+                                cleanNotification.notifyAll();
+                            }
+                        }
+                    } finally {
+                        if (force) {
+                            cPage.latch.writeLock().unlock();
+                        } else {
+                            cPage.latch.readLock().unlock();
+                        }
+                    }
+                } else if (shutdownStart) {
+                    throw new IllegalStateException("Cache closed, but unable to acquire read lock on dirty page: "
+                            + cPage.dpid);
+                }
+            }
+        }
+
+        @Override
+        public synchronized void run() {
+            try {
+                while (true) {
+                    pageCleanerPolicy.notifyCleanCycleStart(this);
+                    for (int i = 0; i < numPages; ++i) {
+                        CachedPage cPage = cachedPages[i];
+                        cleanPage(cPage, false);
+                    }
+                    if (shutdownStart) {
+                        break;
+                    }
+                    pageCleanerPolicy.notifyCleanCycleFinish(this);
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            } finally {
+                shutdownComplete = true;
+                notifyAll();
+            }
+        }
+    }
+
+    /**
+     * Shuts down the buffer cache: stops the cleaner thread, then flushes and
+     * closes every open file. The cache must not be used after this call.
+     */
+    @Override
+    public void close() {
+        closed = true;
+        // Phase 1: wake the cleaner and wait for it to acknowledge shutdown.
+        // The cleaner's run() sets shutdownComplete and calls notifyAll() in
+        // its finally block.
+        synchronized (cleanerThread) {
+            cleanerThread.shutdownStart = true;
+            cleanerThread.notifyAll();
+            while (!cleanerThread.shutdownComplete) {
+                try {
+                    cleanerThread.wait();
+                } catch (InterruptedException e) {
+                    // NOTE(review): the interrupt is swallowed and the wait loop
+                    // continues; the thread's interrupt status is not restored.
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        // Phase 2: flush and close all files still tracked by the cache.
+        synchronized (fileInfoMap) {
+            try {
+                for (Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
+                    boolean fileHasBeenDeleted = entry.getValue().fileHasBeenDeleted();
+                    // Only flush dirty pages of files that still exist on disk.
+                    sweepAndFlush(entry.getKey(), !fileHasBeenDeleted);
+                    if (!fileHasBeenDeleted) {
+                        ioManager.close(entry.getValue().getFileHandle());
+                    }
+                }
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+            fileInfoMap.clear();
+        }
+    }
+
+    /**
+     * Registers a new file with the file map manager so it can later be opened
+     * via the file id assigned to it. Nothing is opened or pinned here.
+     *
+     * @param fileRef
+     *            reference to the file to register
+     * @throws HyracksDataException
+     *             if registration fails
+     */
+    @Override
+    public void createFile(FileReference fileRef) throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Creating file: " + fileRef + " in cache: " + this);
+        }
+        // fileInfoMap also guards fileMapManager (see openFile/deleteFile).
+        synchronized (fileInfoMap) {
+            fileMapManager.registerFile(fileRef);
+        }
+    }
+
+    /**
+     * Opens a previously registered file (or bumps its reference count if it is
+     * already open). When the open-file table is full, unreferenced entries are
+     * evicted -- their pages swept/flushed and their handles closed -- to make
+     * room; if no room can be made, an exception is thrown.
+     *
+     * @param fileId
+     *            id of the file to open
+     * @throws HyracksDataException
+     *             if the file is unknown or the open-file limit is exhausted
+     */
+    @Override
+    public void openFile(int fileId) throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Opening file: " + fileId + " in cache: " + this);
+        }
+        synchronized (fileInfoMap) {
+            BufferedFileHandle handle = fileInfoMap.get(fileId);
+            if (handle == null) {
+                // Not open yet: evict unreferenced files until there is room
+                // in the table, or until no evictable entry remains.
+                boolean evicted = true;
+                while (fileInfoMap.size() >= maxOpenFiles && evicted) {
+                    evicted = false;
+                    for (Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
+                        BufferedFileHandle candidate = entry.getValue();
+                        if (candidate.getReferenceCount() > 0) {
+                            continue;
+                        }
+                        int victimFileId = entry.getKey();
+                        boolean victimDeleted = candidate.fileHasBeenDeleted();
+                        // Flush the victim's dirty pages unless its file is gone.
+                        sweepAndFlush(victimFileId, !victimDeleted);
+                        if (!victimDeleted) {
+                            ioManager.close(candidate.getFileHandle());
+                        }
+                        fileInfoMap.remove(victimFileId);
+                        evicted = true;
+                        // Restart iteration: the map was structurally modified.
+                        break;
+                    }
+                }
+
+                if (fileInfoMap.size() >= maxOpenFiles) {
+                    throw new HyracksDataException("Could not open fileId " + fileId + ". Max number of files "
+                            + maxOpenFiles + " already opened and referenced.")
+                }
+
+                // Open the underlying file and register a buffered handle for it.
+                FileReference fileRef = fileMapManager.lookupFileName(fileId);
+                IFileHandle rawHandle = ioManager.open(fileRef, IIOManager.FileReadWriteMode.READ_WRITE,
+                        IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
+                handle = new BufferedFileHandle(fileId, rawHandle);
+                fileInfoMap.put(fileId, handle);
+            }
+            handle.incReferenceCount();
+        }
+    }
+
+    /**
+     * Removes every cached page belonging to the given file from all hash
+     * buckets, optionally flushing dirty pages to disk first. Pages of other
+     * files are left untouched.
+     *
+     * @param fileId
+     *            file whose pages are swept out of the cache
+     * @param flushDirtyPages
+     *            if true, dirty pages are written before being invalidated
+     * @throws HyracksDataException
+     *             if writing a dirty page fails
+     */
+    private void sweepAndFlush(int fileId, boolean flushDirtyPages) throws HyracksDataException {
+        for (int i = 0; i < pageMap.length; ++i) {
+            CacheBucket bucket = pageMap[i];
+            bucket.bucketLock.lock();
+            try {
+                // Walk the chain starting at the node AFTER the head; the head
+                // itself is handled separately below so unlinking stays simple.
+                CachedPage prev = bucket.cachedPage;
+                while (prev != null) {
+                    CachedPage cPage = prev.next;
+                    if (cPage == null) {
+                        break;
+                    }
+                    if (invalidateIfFileIdMatch(fileId, cPage, flushDirtyPages)) {
+                        // Unlink the invalidated page from the bucket chain.
+                        prev.next = cPage.next;
+                        cPage.next = null;
+                    } else {
+                        prev = cPage;
+                    }
+                }
+                // Take care of the head of the chain.
+                if (bucket.cachedPage != null) {
+                    if (invalidateIfFileIdMatch(fileId, bucket.cachedPage, flushDirtyPages)) {
+                        CachedPage cPage = bucket.cachedPage;
+                        bucket.cachedPage = bucket.cachedPage.next;
+                        cPage.next = null;
+                    }
+                }
+            } finally {
+                bucket.bucketLock.unlock();
+            }
+        }
+    }
+
+    /**
+     * Invalidates the given cached page if (and only if) it belongs to the
+     * given file; dirty pages are optionally written out first.
+     *
+     * @return true if the page belonged to the file and was invalidated
+     * @throws HyracksDataException
+     *             if writing a dirty page fails
+     */
+    private boolean invalidateIfFileIdMatch(int fileId, CachedPage cPage, boolean flushDirtyPages)
+            throws HyracksDataException {
+        if (BufferedFileHandle.getFileId(cPage.dpid) == fileId) {
+            int pinCount = -1;
+            if (cPage.dirty.get()) {
+                if (flushDirtyPages) {
+                    write(cPage);
+                }
+                cPage.dirty.set(false);
+                // Clearing the dirty flag releases one pin; presumably a dirty
+                // page holds an implicit pin -- TODO confirm against pin()/unpin().
+                pinCount = cPage.pinCount.decrementAndGet();
+            } else {
+                pinCount = cPage.pinCount.get();
+            }
+            if (pinCount != 0) {
+                // A still-pinned page means some caller holds a reference while
+                // the file is being closed/deleted -- a usage error.
+                throw new IllegalStateException("Page is pinned and file is being closed. Pincount is: " + pinCount);
+            }
+            cPage.invalidate();
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * Decrements the reference count of an open file. Resources are not
+     * released here; unreferenced files are reclaimed lazily by openFile()
+     * eviction or on cache close().
+     *
+     * @throws HyracksDataException
+     *             if the file is not open, or was closed more times than opened
+     */
+    @Override
+    public void closeFile(int fileId) throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Closing file: " + fileId + " in cache: " + this);
+        }
+        if (LOGGER.isLoggable(Level.FINE)) {
+            LOGGER.fine(dumpState());
+        }
+
+        synchronized (fileInfoMap) {
+            BufferedFileHandle fInfo = fileInfoMap.get(fileId);
+            if (fInfo == null) {
+                throw new HyracksDataException("Closing unopened file");
+            }
+            if (fInfo.decReferenceCount() < 0) {
+                throw new HyracksDataException("Closed fileId: " + fileId + " more times than it was opened.");
+            }
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Closed file: " + fileId + " in cache: " + this);
+        }
+    }
+
+    /**
+     * Synchronously writes the given page out via the cleaner thread's
+     * cleanPage() in "force" mode.
+     */
+    @Override
+    public void flushDirtyPage(ICachedPage page) throws HyracksDataException {
+        // Assumes the caller has pinned the page.
+        cleanerThread.cleanPage((CachedPage) page, true);
+    }
+
+    /**
+     * Forces the underlying file to stable storage via the IO manager.
+     *
+     * @param fileId
+     *            id of an OPEN file
+     * @param metadata
+     *            whether file metadata should be synced as well
+     * @throws HyracksDataException
+     *             if the file is not open or the sync fails
+     */
+    @Override
+    public void force(int fileId, boolean metadata) throws HyracksDataException {
+        synchronized (fileInfoMap) {
+            BufferedFileHandle fInfo = fileInfoMap.get(fileId);
+            if (fInfo == null) {
+                // Previously this dereferenced a null handle (NullPointerException)
+                // when the file was not open; fail with a meaningful error instead.
+                throw new HyracksDataException("Cannot force fileId " + fileId + ": file is not open.");
+            }
+            ioManager.sync(fInfo.getFileHandle(), metadata);
+        }
+    }
+
+    /**
+     * Deletes a file from the cache and unregisters it from the file map,
+     * optionally flushing its dirty pages first. Afterwards the handle is
+     * marked deleted so that pages reclaimed later are invalidated rather
+     * than written to disk.
+     *
+     * @throws HyracksDataException
+     *             if the file is still referenced (open) by some caller
+     */
+    @Override
+    public synchronized void deleteFile(int fileId, boolean flushDirtyPages) throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Deleting file: " + fileId + " in cache: " + this);
+        }
+        if (flushDirtyPages) {
+            // NOTE(review): the flush runs under a separate fileInfoMap lock
+            // acquisition than the unregistration below, leaving a window
+            // between the two blocks -- presumably tolerated because deleteFile
+            // itself is synchronized; confirm no other path touches this file's
+            // pages in between.
+            synchronized (fileInfoMap) {
+                sweepAndFlush(fileId, flushDirtyPages);
+            }
+        }
+        synchronized (fileInfoMap) {
+            BufferedFileHandle fInfo = null;
+            try {
+                fInfo = fileInfoMap.get(fileId);
+                if (fInfo != null && fInfo.getReferenceCount() > 0) {
+                    throw new HyracksDataException("Deleting open file");
+                }
+            } finally {
+                // NOTE(review): this finally runs even when "Deleting open file"
+                // is thrown above, so the file is unregistered and its handle
+                // closed despite still being referenced -- verify this is intended.
+                fileMapManager.unregisterFile(fileId);
+                if (fInfo != null) {
+                    // Mark the fInfo as deleted,
+                    // such that when its pages are reclaimed in openFile(),
+                    // the pages are not flushed to disk but only invalidated.
+                    ioManager.close(fInfo.getFileHandle());
+                    fInfo.markAsDeleted();
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java
new file mode 100644
index 0000000..d610c7e
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.common.buffercache;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+
+/**
+ * Implementation of an IBufferCache that counts the number of pins/unpins,
+ * latches/unlatches, and file create/delete/open/close called on it. It
+ * delegates the actual functionality to another IBufferCache set in the c'tor.
+ * The counters are updated in a thread-safe fashion using AtomicLong.
+ */
+public class DebugBufferCache implements IBufferCache {
+
+    // Actual BufferCache functionality is delegated to this bufferCache.
+    private final IBufferCache bufferCache;
+    private AtomicLong pinCount;
+    private AtomicLong unpinCount;
+    private AtomicLong readLatchCount;
+    private AtomicLong readUnlatchCount;
+    private AtomicLong writeLatchCount;
+    private AtomicLong writeUnlatchCount;
+    private AtomicLong createFileCount;
+    private AtomicLong deleteFileCount;
+    private AtomicLong openFileCount;
+    private AtomicLong closeFileCount;
+
+    public DebugBufferCache(IBufferCache bufferCache) {
+        this.bufferCache = bufferCache;
+        resetCounters();
+    }
+
+    @Override
+    public void createFile(FileReference fileRef) throws HyracksDataException {
+        bufferCache.createFile(fileRef);
+        createFileCount.addAndGet(1);
+    }
+
+    @Override
+    public void openFile(int fileId) throws HyracksDataException {
+        bufferCache.openFile(fileId);
+        openFileCount.addAndGet(1);
+    }
+
+    @Override
+    public void closeFile(int fileId) throws HyracksDataException {
+        bufferCache.closeFile(fileId);
+        closeFileCount.addAndGet(1);
+    }
+
+    @Override
+    public void deleteFile(int fileId, boolean flushDirtyPages) throws HyracksDataException {
+        bufferCache.deleteFile(fileId, flushDirtyPages);
+        deleteFileCount.addAndGet(1);
+    }
+
+    @Override
+    public ICachedPage tryPin(long dpid) throws HyracksDataException {
+        return bufferCache.tryPin(dpid);
+    }
+
+    @Override
+    public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
+        ICachedPage page = bufferCache.pin(dpid, newPage);
+        pinCount.addAndGet(1);
+        return page;
+    }
+
+    @Override
+    public void unpin(ICachedPage page) throws HyracksDataException {
+        bufferCache.unpin(page);
+        unpinCount.addAndGet(1);
+    }
+
+    @Override
+    public int getPageSize() {
+        return bufferCache.getPageSize();
+    }
+
+    @Override
+    public int getNumPages() {
+        return bufferCache.getNumPages();
+    }
+
+    @Override
+    public void close() {
+        bufferCache.close();
+    }
+
+    public void resetCounters() {
+        pinCount.set(0);
+        unpinCount.set(0);
+        readLatchCount.set(0);
+        readUnlatchCount.set(0);
+        writeLatchCount.set(0);
+        writeUnlatchCount.set(0);
+        createFileCount.set(0);
+        deleteFileCount.set(0);
+        openFileCount.set(0);
+        closeFileCount.set(0);
+    }
+
+    public long getPinCount() {
+        return pinCount.get();
+    }
+
+    public long getUnpinCount() {
+        return unpinCount.get();
+    }
+
+    public long getReadLatchCount() {
+        return readLatchCount.get();
+    }
+
+    public long getReadUnlatchCount() {
+        return readUnlatchCount.get();
+    }
+
+    public long getWriteLatchCount() {
+        return writeLatchCount.get();
+    }
+
+    public long getWriteUnlatchCount() {
+        return writeUnlatchCount.get();
+    }
+
+    public long getCreateFileCount() {
+        return createFileCount.get();
+    }
+
+    public long getDeleteFileCount() {
+        return deleteFileCount.get();
+    }
+
+    public long getOpenFileCount() {
+        return openFileCount.get();
+    }
+
+    public long getCloseFileCount() {
+        return closeFileCount.get();
+    }
+
+	@Override
+	public void flushDirtyPage(ICachedPage page) throws HyracksDataException {
+		bufferCache.flushDirtyPage(page);
+	}
+
+	@Override
+	public void force(int fileId, boolean metadata) throws HyracksDataException {
+		bufferCache.force(fileId, metadata);
+	}
+}
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DelayPageCleanerPolicy.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DelayPageCleanerPolicy.java
new file mode 100644
index 0000000..c48b235
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DelayPageCleanerPolicy.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.common.buffercache;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Page cleaner policy that throttles the cleaner by sleeping (via a timed
+ * wait on the cleaner's monitor) for a fixed delay at the end of each clean
+ * cycle. notifyVictimNotFound() wakes the cleaner early so it can make
+ * progress when the cache needs a victim.
+ */
+public class DelayPageCleanerPolicy implements IPageCleanerPolicy {
+    // Delay in milliseconds applied between clean cycles.
+    private final long delay;
+
+    public DelayPageCleanerPolicy(long delay) {
+        this.delay = delay;
+    }
+
+    @Override
+    public void notifyCleanCycleStart(Object monitor) throws HyracksDataException {
+        // No throttling at cycle start; the delay is applied at cycle end.
+    }
+
+    @Override
+    public void notifyCleanCycleFinish(Object monitor) throws HyracksDataException {
+        try {
+            // The caller must hold the monitor (per the IPageCleanerPolicy
+            // contract); otherwise wait() throws IllegalMonitorStateException.
+            monitor.wait(delay);
+        } catch (InterruptedException e) {
+            // Restore the interrupt status so the owning thread can observe it;
+            // the original code swallowed it when wrapping the exception.
+            Thread.currentThread().interrupt();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void notifyVictimNotFound(Object monitor) throws HyracksDataException {
+        // Wake the waiting cleaner immediately so it can free up a victim.
+        monitor.notifyAll();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DirectBufferAllocator.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DirectBufferAllocator.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DirectBufferAllocator.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DirectBufferAllocator.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/HeapBufferAllocator.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/HeapBufferAllocator.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/HeapBufferAllocator.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/HeapBufferAllocator.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCacheInternal.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCacheInternal.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCacheInternal.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCacheInternal.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICacheMemoryAllocator.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICacheMemoryAllocator.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICacheMemoryAllocator.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICacheMemoryAllocator.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPage.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPage.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPage.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPage.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPageInternal.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPageInternal.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPageInternal.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/ICachedPageInternal.java
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IPageCleanerPolicy.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IPageCleanerPolicy.java
new file mode 100644
index 0000000..197dc28
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IPageCleanerPolicy.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.common.buffercache;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Allows customization of the page cleaning strategy by the cleaner thread.
+ * 
+ * @author vinayakb
+ */
+public interface IPageCleanerPolicy {
+    /**
+     * Callback from the cleaner just before the beginning of a cleaning cycle.
+     * Implementations may block (e.g. wait on the monitor) to gate cleaning.
+     * 
+     * @param monitor
+     *            - The monitor on which a mutex is held while in this call
+     * @throws HyracksDataException
+     */
+    public void notifyCleanCycleStart(Object monitor) throws HyracksDataException;
+
+    /**
+     * Callback from the cleaner just after the finish of a cleaning cycle.
+     * Implementations may block here to throttle the cleaning rate.
+     * 
+     * @param monitor
+     *            - The monitor on which a mutex is held while in this call.
+     * @throws HyracksDataException
+     */
+    public void notifyCleanCycleFinish(Object monitor) throws HyracksDataException;
+
+    /**
+     * Callback to indicate that no victim was found, so the cleaner can be
+     * woken early to make pages evictable.
+     * 
+     * @param monitor
+     *            - The monitor on which a mutex is held while in this call.
+     * @throws HyracksDataException
+     */
+    public void notifyVictimNotFound(Object monitor) throws HyracksDataException;
+}
\ No newline at end of file
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IPageReplacementStrategy.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapManager.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapManager.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapManager.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapManager.java
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapProvider.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapProvider.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapProvider.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapProvider.java
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/smi/TransientFileMapManager.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/smi/TransientFileMapManager.java
new file mode 100644
index 0000000..e4ddc08
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/smi/TransientFileMapManager.java
@@ -0,0 +1,57 @@
+package edu.uci.ics.hyracks.storage.common.smi;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+
+/**
+ * In-memory (non-persistent) file map manager: assigns monotonically
+ * increasing int ids to FileReferences and supports lookups in both
+ * directions. No synchronization is done here; callers must synchronize
+ * externally if an instance is shared across threads.
+ */
+public class TransientFileMapManager implements IFileMapManager {
+    private static final long serialVersionUID = 1L;
+
+    // id -> file and file -> id views of the same mapping; kept in sync by
+    // registerFile/unregisterFile.
+    private Map<Integer, FileReference> id2nameMap = new HashMap<Integer, FileReference>();
+    private Map<FileReference, Integer> name2IdMap = new HashMap<FileReference, Integer>();
+    // Next id to hand out; ids are never reused.
+    private int idCounter = 0;
+
+    @Override
+    public FileReference lookupFileName(int fileId) throws HyracksDataException {
+        FileReference fRef = id2nameMap.get(fileId);
+        if (fRef == null) {
+            throw new HyracksDataException("No mapping found for id: " + fileId);
+        }
+        return fRef;
+    }
+
+    @Override
+    public int lookupFileId(FileReference fileRef) throws HyracksDataException {
+        Integer fileId = name2IdMap.get(fileRef);
+        if (fileId == null) {
+            throw new HyracksDataException("No mapping found for name: " + fileRef);
+        }
+        return fileId;
+    }
+
+    @Override
+    public boolean isMapped(FileReference fileRef) {
+        return name2IdMap.containsKey(fileRef);
+    }
+
+    @Override
+    public boolean isMapped(int fileId) {
+        return id2nameMap.containsKey(fileId);
+    }
+
+    @Override
+    public void unregisterFile(int fileId) throws HyracksDataException {
+        // Silently a no-op for an unknown id: remove() returns null and
+        // name2IdMap.remove(null) is harmless (unlike the lookup methods,
+        // which throw).
+        FileReference fileRef = id2nameMap.remove(fileId);
+        name2IdMap.remove(fileRef);
+    }
+
+    @Override
+    public void registerFile(FileReference fileRef) throws HyracksDataException {
+        // NOTE(review): registering the same FileReference twice overwrites
+        // its name2IdMap entry but leaves the old id -> name entry stale in
+        // id2nameMap -- verify callers never re-register a mapped file.
+        Integer fileId = idCounter++;
+        id2nameMap.put(fileId, fileRef);
+        name2IdMap.put(fileRef, fileId);
+    }
+}
\ No newline at end of file
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/sync/LatchType.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/sync/LatchType.java
similarity index 100%
rename from hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/sync/LatchType.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/sync/LatchType.java
diff --git a/hyracks/hyracks-test-support/pom.xml b/hyracks/hyracks-test-support/pom.xml
new file mode 100644
index 0000000..2f10556
--- /dev/null
+++ b/hyracks/hyracks-test-support/pom.xml
@@ -0,0 +1,69 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-test-support</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-test-support</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-btree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-rtree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-invertedindex</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
new file mode 100644
index 0000000..85bfdd2
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexBulkLoadTest extends OrderedIndexTestDriver {
+
+    // Helper utilities for generating tuples and verifying index contents.
+    private final OrderedIndexTestUtils testUtils;
+    // Number of times the index is bulk-loaded and re-verified per test run.
+    private final int bulkLoadRounds;
+
+    public OrderedIndexBulkLoadTest(BTreeLeafFrameType[] leafFrameTypesToTest, int bulkLoadRounds) {
+        super(leafFrameTypesToTest);
+        this.bulkLoadRounds = bulkLoadRounds;
+        this.testUtils = new OrderedIndexTestUtils();
+    }
+
+    /**
+     * Bulk-loads random tuples into a fresh index context for the requested
+     * number of rounds, verifying point searches, scans, disk-order scans, and
+     * range searches after each round.
+     */
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+            throws Exception {
+        OrderedIndexTestContext testCtx = createTestContext(fieldSerdes, numKeys, leafType);
+        for (int round = 0; round < bulkLoadRounds; round++) {
+            // All fieldSerdes are assumed to be of the same type, so the first
+            // one determines which kind of tuples to generate.
+            ISerializerDeserializer keySerde = fieldSerdes[0];
+            if (keySerde instanceof IntegerSerializerDeserializer) {
+                testUtils.bulkLoadIntTuples(testCtx, numTuplesToInsert, getRandom());
+            } else if (keySerde instanceof UTF8StringSerializerDeserializer) {
+                testUtils.bulkLoadStringTuples(testCtx, numTuplesToInsert, getRandom());
+            }
+            testUtils.checkPointSearches(testCtx);
+            testUtils.checkScan(testCtx);
+            testUtils.checkDiskOrderScan(testCtx);
+            testUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+            if (prefixLowKey != null && prefixHighKey != null) {
+                testUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+            }
+        }
+        testCtx.getIndex().close();
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "BulkLoad";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
new file mode 100644
index 0000000..93075a1
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+/**
+ * Tests ordered-index deletion: repeatedly inserts randomly generated tuples
+ * and then deletes them in several rounds, verifying the index contents
+ * (point searches, scans, range searches) after every delete round.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexDeleteTest extends OrderedIndexTestDriver {
+
+    private final OrderedIndexTestUtils orderedIndexTestUtils;
+
+    public OrderedIndexDeleteTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
+        super(leafFrameTypesToTest);
+        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+    }
+
+    // Number of insert rounds; each is followed by numDeleteRounds of deletes.
+    private static final int numInsertRounds = 3;
+    // Deletes per insert round are spread evenly over this many rounds.
+    private static final int numDeleteRounds = 3;
+
+    /**
+     * Inserts random tuples, then deletes them in {@code numDeleteRounds}
+     * evenly sized batches, checking index consistency after each batch.
+     */
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+            throws Exception {
+        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+        for (int i = 0; i < numInsertRounds; i++) {
+            // We assume all fieldSerdes are of the same type. Check the first
+            // one to determine which field types to generate.
+            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+                orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+            } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+                orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
+            }
+            // Ceil so all tuples are deleted even when the count doesn't divide evenly.
+            int numTuplesPerDeleteRound = (int) Math
+                    .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
+            for (int j = 0; j < numDeleteRounds; j++) {
+                orderedIndexTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
+                orderedIndexTestUtils.checkPointSearches(ctx);
+                orderedIndexTestUtils.checkScan(ctx);
+                orderedIndexTestUtils.checkDiskOrderScan(ctx);
+                orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+                // Prefix keys are only supplied for composite-key configurations.
+                if (prefixLowKey != null && prefixHighKey != null) {
+                    orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+                }
+            }
+        }
+        ctx.getIndex().close();
+    }
+
+    /** Operation name used by the test driver for logging/reporting. */
+    @Override
+    protected String getTestOpName() {
+        return "Delete";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
new file mode 100644
index 0000000..a29be89
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
@@ -0,0 +1,644 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Example-style integration tests for ordered tree indexes (e.g. the BTree):
+ * exercises insert, delete, update and bulk-load operations followed by
+ * ordered scans, disk-order scans and range searches. Concrete subclasses
+ * supply the index implementation under test via {@link #createTreeIndex}.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexExamplesTest {
+    protected static final Logger LOGGER = Logger.getLogger(OrderedIndexExamplesTest.class.getName());
+    // Fixed seed so example runs are reproducible.
+    protected final Random rnd = new Random(50);
+
+    /** Creates the concrete index implementation under test. */
+    protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
+            throws TreeIndexException;
+
+    /** File id the index is created and opened with. */
+    protected abstract int getIndexFileId();
+
+    /**
+     * Fixed-Length Key,Value Example.
+     * 
+     * Create a tree index with one fixed-length key field and one fixed-length value
+     * field. Fill index with random values using insertions (not bulk load).
+     * Perform scans and range search.
+     */
+    @Test
+    public void fixedLengthKeyValueExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Fixed-Length Key,Value Example.");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            int f0 = rnd.nextInt() % numInserts;
+            int f1 = 5;
+            TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+                // Ignore: random keys may collide, and duplicates are expected.
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        orderedScan(indexAccessor, fieldSerdes);
+        diskOrderScan(indexAccessor, fieldSerdes);
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(keyFieldCount);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -1000);
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(keyFieldCount);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(highKeyTb, highKey, 1000);
+
+        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        treeIndex.close();
+    }
+
+    /**
+     * Composite Key Example (Non-Unique Index).
+     * 
+     * Create a tree index with two fixed-length key fields and one fixed-length
+     * value field. Fill index with random values using insertions (not bulk
+     * load) Perform scans and range search.
+     */
+    @Test
+    public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Composite Key Test");
+        }
+
+        // Declare fields.
+        int fieldCount = 3;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        int numInserts = 10000;
+        // Use numInserts (not a repeated literal) so the bound stays in sync.
+        for (int i = 0; i < numInserts; i++) {
+            int f0 = rnd.nextInt() % 2000;
+            int f1 = rnd.nextInt() % 1000;
+            int f2 = 5;
+            TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+                // Ignore: random keys may collide, and duplicates are expected.
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        orderedScan(indexAccessor, fieldSerdes);
+        diskOrderScan(indexAccessor, fieldSerdes);
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -3);
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(highKeyTb, highKey, 3);
+
+        // Prefix-Range search in [-3, 3]
+        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        treeIndex.close();
+    }
+
+    /**
+     * Variable-Length Example. Create a BTree with one variable-length key
+     * field and one variable-length value field. Fill BTree with random values
+     * using insertions (not bulk load) Perform ordered scans and range search.
+     */
+    @Test
+    public void varLenKeyValueExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Variable-Length Key,Value Example");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        // Max string length to be generated.
+        int maxLength = 10;
+        int numInserts = 10000;
+        // Use numInserts (not a repeated literal) so the bound stays in sync.
+        for (int i = 0; i < numInserts; i++) {
+            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + f0 + " " + f1);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+                // Ignore: random keys may collide, and duplicates are expected.
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        orderedScan(indexAccessor, fieldSerdes);
+        diskOrderScan(indexAccessor, fieldSerdes);
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createTuple(lowKeyTb, lowKey, fieldSerdes, "cbf");
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createTuple(highKeyTb, highKey, fieldSerdes, "cc7");
+
+        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        treeIndex.close();
+    }
+
+    /**
+     * Deletion Example.
+     * 
+     * Create a BTree with one variable-length key field and one variable-length
+     * value field. Fill B-tree with random values using insertions, then delete
+     * entries one-by-one. Repeat procedure a few times on same BTree.
+     */
+    @Test
+    public void deleteExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Deletion Example");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        // Max string length to be generated.
+        int runs = 3;
+        for (int run = 0; run < runs; run++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
+                LOGGER.info("Inserting into tree...");
+            }
+            int maxLength = 10;
+            int ins = 10000;
+            String[] f0s = new String[ins];
+            String[] f1s = new String[ins];
+            int insDone = 0;
+            // insDoneCmp[i] records how many inserts had succeeded after tuple i;
+            // it is compared against the running delete count below.
+            int[] insDoneCmp = new int[ins];
+            for (int i = 0; i < ins; i++) {
+                String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+                f0s[i] = f0;
+                f1s[i] = f1;
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("Inserting " + i);
+                    }
+                }
+                try {
+                    indexAccessor.insert(tuple);
+                    insDone++;
+                } catch (TreeIndexException e) {
+                    // Ignore duplicate-key failures; insDone tracks successes.
+                }
+                insDoneCmp[i] = insDone;
+            }
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deleting from tree...");
+            }
+            int delDone = 0;
+            for (int i = 0; i < ins; i++) {
+                TupleUtils.createTuple(tb, tuple, fieldSerdes, f0s[i], f1s[i]);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("Deleting " + i);
+                    }
+                }
+                try {
+                    indexAccessor.delete(tuple);
+                    delDone++;
+                } catch (TreeIndexException e) {
+                    // Ignore failures for tuples whose insert was a duplicate.
+                }
+                if (insDoneCmp[i] != delDone) {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
+                        LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
+                    }
+                    break;
+                }
+            }
+            if (insDone != delDone) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+                }
+                break;
+            }
+        }
+        treeIndex.close();
+    }
+
+    /**
+     * Update example.
+     * 
+     * Create a BTree with one variable-length key field and one variable-length
+     * value field. Fill B-tree with random values using insertions, then update
+     * entries one-by-one. Repeat procedure a few times on same BTree.
+     */
+    @Test
+    public void updateExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Update example");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        int maxLength = 10;
+        int ins = 10000;
+        // Size from ins (not a repeated literal) so the array stays in sync.
+        String[] keys = new String[ins];
+        for (int i = 0; i < ins; i++) {
+            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+            keys[i] = f0;
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+                // Ignore: random keys may collide, and duplicates are expected.
+            }
+        }
+        // Print before doing any updates.
+        orderedScan(indexAccessor, fieldSerdes);
+
+        int runs = 3;
+        for (int run = 0; run < runs; run++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
+                LOGGER.info("Updating BTree");
+            }
+            for (int i = 0; i < ins; i++) {
+                // Generate a new random value for f1.
+                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("Updating " + i);
+                    }
+                }
+                try {
+                    indexAccessor.update(tuple);
+                } catch (TreeIndexException e) {
+                    // Ignore updates of keys whose insert failed as a duplicate.
+                } catch (UnsupportedOperationException e) {
+                    // Some indexes don't support in-place update; skip them.
+                }
+            }
+            // Do another scan after a round of updates.
+            orderedScan(indexAccessor, fieldSerdes);
+        }
+        treeIndex.close();
+    }
+
+    /**
+     * Bulk load example.
+     * 
+     * Load a tree with 100,000 tuples. BTree has a composite key to "simulate"
+     * non-unique index creation.
+     * 
+     */
+    @Test
+    public void bulkLoadExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Bulk load example");
+        }
+        // Declare fields.
+        int fieldCount = 3;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        // Load sorted records.
+        int ins = 100000;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Bulk loading " + ins + " tuples");
+        }
+        long start = System.currentTimeMillis();
+        IIndexBulkLoadContext bulkLoadCtx = treeIndex.beginBulkLoad(0.7f);
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        for (int i = 0; i < ins; i++) {
+            TupleUtils.createIntegerTuple(tb, tuple, i, i, 5);
+            treeIndex.bulkLoadAddTuple(tuple, bulkLoadCtx);
+        }
+        treeIndex.endBulkLoad(bulkLoadCtx);
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
+        }
+
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, 44444);
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(highKeyTb, highKey, 44500);
+
+        // Prefix-Range search in [44444, 44500]
+        rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        treeIndex.close();
+    }
+
+    /** Scans the whole index in key order, logging every tuple. */
+    private void orderedScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
+            throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Ordered Scan:");
+        }
+        IIndexCursor scanCursor = (IIndexCursor) indexAccessor.createSearchCursor();
+        RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
+        indexAccessor.search(scanCursor, nullPred);
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
+            }
+        } finally {
+            scanCursor.close();
+        }
+    }
+
+    /**
+     * Scans the index in on-disk page order, logging every tuple. Indexes that
+     * don't support disk-order scans (e.g. the LSMBTree) are tolerated.
+     */
+    private void diskOrderScan(IIndexAccessor indexAccessor,
+            ISerializerDeserializer[] fieldSerdes) throws Exception {
+        try {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Disk-Order Scan:");
+            }
+            ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
+            TreeDiskOrderScanCursor diskOrderCursor = (TreeDiskOrderScanCursor) treeIndexAccessor
+                    .createDiskOrderScanCursor();
+            treeIndexAccessor.diskOrderScan(diskOrderCursor);
+            try {
+                while (diskOrderCursor.hasNext()) {
+                    diskOrderCursor.next();
+                    ITupleReference frameTuple = diskOrderCursor.getTuple();
+                    String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info(rec);
+                    }
+                }
+            } finally {
+                diskOrderCursor.close();
+            }
+        } catch (UnsupportedOperationException e) {
+            // Ignore exception because some indexes, e.g. the LSMBTree, don't
+            // support disk-order scan.
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring disk-order scan since it's not supported.");
+            }
+        } catch (ClassCastException e) {
+            // Ignore exception because IIndexAccessor sometimes isn't
+            // an ITreeIndexAccessor, e.g., for the LSMBTree.
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring disk-order scan since it's not supported.");
+            }
+        }
+    }
+
+    /** Searches [lowKey, highKey] (both inclusive), logging every match. */
+    private void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes,
+            ITupleReference lowKey, ITupleReference highKey) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
+            String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
+            LOGGER.info("Range-Search in: [ " + lowKeyString + ", " + highKeyString + "]");
+        }
+        ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor();
+        // Comparators may cover fewer fields than the tuples (prefix search).
+        MultiComparator lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, lowKey);
+        MultiComparator highKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, highKey);
+        RangePredicate rangePred = new RangePredicate(lowKey, highKey, true, true, lowKeySearchCmp,
+                highKeySearchCmp);
+        indexAccessor.search(rangeCursor, rangePred);
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
+            }
+        } finally {
+            rangeCursor.close();
+        }
+    }
+
+    /**
+     * Returns a pseudo-random string of at most {@code length} characters,
+     * sampled from the hex encoding of a random double's bit pattern.
+     */
+    public static String randomString(int length, Random random) {
+        String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < s.length() && i < length; i++) {
+            strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+        }
+        return strBuilder.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java
new file mode 100644
index 0000000..d12603b
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+/**
+ * Tests the BTree insert operation with strings and integer fields using
+ * various numbers of key and payload fields.
+ * 
+ * Each tests first fills a BTree with randomly generated tuples. We compare the
+ * following operations against expected results: 1. Point searches for all
+ * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
+ * search for composite keys).
+ * 
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexInsertTest extends OrderedIndexTestDriver {
+
+    // Shared helper that performs the insertions and result verification.
+    private final OrderedIndexTestUtils orderedIndexTestUtils;
+
+    public OrderedIndexInsertTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
+        super(leafFrameTypesToTest);
+        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+    }
+
+    /**
+     * Fills a fresh index with randomly generated tuples (int or string fields,
+     * chosen by the type of the first serde), then verifies point searches,
+     * ordered scan, disk-order scan, and range search against the expected set.
+     * A prefix range search is additionally verified when both prefix keys are
+     * supplied.
+     */
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+            throws Exception {
+        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+        // We assume all fieldSerdes are of the same type. Check the first one
+        // to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
+        }
+
+        orderedIndexTestUtils.checkPointSearches(ctx);
+        orderedIndexTestUtils.checkScan(ctx);
+        orderedIndexTestUtils.checkDiskOrderScan(ctx);
+
+        orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+        if (prefixLowKey != null && prefixHighKey != null) {
+            orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+        }
+        ctx.getIndex().close();
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Insert";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
new file mode 100644
index 0000000..3a894a2
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.ArrayList;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexMultiThreadTestDriver;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+/**
+ * Runs a configurable multi-threaded workload (mix of index operations) against
+ * an ordered index, once with a thread count matching the machine's cores and
+ * once with an excessive thread count, and logs the elapsed time.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexMultiThreadTest {    
+    
+    protected final Logger LOGGER = Logger.getLogger(OrderedIndexMultiThreadTest.class.getName());
+    
+    // Machine-specific number of threads to use for testing.
+    protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
+    // Excessive number of threads for testing.
+    protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
+    // Total operations per test run, divided among the worker threads.
+    protected final int NUM_OPERATIONS = 10000;
+    
+    protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();    
+    
+    // Subclass hooks: environment setup/teardown, index creation, and the
+    // worker factory that issues operations against the index.
+    protected abstract void setUp() throws HyracksException;
+    
+    protected abstract void tearDown() throws HyracksDataException;        
+
+    protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories) throws TreeIndexException;
+    
+    protected abstract int getFileId();
+    
+    protected abstract ITreeIndexTestWorkerFactory getWorkerFactory();
+    
+    protected abstract ArrayList<TestWorkloadConf> getTestWorkloadConf();
+    
+    protected abstract String getIndexTypeName();
+    
+    // Assigns every operation an equal probability of 1/ops.length.
+    protected static float[] getUniformOpProbs(TestOperation[] ops) {
+        float[] opProbs = new float[ops.length];
+        for (int i = 0; i < ops.length; i++) {
+            opProbs[i] = 1.0f / (float) ops.length;
+        }
+        return opProbs;
+    }
+    
+    /**
+     * Creates an index, runs NUM_OPERATIONS operations from the given workload
+     * configuration across numThreads worker threads (4 batches per thread),
+     * logs the elapsed time, and tears the environment down again.
+     */
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, int numThreads, TestWorkloadConf conf, String dataMsg) throws InterruptedException, TreeIndexException, HyracksException {
+        setUp();
+        
+        if (LOGGER.isLoggable(Level.INFO)) {
+        	String indexTypeName = getIndexTypeName();
+            LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads + "; Workload: " + conf.toString() + ".");
+        }
+        
+        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);     
+        
+        ITreeIndex index = createTreeIndex(typeTraits, cmpFactories);
+        ITreeIndexTestWorkerFactory workerFactory = getWorkerFactory();
+        
+        // 4 batches per thread.
+        int batchSize = (NUM_OPERATIONS / numThreads) / 4;
+        
+        TreeIndexMultiThreadTestDriver driver = new TreeIndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
+        driver.init(getFileId());
+        long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
+        driver.deinit();
+        
+        if (LOGGER.isLoggable(Level.INFO)) {
+            // NOTE(review): message hard-codes "BTree" even though subclasses may
+            // test other index types; getIndexTypeName() would be accurate here.
+            LOGGER.info("BTree MultiThread Test Time: " + times[0] + "ms");
+        }
+        
+        tearDown();
+    }
+    
+    // One int key field plus one int value field, under every workload conf.
+    @Test
+    public void oneIntKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {        
+        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        int numKeys = 1;
+        String dataMsg = "One Int Key And Value";
+        
+        for (TestWorkloadConf conf : workloadConfs) {
+            runTest(fieldSerdes, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+            runTest(fieldSerdes, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+        }
+    }
+    
+    // One string key field plus one string value field, under every workload conf.
+    @Test
+    public void oneStringKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {        
+        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        int numKeys = 1;
+        String dataMsg = "One String Key And Value";
+        
+        for (TestWorkloadConf conf : workloadConfs) {
+            runTest(fieldSerdes, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+            runTest(fieldSerdes, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+        }
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java
new file mode 100644
index 0000000..f75a1f1
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Collection;
+import java.util.TreeSet;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+
+/**
+ * Test context for ordered indexes; keeps the expected index contents in a
+ * sorted set so scans and range searches can be compared in order.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexTestContext extends TreeIndexTestContext<CheckTuple> {
+
+    // Expected index contents, sorted to mirror the ordered index.
+    protected final TreeSet<CheckTuple> checkTuples = new TreeSet<CheckTuple>();
+
+    public OrderedIndexTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+        super(fieldSerdes, treeIndex);
+    }
+
+    /**
+     * Records an upsert in the expected set: a tuple equal to checkTuple is
+     * removed first so the new tuple replaces it (a plain add() would keep the
+     * existing element on duplicates).
+     */
+    public void upsertCheckTuple(CheckTuple checkTuple, Collection<CheckTuple> checkTuples) {
+    	if (checkTuples.contains(checkTuple)) {
+            checkTuples.remove(checkTuple);
+        }
+        checkTuples.add(checkTuple);
+    }
+    
+    @Override
+    public TreeSet<CheckTuple> getCheckTuples() {
+        return checkTuples;
+    }
+
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java
new file mode 100644
index 0000000..8daa5e0
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+/**
+ * Drives a fixed suite of ordered-index tests over several field-type and
+ * key-count combinations, each run against every configured leaf frame type.
+ * Subclasses supply the test context, the random source, and the operation
+ * under test (insert, delete, update, ...).
+ *
+ * NOTE(review): the log messages below hard-code "BTree" even though the
+ * driver is generic over ordered indexes.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexTestDriver {
+    protected final Logger LOGGER = Logger.getLogger(OrderedIndexTestDriver.class.getName());
+
+    // Number of random tuples each test inserts before verification.
+    protected static final int numTuplesToInsert = 10000;
+
+    protected abstract OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+            BTreeLeafFrameType leafType) throws Exception;
+
+    protected abstract Random getRandom();
+
+    protected abstract void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+            throws Exception;
+
+    protected abstract String getTestOpName();
+
+    protected final BTreeLeafFrameType[] leafFrameTypesToTest;
+
+    public OrderedIndexTestDriver(BTreeLeafFrameType[] leafFrameTypesToTest) {
+        this.leafFrameTypesToTest = leafFrameTypesToTest;
+    }
+
+    @Test
+    public void oneIntKeyAndValue() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
+        }
+
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        // Range search in [-1000, 1000]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
+
+        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+            runTest(fieldSerdes, 1, leafFrameType, lowKey, highKey, null, null);
+        }
+    }
+
+    @Test
+    public void twoIntKeys() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
+        }
+
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+
+        // Range search in [(50, 0), (50, 500)]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(50, 500);
+
+        // Prefix range search in [50, 50]
+        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+
+        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+        }
+    }
+
+    @Test
+    public void twoIntKeysAndValues() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
+        }
+
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+
+        // Range search in [(-100, -100), (100, 100)]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(100, 100);
+
+        // Prefix range search in [50, 50]
+        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+
+        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+        }
+    }
+
+    @Test
+    public void oneStringKeyAndValue() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
+        }
+
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Range search in ["cbf", "cc7"]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+
+        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+            runTest(fieldSerdes, 1, leafFrameType, lowKey, highKey, null, null);
+        }
+    }
+
+    @Test
+    public void twoStringKeys() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
+        }
+
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Range search in [("cbf", "ddd"), ("cc7", "eee")]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+
+        // Prefix range search in ["cbf", "cc7"]
+        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+
+        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+        }
+    }
+
+    @Test
+    public void twoStringKeysAndValues() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
+        }
+
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Range search in [("cbf", "ddd"), ("cc7", "eee")]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+
+        // Prefix range search in ["cbf", "cc7"]
+        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+
+        for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+            runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+        }
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java
new file mode 100644
index 0000000..a053dde
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -0,0 +1,440 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.NavigableSet;
+import java.util.Random;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+@SuppressWarnings("rawtypes")
+public class OrderedIndexTestUtils extends TreeIndexTestUtils {
+    private static final Logger LOGGER = Logger.getLogger(OrderedIndexTestUtils.class.getName());
+
+    /**
+     * Deserializes every field of the actual tuple with the given serdes and
+     * fails the test if any field does not equal the corresponding field of
+     * the expected check tuple.
+     */
+    private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
+            ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+        for (int i = 0; i < fieldSerdes.length; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i),
+                    actual.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Object actualObj = fieldSerdes[i].deserialize(dataIn);
+            if (!actualObj.equals(expected.get(i))) {
+                fail("Actual and expected fields do not match on field " + i + ".\nExpected: " + expected.get(i)
+                        + "\nActual  : " + actualObj);
+            }
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    // Create a new TreeSet containing the elements satisfying the prefix
+    // search.
+    // Implementing prefix search by changing compareTo() in CheckTuple does not
+    // work.
+    // Bounds are inclusive on both ends and compare only the first
+    // lowKey.getNumKeys() / highKey.getNumKeys() fields of each tuple.
+    public static TreeSet<CheckTuple> getPrefixExpectedSubset(TreeSet<CheckTuple> checkTuples, CheckTuple lowKey,
+            CheckTuple highKey) {
+        TreeSet<CheckTuple> expectedSubset = new TreeSet<CheckTuple>();
+        Iterator<CheckTuple> iter = checkTuples.iterator();
+        while (iter.hasNext()) {
+            CheckTuple t = iter.next();
+            // geLowKey: tuple's prefix >= lowKey; leHighKey: tuple's prefix <= highKey.
+            boolean geLowKey = true;
+            boolean leHighKey = true;
+            for (int i = 0; i < lowKey.getNumKeys(); i++) {
+                if (t.get(i).compareTo(lowKey.get(i)) < 0) {
+                    geLowKey = false;
+                    break;
+                }
+            }
+            for (int i = 0; i < highKey.getNumKeys(); i++) {
+                if (t.get(i).compareTo(highKey.get(i)) > 0) {
+                    leHighKey = false;
+                    break;
+                }
+            }
+            if (geLowKey && leHighKey) {
+                expectedSubset.add(t);
+            }
+        }
+        return expectedSubset;
+    }
+
+    /**
+     * Runs a range search with the given bounds and inclusiveness flags and
+     * verifies that the cursor returns exactly the expected tuples, in order.
+     * When either bound has fewer fields than the index key, the expected set
+     * is computed as an (inclusive) prefix subset instead of a subSet() slice.
+     */
+    @SuppressWarnings("unchecked")
+    public void checkRangeSearch(ITreeIndexTestContext ctx, ITupleReference lowKey, ITupleReference highKey,
+            boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Range Search.");
+        }
+        MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
+        MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
+        IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor();
+        RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp,
+                highKeyCmp);
+        ctx.getIndexAccessor().search(searchCursor, rangePred);
+        // Get the subset of elements from the expected set within given key
+        // range.
+        CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, ctx.getFieldSerdes(), lowKeyCmp.getKeyFieldCount());
+        CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(),
+                highKeyCmp.getKeyFieldCount());
+        NavigableSet<CheckTuple> expectedSubset = null;
+        if (lowKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()
+                || highKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()) {
+            // Searching on a key prefix (low key or high key or both).
+            // NOTE(review): the prefix subset is always inclusive on both ends,
+            // regardless of the inclusiveness flags -- confirm this matches the
+            // index's prefix-search semantics.
+            expectedSubset = getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck,
+                    highKeyCheck);
+        } else {
+            // Searching on all key fields.
+            expectedSubset = ((TreeSet<CheckTuple>) ctx.getCheckTuples()).subSet(lowKeyCheck, lowKeyInclusive,
+                    highKeyCheck, highKeyInclusive);
+        }
+        Iterator<CheckTuple> checkIter = expectedSubset.iterator();
+        int actualCount = 0;
+        try {
+            while (searchCursor.hasNext()) {
+                if (!checkIter.hasNext()) {
+                    fail("Range search returned more answers than expected.\nExpected: " + expectedSubset.size());
+                }
+                searchCursor.next();
+                CheckTuple expectedTuple = checkIter.next();
+                ITupleReference tuple = searchCursor.getTuple();
+                compareActualAndExpected(tuple, expectedTuple, ctx.getFieldSerdes());
+                actualCount++;
+            }
+            if (actualCount < expectedSubset.size()) {
+                fail("Range search returned fewer answers than expected.\nExpected: " + expectedSubset.size()
+                        + "\nActual  : " + actualCount);
+            }
+        } finally {
+            searchCursor.close();
+        }
+    }
+
+    /**
+     * Performs a point (equality) search for every expected tuple and verifies
+     * that at most one matching tuple is returned and that it equals the
+     * expected tuple.
+     * NOTE(review): if a search returns no result at all, the check silently
+     * passes; consider failing when the cursor is initially empty so missing
+     * tuples are detected.
+     */
+    public void checkPointSearches(ITreeIndexTestContext ictx) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Point Searches On All Expected Keys.");
+        }
+        OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
+        IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor();
+
+        ArrayTupleBuilder lowKeyBuilder = new ArrayTupleBuilder(ctx.getKeyFieldCount());
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        ArrayTupleBuilder highKeyBuilder = new ArrayTupleBuilder(ctx.getKeyFieldCount());
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        RangePredicate rangePred = new RangePredicate(lowKey, highKey, true, true, null, null);
+
+        // Iterate through expected tuples, and perform a point search in the
+        // BTree to verify the tuple can be reached.
+        for (CheckTuple checkTuple : ctx.getCheckTuples()) {
+            // Point search is expressed as a range search with low == high.
+            createTupleFromCheckTuple(checkTuple, lowKeyBuilder, lowKey, ctx.getFieldSerdes());
+            createTupleFromCheckTuple(checkTuple, highKeyBuilder, highKey, ctx.getFieldSerdes());
+            MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
+            MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
+
+            rangePred.setLowKey(lowKey, true);
+            rangePred.setHighKey(highKey, true);
+            rangePred.setLowKeyComparator(lowKeyCmp);
+            rangePred.setHighKeyComparator(highKeyCmp);
+
+            // The cursor is reused across iterations; search() presumably resets
+            // it -- confirm against the accessor implementation.
+            ctx.getIndexAccessor().search(searchCursor, rangePred);
+
+            try {
+                // We expect exactly one answer.
+                if (searchCursor.hasNext()) {
+                    searchCursor.next();
+                    ITupleReference tuple = searchCursor.getTuple();
+                    compareActualAndExpected(tuple, checkTuple, ctx.getFieldSerdes());
+                }
+                if (searchCursor.hasNext()) {
+                    fail("Point search returned more than one answer.");
+                }
+            } finally {
+                searchCursor.close();
+            }
+        }
+    }
+
+    /**
+     * Inserts numTuples randomly generated string tuples (key lengths 1-10,
+     * value length 5). Expected check tuples are recorded only after a
+     * successful insert because duplicate-key insertions are ignored.
+     */
+    @SuppressWarnings("unchecked")
+    public void insertStringTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        String[] fieldValues = new String[fieldCount];
+        for (int i = 0; i < numTuples; i++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                // Log progress roughly every 10% of the insertions.
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+                fieldValues[j] = getRandomString(length, rnd);
+            }
+            // Set values.
+            for (int j = numKeyFields; j < fieldCount; j++) {
+                fieldValues[j] = getRandomString(5, rnd);
+            }
+            TupleUtils.createTuple(ctx.getTupleBuilder(), ctx.getTuple(), ctx.getFieldSerdes(), (Object[]) fieldValues);
+            try {
+                ctx.getIndexAccessor().insert(ctx.getTuple());
+                // Set expected values. Do this only after insertion succeeds
+                // because we ignore duplicate keys.
+                ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+            } catch (BTreeDuplicateKeyException e) {
+                // Ignore duplicate key insertions.
+            }
+        }
+    }
+    
+    /**
+     * Upserts numTuples randomly generated string tuples; on key collisions
+     * the matching check tuple in the expected set is replaced (see
+     * OrderedIndexTestContext.upsertCheckTuple).
+     */
+    public void upsertStringTuples(ITreeIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
+    	OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
+    	int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        String[] fieldValues = new String[fieldCount];
+        for (int i = 0; i < numTuples; i++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                // Log progress roughly every 10% of the upserts.
+                // NOTE(review): message says "Inserting" though this is an upsert.
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+                fieldValues[j] = getRandomString(length, rnd);
+            }
+            // Set values.
+            for (int j = numKeyFields; j < fieldCount; j++) {
+                fieldValues[j] = getRandomString(5, rnd);
+            }
+            TupleUtils.createTuple(ctx.getTupleBuilder(), ctx.getTuple(), ctx.getFieldSerdes(), (Object[]) fieldValues);
+            ctx.getIndexAccessor().upsert(ctx.getTuple());
+            ctx.upsertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    public void bulkLoadStringTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        String[] fieldValues = new String[fieldCount];
+        TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<CheckTuple>();
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+                fieldValues[j] = getRandomString(length, rnd);
+            }
+            // Set values.
+            for (int j = numKeyFields; j < fieldCount; j++) {
+                fieldValues[j] = getRandomString(5, rnd);
+            }
+            // Set expected values. We also use these as the pre-sorted stream
+            // for bulk loading.
+            ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
+        }
+        bulkLoadCheckTuples(ctx, tmpCheckTuples);
+
+        // Add tmpCheckTuples to ctx check tuples for comparing searches.
+        for (CheckTuple checkTuple : tmpCheckTuples) {
+            ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
+        }
+    }
+
+    public void upsertIntTuples(ITreeIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
+        OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
+    	int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        int[] fieldValues = new int[ctx.getFieldCount()];
+        // Scale range of values according to number of keys.
+        // For example, for 2 keys we want the square root of numTuples, for 3
+        // keys the cube root of numTuples, etc.
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+            // Set values.
+            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
+            TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            ctx.getIndexAccessor().upsert(ctx.getTuple());
+            ctx.upsertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+        }
+    }
+    
    /**
     * Updates the non-key fields of up to numTuples randomly chosen existing
     * tuples, keeping the expected check tuples in sync with the index.
     * Each existing tuple is updated at most once per call (see the
     * swap-with-last bookkeeping below). No-op when the index has no payload
     * fields, since keys cannot be updated in place.
     *
     * @param ictx      test context; must be an OrderedIndexTestContext
     * @param numTuples maximum number of tuples to update
     * @param rnd       random generator for victim selection and new values
     */
    @SuppressWarnings("unchecked")
    public void updateTuples(ITreeIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
        OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
        int fieldCount = ctx.getFieldCount();
        int keyFieldCount = ctx.getKeyFieldCount();
        // This is a noop because we can only update non-key fields.
        if (fieldCount == keyFieldCount) {
            return;
        }
        ArrayTupleBuilder updateTupleBuilder = new ArrayTupleBuilder(fieldCount);
        ArrayTupleReference updateTuple = new ArrayTupleReference();
        int numCheckTuples = ctx.getCheckTuples().size();
        // Copy CheckTuple references into array, so we can randomly pick from
        // there.
        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
        int idx = 0;
        for (CheckTuple checkTuple : ctx.getCheckTuples()) {
            checkTuples[idx++] = checkTuple;
        }
        // numCheckTuples shrinks as tuples are consumed; the loop also stops
        // once every existing tuple has been updated once.
        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
            if (LOGGER.isLoggable(Level.INFO)) {
                // Log progress roughly 10 times over the whole run.
                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                    LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
                }
            }
            // Pick a random victim from the not-yet-updated prefix of the array.
            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
            CheckTuple checkTuple = checkTuples[checkTupleIdx];
            // Update check tuple's non-key fields.
            for (int j = keyFieldCount; j < fieldCount; j++) {
                Comparable newValue = getRandomUpdateValue(ctx.getFieldSerdes()[j], rnd);
                checkTuple.set(j, newValue);
            }

            createTupleFromCheckTuple(checkTuple, updateTupleBuilder, updateTuple, ctx.getFieldSerdes());
            ctx.getIndexAccessor().update(updateTuple);

            // Swap with last "valid" CheckTuple.
            // Moving the updated tuple past the shrinking window guarantees it
            // cannot be picked again in this call.
            CheckTuple tmp = checkTuples[numCheckTuples - 1];
            checkTuples[numCheckTuples - 1] = checkTuple;
            checkTuples[checkTupleIdx] = tmp;
            numCheckTuples--;
        }
    }
+
+    public CheckTuple createStringCheckTuple(String[] fieldValues, int numKeyFields) {
+        CheckTuple<String> checkTuple = new CheckTuple<String>(fieldValues.length, numKeyFields);
+        for (String s : fieldValues) {
+            checkTuple.add((String) s);
+        }
+        return checkTuple;
+    }
+
+    private static Comparable getRandomUpdateValue(ISerializerDeserializer serde, Random rnd) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return Integer.valueOf(rnd.nextInt());
+        } else if (serde instanceof UTF8StringSerializerDeserializer) {
+            return getRandomString(10, rnd);
+        }
+        return null;
+    }
+
+    public static String getRandomString(int length, Random rnd) {
+        String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < s.length() && i < length; i++) {
+            strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
+        }
+        return strBuilder.toString();
+    }
+
+    @Override
+    protected CheckTuple createCheckTuple(int numFields, int numKeyFields) {
+        return new CheckTuple(numFields, numKeyFields);
+    }
+
+    @Override
+    protected ISearchPredicate createNullSearchPredicate() {
+        return new RangePredicate(null, null, true, true, null, null);
+    }
+
+    @Override
+    public void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
+            ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+        int actualCount = 0;
+        try {
+            while (cursor.hasNext()) {
+                if (!checkIter.hasNext()) {
+                    fail("Ordered scan returned more answers than expected.\nExpected: " + checkTuples.size());
+                }
+                cursor.next();
+                CheckTuple expectedTuple = checkIter.next();
+                ITupleReference tuple = cursor.getTuple();
+                compareActualAndExpected(tuple, expectedTuple, fieldSerdes);
+                actualCount++;
+            }
+            if (actualCount < checkTuples.size()) {
+                fail("Ordered scan returned fewer answers than expected.\nExpected: " + checkTuples.size()
+                        + "\nActual  : " + actualCount);
+            }
+        } finally {
+            cursor.close();
+        }
+
+    }
+
+    @Override
+    protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
+        CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldValues.length, numKeyFields);
+        for (int v : fieldValues) {
+            checkTuple.add(v);
+        }
+        return checkTuple;
+    }
+
+    @Override
+    protected void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd) {
+        for (int j = 0; j < numKeyFields; j++) {
+            fieldValues[j] = rnd.nextInt() % maxValue;
+        }
+    }
+
+    @Override
+    protected void setIntPayloadFields(int[] fieldValues, int numKeyFields, int numFields) {
+        for (int j = numKeyFields; j < numFields; j++) {
+            fieldValues[j] = j;
+        }
+    }
+
+    @Override
+    protected Collection createCheckTuplesCollection() {
+        return new TreeSet<CheckTuple>();
+    }
+
+    @Override
+    protected ArrayTupleBuilder createDeleteTupleBuilder(ITreeIndexTestContext ctx) {
+        return new ArrayTupleBuilder(ctx.getKeyFieldCount());
+    }
+
+    @Override
+    protected boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, ITreeIndexTestContext ctx)
+            throws HyracksDataException {
+        @SuppressWarnings("unchecked")
+        TreeSet<CheckTuple> checkTuples = (TreeSet<CheckTuple>) ctx.getCheckTuples();
+        CheckTuple matchingCheckTuple = checkTuples.floor(checkTuple);
+        if (matchingCheckTuple == null) {
+            return false;
+        }
+        compareActualAndExpected(tuple, matchingCheckTuple, ctx.getFieldSerdes());
+        return true;
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java
new file mode 100644
index 0000000..65b2ade
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexUpdateTest extends OrderedIndexTestDriver {
+
+    private final OrderedIndexTestUtils orderedIndexTestUtils;
+
+    public OrderedIndexUpdateTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
+        super(leafFrameTypesToTest);
+        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+    }
+
+    private static final int numUpdateRounds = 3;
+
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+            throws Exception {
+        // This is a noop because we can only update non-key fields.
+        if (fieldSerdes.length == numKeys) {
+            return;
+        }
+        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+        // We assume all fieldSerdes are of the same type. Check the first one
+        // to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
+        }
+        int numTuplesPerDeleteRound = (int) Math.ceil((float) ctx.getCheckTuples().size() / (float) numUpdateRounds);
+        for (int j = 0; j < numUpdateRounds; j++) {
+            orderedIndexTestUtils.updateTuples(ctx, numTuplesPerDeleteRound, getRandom());
+            orderedIndexTestUtils.checkPointSearches(ctx);
+            orderedIndexTestUtils.checkScan(ctx);
+            orderedIndexTestUtils.checkDiskOrderScan(ctx);
+            orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+            if (prefixLowKey != null && prefixHighKey != null) {
+                orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+            }
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Update";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpsertTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
new file mode 100644
index 0000000..0d94a18
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+/**
+ * Tests the BTree insert operation with strings and integer fields using
+ * various numbers of key and payload fields.
+ * 
+ * Each tests first fills a BTree with randomly generated tuples. We compare the
+ * following operations against expected results: 1. Point searches for all
+ * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
+ * search for composite keys).
+ * 
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexUpsertTest extends OrderedIndexTestDriver {
+
+    private final OrderedIndexTestUtils orderedIndexTestUtils;
+
+    public OrderedIndexUpsertTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
+        super(leafFrameTypesToTest);
+        this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+    }
+
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+            ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+            throws Exception {
+        OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+        // We assume all fieldSerdes are of the same type. Check the first one
+        // to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            orderedIndexTestUtils.upsertIntTuples(ctx, numTuplesToInsert, getRandom());
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            orderedIndexTestUtils.upsertStringTuples(ctx, numTuplesToInsert, getRandom());
+        }
+
+        orderedIndexTestUtils.checkPointSearches(ctx);
+        orderedIndexTestUtils.checkScan(ctx);
+        orderedIndexTestUtils.checkDiskOrderScan(ctx);
+
+        orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+        if (prefixLowKey != null && prefixHighKey != null) {
+            orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+        }
+        ctx.getIndex().close();
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Insert";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractTreeIndexTestWorker.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractTreeIndexTestWorker.java
new file mode 100644
index 0000000..eca9b35
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractTreeIndexTestWorker.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
+
+public abstract class AbstractTreeIndexTestWorker extends Thread implements ITreeIndexTestWorker {
+    private Random rnd = new Random();
+    private final DataGenThread dataGen;
+    private final TestOperationSelector opSelector;
+    private final int numBatches;
+    
+    protected final IIndexAccessor indexAccessor;
+    
+    public AbstractTreeIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+        this.dataGen = dataGen;
+        this.opSelector = opSelector;
+        this.numBatches = numBatches;
+        indexAccessor = index.createAccessor();
+    }
+    
+    @Override
+    public void run() {
+        try {
+            for (int i = 0; i < numBatches; i++) {
+                TupleBatch batch = dataGen.getBatch();     
+                for (int j = 0; j < batch.size(); j++) {
+                    TestOperation op = opSelector.getOp(rnd.nextInt());
+                    ITupleReference tuple = batch.get(j);
+                    performOp(tuple, op);
+                }
+                dataGen.releaseBatch(batch);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+    
+    protected void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException {
+        try {
+            while (cursor.hasNext()) {
+                cursor.next();
+            }
+        } finally {
+            cursor.close();
+        }
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java
new file mode 100644
index 0000000..4b4b90b
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+@SuppressWarnings({"rawtypes", "unchecked"})
+public class CheckTuple<T extends Comparable<T>> implements Comparable<T> {
+    protected final int numKeys;    
+    protected final Comparable[] tuple;
+    protected int pos;
+
+    public CheckTuple(int numFields, int numKeys) {
+        this.numKeys = numKeys;
+        this.tuple = new Comparable[numFields];
+        pos = 0;
+    }
+
+    public void add(T e) {
+        tuple[pos++] = e;
+    }
+
+    @Override
+    public int compareTo(T o) {
+        CheckTuple<T> other = (CheckTuple<T>)o;
+        for (int i = 0; i < numKeys; i++) {            
+            int cmp = tuple[i].compareTo(other.get(i));
+            if (cmp != 0) {
+                return cmp;
+            }
+        }
+        return 0;
+    }
+
+    public T get(int idx) {
+        return (T)tuple[idx];
+    }
+    
+    public void set(int idx, T e) {
+        tuple[idx] = e;
+    }
+    
+    public int size() {
+        return tuple.length;
+    }
+    
+    public int getNumKeys() {
+        return numKeys;
+    }
+    
+    @Override
+    public String toString() {
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < tuple.length; i++) {
+            strBuilder.append(tuple[i].toString());
+            if (i != tuple.length-1) {
+                strBuilder.append(" ");
+            }
+        }
+        return strBuilder.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestContext.java
new file mode 100644
index 0000000..9be3e29
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestContext.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Collection;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+
/**
 * Shared context for tree-index tests: bundles the index under test, an
 * accessor, reusable tuple-building machinery, and the collection of
 * "check tuples" that mirrors the expected index contents.
 *
 * @param <T> concrete CheckTuple type used to represent expected results
 */
@SuppressWarnings("rawtypes")
public interface ITreeIndexTestContext<T extends CheckTuple> {
    /** Total number of fields (keys + payload) in a tuple. */
    public int getFieldCount();

    /** Number of leading fields that form the key. */
    public int getKeyFieldCount();

    /** One serializer/deserializer per field, aligned with the field order. */
    public ISerializerDeserializer[] getFieldSerdes();

    /** Comparator factories for the key fields. */
    public IBinaryComparatorFactory[] getComparatorFactories();

    /** Accessor used to perform operations against the index under test. */
    public IIndexAccessor getIndexAccessor();

    /** The index under test. */
    public ITreeIndex getIndex();

    /** Reusable tuple reference; repopulated before each operation. */
    public ArrayTupleReference getTuple();

    /** Reusable builder backing {@link #getTuple()}. */
    public ArrayTupleBuilder getTupleBuilder();

    /** Records an expected tuple in checkTuples after a successful insert. */
    public void insertCheckTuple(T checkTuple, Collection<T> checkTuples);      

    /** Removes an expected tuple from checkTuples after a successful delete. */
    public void deleteCheckTuple(T checkTuple, Collection<T> checkTuples);

    /** The expected contents of the index. */
    public Collection<T> getCheckTuples();

}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorker.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorker.java
similarity index 100%
rename from hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorker.java
rename to hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorker.java
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorkerFactory.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorkerFactory.java
new file mode 100644
index 0000000..64b5aea
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorkerFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
/**
 * Factory for multi-threaded test workers; each worker drives one stream of
 * randomized operations against the shared index.
 */
public interface ITreeIndexTestWorkerFactory {
    /** Creates a worker that consumes numBatches batches from dataGen, applying ops chosen by opSelector to index. */
    public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches);
}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java
new file mode 100644
index 0000000..1ae79a1
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Arrays;
+
/**
 * Maps random integers to test operations according to a configured
 * probability distribution. The probabilities are turned into cumulative
 * integer buckets over [0, Integer.MAX_VALUE] and looked up via binary search.
 */
public class TestOperationSelector {

    public static enum TestOperation {
        INSERT,
        DELETE,
        UPDATE,
        UPSERT,
        POINT_SEARCH,
        RANGE_SEARCH,
        SCAN,
        DISKORDER_SCAN,
        MERGE        
    }

    // Tolerance for checking that probabilities sum to 1. Exact float
    // equality rejects legitimate inputs (e.g. ten 0.1f entries sum to
    // 1.0000001f due to rounding).
    private static final float PROB_SUM_EPSILON = 1e-4f;

    private final TestOperation[] ops;
    // opRanges[i] is the cumulative upper bound for ops[i]; the last entry is
    // always Integer.MAX_VALUE so every draw maps to some op.
    private final int[] opRanges;    

    /**
     * @param ops     candidate operations
     * @param opProbs probability of each op; must align with ops and sum to ~1
     */
    public TestOperationSelector(TestOperation[] ops, float[] opProbs) {
        sanityCheck(ops, opProbs);
        this.ops = ops;
        this.opRanges = getOpRanges(opProbs);
    }

    private void sanityCheck(TestOperation[] ops, float[] opProbs) {
        if (ops.length == 0) {
            throw new RuntimeException("Empty op array.");
        }
        if (opProbs.length == 0) {
            throw new RuntimeException("Empty op probabilities.");
        }
        if (ops.length != opProbs.length) {
            throw new RuntimeException("Ops and op probabilities have unequal length.");
        }
        float sum = 0.0f;
        for (int i = 0; i < opProbs.length; i++) {
            sum += opProbs[i];
        }
        // Tolerance instead of (sum != 1.0f): float addition rounds, so valid
        // probability vectors rarely sum to exactly 1.0f.
        if (Math.abs(sum - 1.0f) > PROB_SUM_EPSILON) {
            throw new RuntimeException("Op probabilities don't add up to 1.");
        }
    }

    private int[] getOpRanges(float[] opProbabilities) {
        int[] opRanges = new int[opProbabilities.length];
        if (opRanges.length > 1) {
            opRanges[0] = (int) Math.floor(Integer.MAX_VALUE * opProbabilities[0]);
            for (int i = 1; i < opRanges.length - 1; i++) {
                opRanges[i] = opRanges[i - 1] + (int) Math.floor(Integer.MAX_VALUE * opProbabilities[i]);
            }
            // The last bucket absorbs all accumulated rounding error.
            opRanges[opRanges.length - 1] = Integer.MAX_VALUE;
        } else {
            opRanges[0] = Integer.MAX_VALUE;
        }
        return opRanges;
    }

    /**
     * Maps a random int (any value, including negatives) to an operation
     * according to the configured probabilities.
     */
    public TestOperation getOp(int randomInt) {
        // Fold negative draws into [0, MAX_VALUE]. Callers pass raw
        // Random.nextInt() values; treating every negative as below the first
        // bucket would have skewed ~half of all draws onto ops[0].
        int draw = randomInt < 0 ? -(randomInt + 1) : randomInt;
        int ix = Arrays.binarySearch(opRanges, draw);
        if (ix < 0) {
            ix = -ix - 1;
        }
        return ops[ix];
    }
}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java
new file mode 100644
index 0000000..2437514
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+
+public class TestWorkloadConf {
+    public final TestOperation[] ops;
+    public final float[] opProbs;
+
+    public TestWorkloadConf(TestOperation[] ops, float[] opProbs) {
+        this.ops = ops;
+        this.opProbs = opProbs;
+    }
+    
+    public String toString() {
+        StringBuilder strBuilder = new StringBuilder();
+        for (TestOperation op : ops) {
+            strBuilder.append(op.toString());
+            strBuilder.append(',');
+        }
+        strBuilder.deleteCharAt(strBuilder.length() - 1);
+        return strBuilder.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexMultiThreadTestDriver.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexMultiThreadTestDriver.java
new file mode 100644
index 0000000..8c1d06f
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexMultiThreadTestDriver.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
+/**
+ * Drives a multi-threaded workload against a tree index and measures the
+ * wall-clock duration of each repetition. Tuple batches are produced by a
+ * DataGenThread and consumed by worker threads created via the supplied
+ * worker factory.
+ */
+@SuppressWarnings("rawtypes")
+public class TreeIndexMultiThreadTestDriver {
+    // Fixed seed so data generation is repeatable across runs.
+    private static final int RANDOM_SEED = 50;
+    // Means no additional payload. Only the specified fields.
+    private static final int PAYLOAD_SIZE = 0;
+    // Picks the next operation according to the configured probabilities.
+    private final TestOperationSelector opSelector;    
+    private final ISerializerDeserializer[] fieldSerdes;
+    private final ITreeIndex index;
+    private final ITreeIndexTestWorkerFactory workerFactory;
+    
+    /**
+     * @param index index under test; create/open it via init() before run()
+     * @param workerFactory creates one worker per thread per repetition
+     * @param fieldSerdes serializers describing the tuple fields to generate
+     * @param ops operations in the workload
+     * @param opProbs selection probability for each operation in ops
+     */
+    public TreeIndexMultiThreadTestDriver(ITreeIndex index, ITreeIndexTestWorkerFactory workerFactory,
+            ISerializerDeserializer[] fieldSerdes, TestOperation[] ops, float[] opProbs) {
+        this.index = index;
+        this.workerFactory = workerFactory;
+        this.fieldSerdes = fieldSerdes;
+        this.opSelector = new TestOperationSelector(ops, opProbs);
+    }      
+    
+    // Creates and opens the index on the given file; call once before run().
+    public void init(int fileId) throws HyracksDataException {
+    	index.create(fileId);
+    	index.open(fileId);
+    }
+    
+    /**
+     * Executes the workload numRepeats times.
+     *
+     * @param numThreads number of worker threads per repetition
+     * @param numRepeats number of timed repetitions
+     * @param numOps     total operations per repetition, split into batches
+     * @param batchSize  tuples per generated batch
+     * @return wall-clock time in milliseconds of each repetition
+     * @throws TreeIndexException if the parameters leave a thread with zero
+     *         batches to process
+     */
+    public long[] run(int numThreads, int numRepeats, int numOps, int batchSize) throws InterruptedException, TreeIndexException {
+        int numBatches = numOps / batchSize;
+        int threadNumBatches = numBatches / numThreads;
+        if (threadNumBatches <= 0) {
+            throw new TreeIndexException("Inconsistent parameters given. Need at least one batch per thread.");
+        }
+        long[] times = new long[numRepeats];
+        for (int i = 0; i < numRepeats; i++) {
+            DataGenThread dataGen = createDatagenThread(numThreads, numBatches, batchSize);
+            dataGen.start();
+            // Wait until the tupleBatchQueue is filled to capacity.
+            // The second condition ends the wait early when all batches fit in
+            // the queue, so generation time stays outside the timed interval.
+            while (dataGen.tupleBatchQueue.remainingCapacity() != 0 && dataGen.tupleBatchQueue.size() != numBatches) {
+                Thread.sleep(10);
+            }
+                        
+            // Start worker threads.
+            AbstractTreeIndexTestWorker[] workers = new AbstractTreeIndexTestWorker[numThreads];
+            long start = System.currentTimeMillis();
+            for (int j = 0; j < numThreads; j++) {
+                workers[j] = workerFactory.create(dataGen, opSelector, index, threadNumBatches);
+                workers[j].start();
+            }
+            // Join worker threads.
+            for (int j = 0; j < numThreads; j++) {                
+                workers[j].join();
+            }
+            long end = System.currentTimeMillis();
+            times[i] = end - start;
+        }
+        return times;
+    }
+    
+    // Closes the index; call once after run().
+    public void deinit() throws HyracksDataException {
+    	index.close();
+    }
+    
+    // To allow subclasses to override the data gen params.
+    public DataGenThread createDatagenThread(int numThreads, int numBatches, int batchSize) {
+        return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, PAYLOAD_SIZE, RANDOM_SEED, 2*numThreads, false);
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestContext.java
new file mode 100644
index 0000000..bc5312c
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestContext.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Collection;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+
+/**
+ * Base test context bundling a tree index with the serdes, tuple builder,
+ * reusable tuple reference, and accessor used to operate on it. Subclasses
+ * supply the index-specific pieces (e.g. the expected-results collection
+ * and key field count).
+ */
+@SuppressWarnings("rawtypes")
+public abstract class TreeIndexTestContext<T extends CheckTuple> implements ITreeIndexTestContext<T> {
+    protected final ISerializerDeserializer[] fieldSerdes;
+    protected final ITreeIndex treeIndex;
+    // Builder sized to the number of fields described by fieldSerdes.
+    protected final ArrayTupleBuilder tupleBuilder;
+    // Reusable tuple reference; callers reset it before each operation.
+    protected final ArrayTupleReference tuple = new ArrayTupleReference();
+    protected final IIndexAccessor indexAccessor;
+
+    public TreeIndexTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+        this.fieldSerdes = fieldSerdes;
+        this.treeIndex = treeIndex;
+        this.indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        this.tupleBuilder = new ArrayTupleBuilder(fieldSerdes.length);
+    }
+
+    @Override
+    public int getFieldCount() {
+        return fieldSerdes.length;
+    }
+
+    @Override
+    public IIndexAccessor getIndexAccessor() {
+        return indexAccessor;
+    }
+
+    @Override
+    public ArrayTupleReference getTuple() {
+        return tuple;
+    }
+
+    @Override
+    public ArrayTupleBuilder getTupleBuilder() {
+        return tupleBuilder;
+    }
+
+    @Override
+    public ISerializerDeserializer[] getFieldSerdes() {
+        return fieldSerdes;
+    }
+
+    @Override
+    public ITreeIndex getIndex() {
+        return treeIndex;
+    }
+
+    // Records checkTuple in the expected-results collection.
+    @Override
+    public void insertCheckTuple(T checkTuple, Collection<T> checkTuples) {
+        checkTuples.add(checkTuple);
+    }
+
+    // Removes checkTuple from the expected-results collection.
+    @Override
+    public void deleteCheckTuple(T checkTuple, Collection<T> checkTuples) {
+        checkTuples.remove(checkTuple);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java
new file mode 100644
index 0000000..d16553a
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -0,0 +1,283 @@
+package edu.uci.ics.hyracks.storage.am.common;
+
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+/**
+ * Shared utilities for loading, modifying, and verifying tree indexes in
+ * tests. Index-specific behavior (check-tuple creation, search predicates,
+ * result verification) is supplied by concrete subclasses.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class TreeIndexTestUtils {
+    private static final Logger LOGGER = Logger.getLogger(TreeIndexTestUtils.class.getName());
+
+    protected abstract CheckTuple createCheckTuple(int numFields, int numKeyFields);
+
+    // Predicate that matches all tuples; used for full scans.
+    protected abstract ISearchPredicate createNullSearchPredicate();
+
+    // Verifies that the cursor returns exactly the tuples in checkTuples.
+    public abstract void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
+            ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception;
+
+    protected abstract CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields);
+
+    protected abstract void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd);
+
+    protected abstract void setIntPayloadFields(int[] fieldValues, int numKeyFields, int numFields);
+
+    protected abstract Collection createCheckTuplesCollection();
+
+    protected abstract ArrayTupleBuilder createDeleteTupleBuilder(ITreeIndexTestContext ctx);
+
+    // See if tuple with corresponding checkTuple exists in ctx.checkTuples.
+    protected abstract boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple,
+            ITreeIndexTestContext ctx) throws HyracksDataException;
+
+    // Serializes checkTuple's fields into tuple, resetting tupleBuilder first.
+    @SuppressWarnings("unchecked")
+    public static void createTupleFromCheckTuple(CheckTuple checkTuple, ArrayTupleBuilder tupleBuilder,
+            ArrayTupleReference tuple, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+        int fieldCount = tupleBuilder.getFieldEndOffsets().length;
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        for (int i = 0; i < fieldCount; i++) {
+            fieldSerdes[i].serialize(checkTuple.get(i), dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+
+    // Deserializes tuple's fields into a CheckTuple for result comparison.
+    // Only the first min(fieldSerdes.length, tuple.getFieldCount()) fields
+    // are copied.
+    @SuppressWarnings("unchecked")
+    public CheckTuple createCheckTupleFromTuple(ITupleReference tuple, ISerializerDeserializer[] fieldSerdes,
+            int numKeys) throws HyracksDataException {
+        CheckTuple checkTuple = createCheckTuple(fieldSerdes.length, numKeys);
+        int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
+        for (int i = 0; i < fieldCount; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Comparable fieldObj = (Comparable) fieldSerdes[i].deserialize(dataIn);
+            checkTuple.add(fieldObj);
+        }
+        return checkTuple;
+    }
+
+    // Full scan of the index, verified against ctx's expected tuples.
+    @SuppressWarnings("unchecked")
+    public void checkScan(ITreeIndexTestContext ctx) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Scan.");
+        }
+        ITreeIndexCursor scanCursor = (ITreeIndexCursor) ctx.getIndexAccessor().createSearchCursor();
+        ISearchPredicate nullPred = createNullSearchPredicate();
+        ctx.getIndexAccessor().search(scanCursor, nullPred);
+        Iterator<CheckTuple> checkIter = ctx.getCheckTuples().iterator();
+        checkExpectedResults(scanCursor, ctx.getCheckTuples(), ctx.getFieldSerdes(), ctx.getKeyFieldCount(), checkIter);
+    }
+
+    // Disk-order scan verified against ctx's expected tuples. Quietly skipped
+    // for indexes that don't support it (signalled by either exception below).
+    public void checkDiskOrderScan(ITreeIndexTestContext ctx) throws Exception {
+        try {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Testing Disk-Order Scan.");
+            }
+            ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) ctx.getIndexAccessor();
+            ITreeIndexCursor diskOrderCursor = treeIndexAccessor.createDiskOrderScanCursor();
+            treeIndexAccessor.diskOrderScan(diskOrderCursor);
+            int actualCount = 0;
+            try {
+                while (diskOrderCursor.hasNext()) {
+                    diskOrderCursor.next();
+                    ITupleReference tuple = diskOrderCursor.getTuple();
+                    CheckTuple checkTuple = createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(),
+                            ctx.getKeyFieldCount());
+                    if (!checkDiskOrderScanResult(tuple, checkTuple, ctx)) {
+                        fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
+                    }
+                    actualCount++;
+                }
+                if (actualCount < ctx.getCheckTuples().size()) {
+                    fail("Disk-order scan returned fewer answers than expected.\nExpected: "
+                            + ctx.getCheckTuples().size() + "\nActual  : " + actualCount);
+                }
+                if (actualCount > ctx.getCheckTuples().size()) {
+                    fail("Disk-order scan returned more answers than expected.\nExpected: "
+                            + ctx.getCheckTuples().size() + "\nActual  : " + actualCount);
+                }
+            } finally {
+                // Always release the cursor, even when an assertion fails.
+                diskOrderCursor.close();
+            }
+        } catch (UnsupportedOperationException e) {
+            // Ignore exception because some indexes, e.g. the LSMTrees, don't
+            // support disk-order scan.
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring disk-order scan since it's not supported.");
+            }
+        } catch (ClassCastException e) {
+            // Ignore exception because IIndexAccessor sometimes isn't
+            // an ITreeIndexAccessor, e.g., for the LSMBTree.
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring disk-order scan since it's not supported.");
+            }
+        }
+    }
+
+    // Inserts numTuples random integer tuples, mirroring each successful
+    // insert into ctx's expected tuples.
+    @SuppressWarnings("unchecked")
+    public void insertIntTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        int[] fieldValues = new int[ctx.getFieldCount()];
+        // Scale range of values according to number of keys.
+        // For example, for 2 keys we want the square root of numTuples, for 3
+        // keys the cube root of numTuples, etc.
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+            // Set values.
+            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
+            TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            try {
+                ctx.getIndexAccessor().insert(ctx.getTuple());
+                ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+            } catch (TreeIndexException e) {
+                // We set expected values only after insertion succeeds because
+                // we ignore duplicate keys.
+            }
+        }
+    }
+
+    // Upserts numTuples random integer tuples, mirroring each successful
+    // upsert into ctx's expected tuples.
+    @SuppressWarnings("unchecked")
+    public void upsertIntTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        int[] fieldValues = new int[ctx.getFieldCount()];
+        // Scale range of values according to number of keys.
+        // For example, for 2 keys we want the square root of numTuples, for 3
+        // keys the cube root of numTuples, etc.
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+            // Set values.
+            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
+            TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Upserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            try {
+                ctx.getIndexAccessor().upsert(ctx.getTuple());
+                ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+            } catch (TreeIndexException e) {
+                // We set expected values only after insertion succeeds because
+                // we ignore duplicate keys.
+            }
+        }
+    }
+
+    // Bulk loads numTuples random integer tuples and merges them into ctx's
+    // expected tuples afterwards.
+    @SuppressWarnings("unchecked")
+    public void bulkLoadIntTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        int[] fieldValues = new int[ctx.getFieldCount()];
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+            // Set values.
+            setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
+
+            // Set expected values. (We also use these as the pre-sorted stream
+            // for ordered indexes bulk loading).
+            ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
+        }
+        bulkLoadCheckTuples(ctx, tmpCheckTuples);
+
+        // Add tmpCheckTuples to ctx check tuples for comparing searches.
+        for (CheckTuple checkTuple : tmpCheckTuples) {
+            ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
+        }
+    }
+
+    // Bulk loads the given check tuples into ctx's index.
+    public static void bulkLoadCheckTuples(ITreeIndexTestContext ctx, Collection<CheckTuple> checkTuples)
+            throws HyracksDataException, IndexException {
+        int fieldCount = ctx.getFieldCount();
+        int numTuples = checkTuples.size();
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        // Perform bulk load.
+        IIndexBulkLoadContext bulkLoadCtx = ctx.getIndex().beginBulkLoad(0.7f);
+        int c = 1;
+        for (CheckTuple checkTuple : checkTuples) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                // Guard the modulus with Math.min so loads of fewer than 10
+                // tuples don't divide by zero (numTuples >= 1 inside the loop).
+                if (c % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
+                }
+            }
+            createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, ctx.getFieldSerdes());
+            ctx.getIndex().bulkLoadAddTuple(tuple, bulkLoadCtx);
+            c++;
+        }
+        ctx.getIndex().endBulkLoad(bulkLoadCtx);
+    }
+
+    // Deletes up to numTuples randomly chosen existing tuples, removing each
+    // from ctx's expected tuples as it goes.
+    @SuppressWarnings("unchecked")
+    public void deleteTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        ArrayTupleBuilder deleteTupleBuilder = createDeleteTupleBuilder(ctx);
+        ArrayTupleReference deleteTuple = new ArrayTupleReference();
+        int numCheckTuples = ctx.getCheckTuples().size();
+        // Copy CheckTuple references into array, so we can randomly pick from
+        // there.
+        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
+        int idx = 0;
+        Iterator<CheckTuple> iter = ctx.getCheckTuples().iterator();
+        while (iter.hasNext()) {
+            CheckTuple checkTuple = iter.next();
+            checkTuples[idx++] = checkTuple;
+        }
+
+        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
+            CheckTuple checkTuple = checkTuples[checkTupleIdx];
+            createTupleFromCheckTuple(checkTuple, deleteTupleBuilder, deleteTuple, ctx.getFieldSerdes());
+            ctx.getIndexAccessor().delete(deleteTuple);
+
+            // Remove check tuple from expected results.
+            ctx.deleteCheckTuple(checkTuple, ctx.getCheckTuples());
+
+            // Swap with last "valid" CheckTuple.
+            CheckTuple tmp = checkTuples[numCheckTuples - 1];
+            checkTuples[numCheckTuples - 1] = checkTuple;
+            checkTuples[checkTupleIdx] = tmp;
+            numCheckTuples--;
+        }
+    }
+
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java
new file mode 100644
index 0000000..198ac58
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+/**
+ * Exercises the bulk-load path of an R-tree: loads random tuples for a
+ * configurable number of rounds and verifies each round via ordered scan,
+ * disk-order scan, and range search.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeBulkLoadTest extends AbstractRTreeTestDriver {
+
+    private final RTreeTestUtils testUtils = new RTreeTestUtils();
+    private final int rounds;
+
+    public AbstractRTreeBulkLoadTest(int bulkLoadRounds) {
+        this.rounds = bulkLoadRounds;
+    }
+
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception {
+        AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys);
+        for (int round = 0; round < rounds; round++) {
+            loadAndVerifyRound(ctx, fieldSerdes, key);
+        }
+        ctx.getIndex().close();
+    }
+
+    // One bulk-load round followed by the three verification passes.
+    private void loadAndVerifyRound(AbstractRTreeTestContext ctx, ISerializerDeserializer[] fieldSerdes,
+            ITupleReference key) throws Exception {
+        // We assume all fieldSerdes are of the same type. Check the first
+        // one to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            testUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
+        } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
+            testUtils.bulkLoadDoubleTuples(ctx, numTuplesToInsert, getRandom());
+        }
+        testUtils.checkScan(ctx);
+        testUtils.checkDiskOrderScan(ctx);
+        testUtils.checkRangeSearch(ctx, key);
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "BulkLoad";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
new file mode 100644
index 0000000..e70f433
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+/**
+ * Exercises R-tree deletes: repeatedly inserts a batch of random tuples,
+ * then removes them again over several delete rounds, verifying the index
+ * contents after every round.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeDeleteTest extends AbstractRTreeTestDriver {
+
+    private static final int NUM_INSERT_ROUNDS = 2;
+    private static final int NUM_DELETE_ROUNDS = 2;
+
+    private final RTreeTestUtils testUtils;
+
+    public AbstractRTreeDeleteTest() {
+        this.testUtils = new RTreeTestUtils();
+    }
+
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception {
+        AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys);
+        for (int insertRound = 0; insertRound < NUM_INSERT_ROUNDS; insertRound++) {
+            // We assume all fieldSerdes are of the same type. Check the first
+            // one to determine which field types to generate.
+            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+                testUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+            } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
+                testUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
+            }
+            runDeleteRounds(ctx, key);
+        }
+        ctx.getIndex().close();
+    }
+
+    // Deletes the current contents in NUM_DELETE_ROUNDS slices, verifying
+    // scans and range search after each slice.
+    private void runDeleteRounds(AbstractRTreeTestContext ctx, ITupleReference key) throws Exception {
+        int numTuplesPerDeleteRound = (int) Math
+                .ceil((float) ctx.getCheckTuples().size() / (float) NUM_DELETE_ROUNDS);
+        for (int deleteRound = 0; deleteRound < NUM_DELETE_ROUNDS; deleteRound++) {
+            testUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
+            testUtils.checkScan(ctx);
+            testUtils.checkDiskOrderScan(ctx);
+            testUtils.checkRangeSearch(ctx, key);
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Delete";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
new file mode 100644
index 0000000..7192c53
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
@@ -0,0 +1,575 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeExamplesTest {
+    protected static final Logger LOGGER = Logger.getLogger(AbstractRTreeExamplesTest.class.getName());
+    protected final Random rnd = new Random(50);
+
+    protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
+            IPrimitiveValueProviderFactory[] valueProviderFactories) throws TreeIndexException;
+
+    protected abstract int getIndexFileId();
+
+    /**
+     * Two Dimensions Example.
+     * 
+     * Create an RTree index of two dimensions, where they keys are of type
+     * integer, and the payload is two integer values. Fill index with random
+     * values using insertions (not bulk load). Perform scans and range search.
+     */
+    @Test
+    public void twoDimensionsExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            // Fixed: previously logged "Fixed-Length Key,Value Example."
+            // (copy-pasted from the BTree examples), which mislabeled this test.
+            LOGGER.info("Two Dimensions Example.");
+        }
+
+        // Declare fields: 4 integer RTree key fields + 2 integer payload fields.
+        int fieldCount = 6;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[5] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+
+        // Declare RTree keys.
+        int rtreeKeyFieldCount = 4;
+        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // Declare BTree keys, this will only be used for LSMRTree
+        int btreeKeyFieldCount = 6;
+        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            int p1x = rnd.nextInt();
+            int p1y = rnd.nextInt();
+            int p2x = rnd.nextInt();
+            int p2y = rnd.nextInt();
+
+            int pk1 = 5;
+            int pk2 = 10;
+
+            // Normalize the two random points into an MBR (min-x, min-y, max-x, max-y).
+            TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
+                    Math.max(p1y, p2y), pk1, pk2);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + ", " + pk1 + ", " + pk2);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+                // Failed insertions (presumably duplicates) are deliberately
+                // ignored in this best-effort example — TODO confirm.
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        scan(indexAccessor, fieldSerdes);
+        diskOrderScan(indexAccessor, fieldSerdes);
+
+        // Build key.
+        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
+        ArrayTupleReference key = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
+
+        rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
+
+        treeIndex.close();
+    }
+
+    /**
+     * Two Dimensions Example.
+     * 
+     * Create an RTree index of three dimensions, where they keys are of type
+     * double, and the payload is one double value. Fill index with random
+     * values using insertions (not bulk load). Perform scans and range search.
+     */
+    @Test
+    public void threeDimensionsExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Fixed-Length Key,Value Example.");
+        }
+
+        // Declare fields.
+        int fieldCount = 7;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = DoublePointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+
+        // Declare RTree keys.
+        int rtreeKeyFieldCount = 6;
+        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        rtreeCmpFactories[4] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        rtreeCmpFactories[5] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+
+        // Declare RTree keys.
+        int btreeKeyFieldCount = 7;
+        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeCmpFactories[6] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmpFactories.length, DoublePointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            double p1x = rnd.nextDouble();
+            double p1y = rnd.nextDouble();
+            double p1z = rnd.nextDouble();
+            double p2x = rnd.nextDouble();
+            double p2y = rnd.nextDouble();
+            double p2z = rnd.nextDouble();
+
+            double pk = 5.0;
+
+            TupleUtils.createDoubleTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.min(p1z, p2z),
+                    Math.max(p1x, p2x), Math.max(p1y, p2y), Math.max(p1z, p2z), pk);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.min(p1z, p2z) + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " "
+                            + Math.max(p1z, p2z) + ", " + pk);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        scan(indexAccessor, fieldSerdes);
+        diskOrderScan(indexAccessor, fieldSerdes);
+
+        // Build key.
+        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
+        ArrayTupleReference key = new ArrayTupleReference();
+        TupleUtils.createDoubleTuple(keyTb, key, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0);
+
+        rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
+
+        treeIndex.close();
+    }
+
+    /**
+     * Deletion Example.
+     * 
+     * Create an RTree index of two dimensions, where they keys are of type
+     * integer, and the payload is one integer value. Fill index with random
+     * values using insertions, then delete entries one-by-one. Repeat procedure
+     * a few times on same RTree.
+     */
+    @Test
+    public void deleteExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Deletion Example");
+        }
+
+        // Declare fields: 4 integer RTree key fields + 1 integer payload field.
+        int fieldCount = 5;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+
+        // Declare RTree keys.
+        int rtreeKeyFieldCount = 4;
+        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+        rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // Declare BTree keys.
+        int btreeKeyFieldCount = 5;
+        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+
+        int runs = 3;
+        for (int run = 0; run < runs; run++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
+                LOGGER.info("Inserting into tree...");
+            }
+
+            int numInserts = 10000;
+            // Remember each generated tuple so it can be deleted afterwards.
+            int[] p1xs = new int[numInserts];
+            int[] p1ys = new int[numInserts];
+            int[] p2xs = new int[numInserts];
+            int[] p2ys = new int[numInserts];
+            int[] pks = new int[numInserts];
+            int insDone = 0;
+
+            // insDoneCmp[i] = number of successful inserts among tuples 0..i.
+            int[] insDoneCmp = new int[numInserts];
+            for (int i = 0; i < numInserts; i++) {
+                int p1x = rnd.nextInt();
+                int p1y = rnd.nextInt();
+                int p2x = rnd.nextInt();
+                int p2y = rnd.nextInt();
+                int pk = 5;
+
+                p1xs[i] = Math.min(p1x, p2x);
+                p1ys[i] = Math.min(p1y, p2y);
+                p2xs[i] = Math.max(p1x, p2x);
+                p2ys[i] = Math.max(p1y, p2y);
+                pks[i] = pk;
+
+                TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
+                        Math.max(p1y, p2y), pk);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("Inserting " + i);
+                    }
+                }
+                try {
+                    indexAccessor.insert(tuple);
+                    // BUGFIX: count successful insertions. Previously insDone
+                    // was never incremented, so insDoneCmp[] stayed 0 and the
+                    // consistency check below failed after the first
+                    // successful delete.
+                    insDone++;
+                } catch (TreeIndexException e) {
+                    // Failed insertions (presumably duplicates) are expected
+                    // and simply not counted.
+                }
+                insDoneCmp[i] = insDone;
+            }
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deleting from tree...");
+            }
+            int delDone = 0;
+            for (int i = 0; i < numInserts; i++) {
+                TupleUtils.createIntegerTuple(tb, tuple, p1xs[i], p1ys[i], p2xs[i], p2ys[i], pks[i]);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("Deleting " + i);
+                    }
+                }
+                try {
+                    indexAccessor.delete(tuple);
+                    delDone++;
+                } catch (TreeIndexException e) {
+                    // A delete fails exactly when the corresponding insert
+                    // failed; the counters are compared below.
+                }
+                if (insDoneCmp[i] != delDone) {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
+                        LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
+                    }
+                    break;
+                }
+            }
+            if (insDone != delDone) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+                }
+                break;
+            }
+        }
+        treeIndex.close();
+    }
+
+    /**
+     * Bulk load example.
+     * 
+     * Load a tree with 10,000 tuples.
+     */
+    @Test
+    public void bulkLoadExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Bulk load example");
+        }
+        // Declare fields: 4 integer RTree key fields + 1 integer payload field.
+        int fieldCount = 5;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        for (int f = 0; f < fieldCount; f++) {
+            typeTraits[f] = IntegerPointable.TYPE_TRAITS;
+        }
+
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // Declare RTree keys.
+        int rtreeKeyFieldCount = 4;
+        IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+        for (int f = 0; f < rtreeKeyFieldCount; f++) {
+            rtreeCmpFactories[f] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        }
+
+        // Declare BTree keys.
+        int btreeKeyFieldCount = 5;
+        IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+        for (int f = 0; f < btreeKeyFieldCount; f++) {
+            btreeCmpFactories[f] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        }
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+
+        int indexFileId = getIndexFileId();
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+        treeIndex.create(indexFileId);
+        treeIndex.open(indexFileId);
+
+        // Load records.
+        int numInserts = 10000;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Bulk loading " + numInserts + " tuples");
+        }
+        long startTime = System.currentTimeMillis();
+        IIndexBulkLoadContext bulkLoadCtx = treeIndex.beginBulkLoad(0.7f);
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
+        for (int i = 0; i < numInserts; i++) {
+            int x1 = rnd.nextInt();
+            int y1 = rnd.nextInt();
+            int x2 = rnd.nextInt();
+            int y2 = rnd.nextInt();
+            int payload = 5;
+
+            // Normalize the two random points into an MBR (mins then maxes).
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, Math.min(x1, x2), Math.min(y1, y2), Math.max(x1, x2),
+                    Math.max(y1, y2), payload);
+            treeIndex.bulkLoadAddTuple(tuple, bulkLoadCtx);
+        }
+
+        treeIndex.endBulkLoad(bulkLoadCtx);
+        long endTime = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " tuples loaded in " + (endTime - startTime) + "ms");
+        }
+
+        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor();
+
+        // Build key.
+        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
+        ArrayTupleReference key = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
+
+        rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
+
+        treeIndex.close();
+    }
+
+    /**
+     * Scans the entire index and logs every tuple it returns.
+     */
+    private void scan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Scan:");
+        }
+        // A predicate with null key and comparator matches every tuple.
+        SearchPredicate matchAllPred = new SearchPredicate(null, null);
+        ITreeIndexCursor cursor = (ITreeIndexCursor) indexAccessor.createSearchCursor();
+        indexAccessor.search(cursor, matchAllPred);
+        try {
+            while (cursor.hasNext()) {
+                cursor.next();
+                String printed = TupleUtils.printTuple(cursor.getTuple(), fieldSerdes);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(printed);
+                }
+            }
+        } finally {
+            // Always release the cursor, even if iteration throws.
+            cursor.close();
+        }
+    }
+
+    /**
+     * Performs a disk-order scan over the index and logs every tuple.
+     * Indexes that do not support disk-order scans (e.g. the LSMRTree) are
+     * tolerated: the scan is skipped with an informational log message.
+     */
+    private void diskOrderScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
+            throws Exception {
+        try {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Disk-Order Scan:");
+            }
+            ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
+            TreeDiskOrderScanCursor diskOrderCursor = (TreeDiskOrderScanCursor) treeIndexAccessor
+                    .createDiskOrderScanCursor();
+            treeIndexAccessor.diskOrderScan(diskOrderCursor);
+            try {
+                while (diskOrderCursor.hasNext()) {
+                    diskOrderCursor.next();
+                    ITupleReference frameTuple = diskOrderCursor.getTuple();
+                    String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info(rec);
+                    }
+                }
+            } finally {
+                diskOrderCursor.close();
+            }
+        } catch (UnsupportedOperationException | ClassCastException e) {
+            // Merged the two identical handlers into a Java 7 multi-catch
+            // (the build targets source/target 1.7). Either the index doesn't
+            // support disk-order scan (UnsupportedOperationException) or its
+            // accessor isn't an ITreeIndexAccessor at all (ClassCastException),
+            // e.g. for the LSMRTree; both cases are benign here.
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring disk-order scan since it's not supported.");
+            }
+        }
+    }
+
+    /**
+     * Range-searches the index with the given key and logs each match.
+     */
+    private void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
+            ISerializerDeserializer[] fieldSerdes, ITupleReference key) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Range-Search using key: " + TupleUtils.printTuple(key, fieldSerdes));
+        }
+        // Build a comparator restricted to the search key's fields.
+        MultiComparator searchCmp = RTreeUtils.getSearchMultiComparator(cmpFactories, key);
+        SearchPredicate rangePred = new SearchPredicate(key, searchCmp);
+        ITreeIndexCursor cursor = (ITreeIndexCursor) indexAccessor.createSearchCursor();
+        indexAccessor.search(cursor, rangePred);
+        try {
+            while (cursor.hasNext()) {
+                cursor.next();
+                String printed = TupleUtils.printTuple(cursor.getTuple(), fieldSerdes);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(printed);
+                }
+            }
+        } finally {
+            // Always release the cursor, even if iteration throws.
+            cursor.close();
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java
new file mode 100644
index 0000000..cdd6ee0
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+/**
+ * Tests the RTree insert operation with integer and double fields using various
+ * numbers of dimensions and payload fields.
+ * 
+ * Each test first fills an RTree with randomly generated tuples. We compare
+ * the following operations against expected results: 1. RTree scan. 2.
+ * Disk-order scan. 3. Range search.
+ * 
+ */
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeInsertTest extends AbstractRTreeTestDriver {
+
+    // Helper that generates random tuples and verifies index contents.
+    private final RTreeTestUtils rTreeTestUtils;
+
+    public AbstractRTreeInsertTest() {
+        this.rTreeTestUtils = new RTreeTestUtils();
+    }
+
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception {
+        AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys);
+        // We assume all fieldSerdes are of the same type. Check the first one
+        // to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            rTreeTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+        } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
+            rTreeTestUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
+        }
+
+        // Verify the inserted tuples via a full scan, a disk-order scan, and
+        // a range search with the provided key.
+        rTreeTestUtils.checkScan(ctx);
+        rTreeTestUtils.checkDiskOrderScan(ctx);
+        rTreeTestUtils.checkRangeSearch(ctx, key);
+        ctx.getIndex().close();
+    }
+
+    @Override
+    protected String getTestOpName() {
+        // Label used by the test driver when logging which operation runs.
+        return "Insert";
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
new file mode 100644
index 0000000..2c185a5
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.ArrayList;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexMultiThreadTestDriver;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeMultiThreadTest {
+
+    protected final Logger LOGGER = Logger.getLogger(AbstractRTreeMultiThreadTest.class.getName());
+
+    // Machine-specific number of threads to use for testing.
+    protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
+    // Excessive number of threads for testing.
+    protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
+    protected final int NUM_OPERATIONS = 5000;
+
+    // NOTE(review): initialized through an overridable method during field
+    // initialization, i.e. before any subclass constructor runs; subclasses
+    // must not rely on their own constructor state inside getTestWorkloadConf().
+    protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
+
+    // Lifecycle and factory hooks implemented by concrete RTree variants.
+    protected abstract void setUp() throws HyracksException;
+
+    protected abstract void tearDown() throws HyracksDataException;
+
+    protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
+            IPrimitiveValueProviderFactory[] valueProviderFactories) throws TreeIndexException;
+
+    protected abstract int getFileId();
+
+    protected abstract ITreeIndexTestWorkerFactory getWorkerFactory();
+
+    protected abstract ArrayList<TestWorkloadConf> getTestWorkloadConf();
+
+    protected abstract String getIndexTypeName();
+
+    /**
+     * Returns an array assigning every operation the same probability.
+     */
+    protected static float[] getUniformOpProbs(TestOperation[] ops) {
+        float[] opProbs = new float[ops.length];
+        for (int i = 0; i < ops.length; i++) {
+            opProbs[i] = 1.0f / (float) ops.length;
+        }
+        return opProbs;
+    }
+
+    /**
+     * Builds an index for the given field serdes, runs NUM_OPERATIONS index
+     * operations spread over numThreads threads according to conf, then tears
+     * the fixture down again.
+     */
+    protected void runTest(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, int numThreads,
+            TestWorkloadConf conf, String dataMsg) throws HyracksException, InterruptedException, TreeIndexException {
+        setUp();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            String indexTypeName = getIndexTypeName();
+            LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
+                    + "; Workload: " + conf.toString() + ".");
+        }
+
+        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+        // RTree comparators cover the key fields; the btree comparators cover
+        // all fields (keys + payload).
+        IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);
+        IBinaryComparatorFactory[] btreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
+                fieldSerdes.length);
+
+        ITreeIndex index = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+        ITreeIndexTestWorkerFactory workerFactory = getWorkerFactory();
+
+        // 4 batches per thread. Guard against integer division driving the
+        // batch size to 0 when numThreads exceeds NUM_OPERATIONS / 4.
+        int batchSize = Math.max(1, (NUM_OPERATIONS / numThreads) / 4);
+
+        TreeIndexMultiThreadTestDriver driver = new TreeIndexMultiThreadTestDriver(index, workerFactory, fieldSerdes,
+                conf.ops, conf.opProbs);
+        driver.init(getFileId());
+        long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
+        driver.deinit();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("RTree MultiThread Test Time: " + times[0] + "ms");
+        }
+
+        tearDown();
+    }
+
+    @Test
+    public void twoDimensionsInt() throws InterruptedException, HyracksException, TreeIndexException {
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // 2D MBR: 4 key fields plus one payload field.
+        int numKeys = 4;
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                numKeys, IntegerPointable.FACTORY);
+
+        String dataMsg = "Two Dimensions Of Integer Values";
+
+        for (TestWorkloadConf conf : workloadConfs) {
+            runTest(fieldSerdes, valueProviderFactories, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+            runTest(fieldSerdes, valueProviderFactories, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+        }
+    }
+
+    @Test
+    public void fourDimensionsDouble() throws InterruptedException, HyracksException, TreeIndexException {
+        ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+
+        // 4D MBR: 8 key fields plus one payload field.
+        int numKeys = 8;
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                numKeys, DoublePointable.FACTORY);
+
+        String dataMsg = "Four Dimensions Of Double Values";
+
+        for (TestWorkloadConf conf : workloadConfs) {
+            runTest(fieldSerdes, valueProviderFactories, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+            runTest(fieldSerdes, valueProviderFactories, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+        }
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java
new file mode 100644
index 0000000..9affc47
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+
+/**
+ * Base test context for RTree tests: pairs an index under test with the
+ * expected (shadow) tuples inserted so far.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeTestContext extends TreeIndexTestContext<RTreeCheckTuple> {
+    // Expected tuples mirroring the index contents. Kept as an ArrayList
+    // because RTreeTestUtils downcasts the collection returned by
+    // getCheckTuples() back to ArrayList.
+    private final ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<RTreeCheckTuple>();
+
+    public AbstractRTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+        super(fieldSerdes, treeIndex);
+    }
+
+    /** Returns the live expected-tuple collection (not a copy). */
+    @Override
+    public Collection<RTreeCheckTuple> getCheckTuples() {
+        return checkTuples;
+    }
+
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
new file mode 100644
index 0000000..10f4364
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeTestDriver {
+    protected final Logger LOGGER = Logger.getLogger(AbstractRTreeTestDriver.class.getName());
+
+    // Number of randomly generated tuples each concrete driver works with.
+    protected static final int numTuplesToInsert = 5000;
+
+    // Hooks supplied by concrete drivers (e.g. the insert driver).
+    protected abstract AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception;
+
+    protected abstract Random getRandom();
+
+    protected abstract void runTest(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception;
+
+    protected abstract String getTestOpName();
+
+    // Builds an array holding 'count' references to the same serde; every
+    // field in these tests shares one type.
+    private static ISerializerDeserializer[] makeSerdes(ISerializerDeserializer serde, int count) {
+        ISerializerDeserializer[] serdes = new ISerializerDeserializer[count];
+        for (int i = 0; i < count; i++) {
+            serdes[i] = serde;
+        }
+        return serdes;
+    }
+
+    @Test
+    public void twoDimensionsInt() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
+        }
+
+        // 2D MBR: 4 key fields plus one integer payload field.
+        int numKeys = 4;
+        ISerializerDeserializer[] fieldSerdes = makeSerdes(IntegerSerializerDeserializer.INSTANCE, numKeys + 1);
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                numKeys, IntegerPointable.FACTORY);
+        // Search rectangle from bottom-left (-1000, -1000) to top-right (1000, 1000).
+        ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
+
+        runTest(fieldSerdes, valueProviderFactories, numKeys, key);
+    }
+
+    @Test
+    public void twoDimensionsDouble() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
+        }
+
+        // 2D MBR: 4 key fields plus one double payload field.
+        int numKeys = 4;
+        ISerializerDeserializer[] fieldSerdes = makeSerdes(DoubleSerializerDeserializer.INSTANCE, numKeys + 1);
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                numKeys, DoublePointable.FACTORY);
+        // Search rectangle from bottom-left (-1000.0, -1000.0) to top-right (1000.0, 1000.0).
+        ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
+
+        runTest(fieldSerdes, valueProviderFactories, numKeys, key);
+    }
+
+    @Test
+    public void fourDimensionsDouble() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
+        }
+
+        // 4D MBR: 8 key fields plus one double payload field.
+        int numKeys = 8;
+        ISerializerDeserializer[] fieldSerdes = makeSerdes(DoubleSerializerDeserializer.INSTANCE, numKeys + 1);
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                numKeys, DoublePointable.FACTORY);
+        // Search hyper-rectangle from (-1000.0, -1000.0, -1000.0, -1000.0)
+        // to (1000.0, 1000.0, 1000.0, 1000.0).
+        ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
+                1000.0);
+
+        runTest(fieldSerdes, valueProviderFactories, numKeys, key);
+    }
+}
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java
new file mode 100644
index 0000000..98800e5
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class RTreeCheckTuple<T> extends CheckTuple {
+
+    public RTreeCheckTuple(int numFields, int numKeys) {
+        super(numFields, numKeys);
+    }
+
+    /**
+     * Two check tuples are equal when every field compares equal via
+     * compareTo.
+     */
+    @Override
+    public boolean equals(Object o) {
+        // Guard against foreign types instead of blindly casting; the
+        // original implementation threw ClassCastException here.
+        if (!(o instanceof RTreeCheckTuple)) {
+            return false;
+        }
+        RTreeCheckTuple<T> other = (RTreeCheckTuple<T>) o;
+        for (int i = 0; i < tuple.length; i++) {
+            int cmp = tuple[i].compareTo(other.get(i));
+            if (cmp != 0) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Added to honor the equals/hashCode contract (the original overrode
+     * equals only). Hashes exactly the fields that equals compares.
+     */
+    @Override
+    public int hashCode() {
+        int hash = 1;
+        for (int i = 0; i < tuple.length; i++) {
+            hash = 31 * hash + (tuple[i] == null ? 0 : tuple[i].hashCode());
+        }
+        return hash;
+    }
+
+    /**
+     * Rectangle-overlap test. Key fields are laid out as
+     * [min_0..min_{d-1}, max_0..max_{d-1}]; the MBRs intersect iff on every
+     * dimension this.min &lt;= other.max and this.max &gt;= other.min.
+     */
+    public boolean intersect(T o) {
+        RTreeCheckTuple<T> other = (RTreeCheckTuple<T>) o;
+        int maxFieldPos = numKeys / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int cmp = tuple[i].compareTo(other.get(j));
+            if (cmp > 0) {
+                return false;
+            }
+            cmp = tuple[j].compareTo(other.get(i));
+            if (cmp < 0) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java
new file mode 100644
index 0000000..a23f375
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -0,0 +1,239 @@
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+/**
+ * RTree-specific test utilities: random tuple generation, insertion and
+ * bulk-load, plus verification of scans and range searches against a shadow
+ * collection of RTreeCheckTuples.
+ */
+@SuppressWarnings("rawtypes")
+public class RTreeTestUtils extends TreeIndexTestUtils {
+    private static final Logger LOGGER = Logger.getLogger(RTreeTestUtils.class.getName());
+    // Monotonically increasing counters keep generated payload fields unique.
+    private int intPayloadValue = 0;
+    private double doublePayloadValue = 0.0;
+
+    /**
+     * Returns a new list with the subset of checkTuples whose MBR intersects
+     * the given key rectangle.
+     */
+    @SuppressWarnings("unchecked")
+    public ArrayList<RTreeCheckTuple> getRangeSearchExpectedResults(ArrayList<RTreeCheckTuple> checkTuples,
+            RTreeCheckTuple key) {
+        ArrayList<RTreeCheckTuple> expectedResult = new ArrayList<RTreeCheckTuple>();
+        for (RTreeCheckTuple t : checkTuples) {
+            if (t.intersect(key)) {
+                expectedResult.add(t);
+            }
+        }
+        return expectedResult;
+    }
+
+    /**
+     * Runs a range search with the given key and compares the cursor output
+     * against the intersecting subset of the expected tuples.
+     */
+    public void checkRangeSearch(ITreeIndexTestContext ictx, ITupleReference key) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Range Search.");
+        }
+        AbstractRTreeTestContext ctx = (AbstractRTreeTestContext) ictx;
+        MultiComparator cmp = RTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), key);
+
+        ITreeIndexCursor searchCursor = (ITreeIndexCursor) ctx.getIndexAccessor().createSearchCursor();
+        SearchPredicate searchPred = new SearchPredicate(key, cmp);
+        ctx.getIndexAccessor().search(searchCursor, searchPred);
+
+        // Get the subset of elements from the expected set within given key
+        // range.
+        RTreeCheckTuple keyCheck = (RTreeCheckTuple) createCheckTupleFromTuple(key, ctx.getFieldSerdes(),
+                cmp.getKeyFieldCount());
+        ArrayList<RTreeCheckTuple> expectedResult = getRangeSearchExpectedResults(
+                (ArrayList<RTreeCheckTuple>) ctx.getCheckTuples(), keyCheck);
+        checkExpectedResults(searchCursor, expectedResult, ctx.getFieldSerdes(), ctx.getKeyFieldCount(), null);
+    }
+
+    /**
+     * Inserts numTuples randomly generated double tuples and records each
+     * successful insert in the context's check tuples.
+     */
+    @SuppressWarnings("unchecked")
+    public void insertDoubleTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        double[] fieldValues = new double[ctx.getFieldCount()];
+        // Scale range of values according to number of keys.
+        // For example, for 2 keys we want the square root of numTuples, for 3
+        // keys the cube root of numTuples, etc.
+        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+            // Set values.
+            setDoublePayloadFields(fieldValues, numKeyFields, fieldCount);
+            TupleUtils.createDoubleTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            try {
+                ctx.getIndexAccessor().insert(ctx.getTuple());
+                ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+            } catch (TreeIndexException e) {
+                // Deliberately ignored: expected values are only recorded for
+                // inserts that succeed, so duplicate keys are skipped.
+            }
+        }
+    }
+
+    /**
+     * Fills the first numKeyFields entries with a random MBR laid out as
+     * [min_0..min_{d-1}, max_0..max_{d-1}], with coordinates in [0, maxValue).
+     */
+    private void setDoubleKeyFields(double[] fieldValues, int numKeyFields, double maxValue, Random rnd) {
+        int maxFieldPos = numKeyFields / 2;
+        for (int j = 0; j < maxFieldPos; j++) {
+            int k = maxFieldPos + j;
+            // Scale the raw [0, 1) value into [0, maxValue). The previous
+            // 'nextDouble() % maxValue' was a no-op for maxValue >= 1 and
+            // silently ignored the scaling factor.
+            double firstValue = rnd.nextDouble() * maxValue;
+            double secondValue;
+            do {
+                secondValue = rnd.nextDouble() * maxValue;
+            } while (secondValue < firstValue);
+            fieldValues[j] = firstValue;
+            fieldValues[k] = secondValue;
+        }
+    }
+
+    // Assigns unique, increasing payload values to the non-key fields.
+    private void setDoublePayloadFields(double[] fieldValues, int numKeyFields, int numFields) {
+        for (int j = numKeyFields; j < numFields; j++) {
+            fieldValues[j] = doublePayloadValue++;
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    protected CheckTuple createDoubleCheckTuple(double[] fieldValues, int numKeyFields) {
+        RTreeCheckTuple<Double> checkTuple = new RTreeCheckTuple<Double>(fieldValues.length, numKeyFields);
+        for (double v : fieldValues) {
+            checkTuple.add(v);
+        }
+        return checkTuple;
+    }
+
+    /**
+     * Bulk loads numTuples randomly generated double tuples, then merges the
+     * temporary expected set into the context's check tuples.
+     */
+    @SuppressWarnings("unchecked")
+    public void bulkLoadDoubleTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = ctx.getFieldCount();
+        int numKeyFields = ctx.getKeyFieldCount();
+        double[] fieldValues = new double[ctx.getFieldCount()];
+        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+            // Set values.
+            setDoublePayloadFields(fieldValues, numKeyFields, fieldCount);
+
+            // Set expected values.
+            ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
+        }
+        bulkLoadCheckTuples(ctx, tmpCheckTuples);
+
+        // Add tmpCheckTuples to ctx check tuples for comparing searches.
+        for (CheckTuple checkTuple : tmpCheckTuples) {
+            ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
+        }
+    }
+
+    /**
+     * RTree results carry no defined order, so membership (not order) is
+     * verified and checkIter is unused here. Cardinality is enforced in both
+     * directions.
+     */
+    @Override
+    public void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
+            ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+        int actualCount = 0;
+        try {
+            while (cursor.hasNext()) {
+                cursor.next();
+                ITupleReference tuple = cursor.getTuple();
+                RTreeCheckTuple checkTuple = (RTreeCheckTuple) createCheckTupleFromTuple(tuple, fieldSerdes,
+                        keyFieldCount);
+                if (!checkTuples.contains(checkTuple)) {
+                    fail("Scan or range search returned unexpected answer: " + checkTuple.toString());
+                }
+                actualCount++;
+            }
+            if (actualCount < checkTuples.size()) {
+                fail("Scan or range search returned fewer answers than expected.\nExpected: " + checkTuples.size()
+                        + "\nActual  : " + actualCount);
+            }
+            if (actualCount > checkTuples.size()) {
+                fail("Scan or range search returned more answers than expected.\nExpected: " + checkTuples.size()
+                        + "\nActual  : " + actualCount);
+            }
+        } finally {
+            cursor.close();
+        }
+    }
+
+    @Override
+    protected CheckTuple createCheckTuple(int numFields, int numKeyFields) {
+        return new RTreeCheckTuple(numFields, numKeyFields);
+    }
+
+    @Override
+    protected ISearchPredicate createNullSearchPredicate() {
+        return new SearchPredicate(null, null);
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
+        RTreeCheckTuple<Integer> checkTuple = new RTreeCheckTuple<Integer>(fieldValues.length, numKeyFields);
+        for (int v : fieldValues) {
+            checkTuple.add(v);
+        }
+        return checkTuple;
+    }
+
+    /**
+     * Integer analogue of setDoubleKeyFields. Here 'nextInt() % maxValue'
+     * does scale, yielding values in (-maxValue, maxValue); negative
+     * coordinates appear to be acceptable for these tests -- confirm before
+     * changing.
+     */
+    @Override
+    protected void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd) {
+        int maxFieldPos = numKeyFields / 2;
+        for (int j = 0; j < maxFieldPos; j++) {
+            int k = maxFieldPos + j;
+            int firstValue = rnd.nextInt() % maxValue;
+            int secondValue;
+            do {
+                secondValue = rnd.nextInt() % maxValue;
+            } while (secondValue < firstValue);
+            fieldValues[j] = firstValue;
+            fieldValues[k] = secondValue;
+        }
+    }
+
+    // Assigns unique, increasing payload values to the non-key fields.
+    @Override
+    protected void setIntPayloadFields(int[] fieldValues, int numKeyFields, int numFields) {
+        for (int j = numKeyFields; j < numFields; j++) {
+            fieldValues[j] = intPayloadValue++;
+        }
+    }
+
+    @Override
+    protected Collection createCheckTuplesCollection() {
+        return new ArrayList<RTreeCheckTuple>();
+    }
+
+    @Override
+    protected ArrayTupleBuilder createDeleteTupleBuilder(ITreeIndexTestContext ctx) {
+        // Deletes carry the full tuple (all fields), not just the keys.
+        return new ArrayTupleBuilder(ctx.getFieldCount());
+    }
+
+    @Override
+    protected boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, ITreeIndexTestContext ctx)
+            throws HyracksDataException {
+        return ctx.getCheckTuples().contains(checkTuple);
+    }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/CounterContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/CounterContext.java
similarity index 100%
rename from hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/CounterContext.java
rename to hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/CounterContext.java
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestIndexRegistryProvider.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestIndexRegistryProvider.java
new file mode 100644
index 0000000..27d50f5
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestIndexRegistryProvider.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.test.support;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+
+/**
+ * IIndexRegistryProvider for tests; delegates registry lookup to the static
+ * TestStorageManagerComponentHolder.
+ */
+public class TestIndexRegistryProvider implements IIndexRegistryProvider<IIndex> {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public IndexRegistry<IIndex> getRegistry(IHyracksTaskContext ctx) {
+        return TestStorageManagerComponentHolder.getIndexRegistry(ctx);
+    }
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java
similarity index 100%
rename from hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java
rename to hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java
similarity index 100%
rename from hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java
rename to hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestRootContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestRootContext.java
new file mode 100644
index 0000000..2fe5ded
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestRootContext.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.test.support;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Executors;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.io.IODeviceHandle;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+
+public class TestRootContext implements IHyracksRootContext { // minimal root context for unit tests: one IO device, no cluster info
+    private IOManager ioManager;
+
+    public TestRootContext() throws HyracksException { // single IO device rooted at java.io.tmpdir
+        List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
+        devices.add(new IODeviceHandle(new File(System.getProperty("java.io.tmpdir")), "."));
+        ioManager = new IOManager(devices, Executors.newCachedThreadPool()); // unbounded pool; acceptable for short-lived tests
+    }
+
+    @Override
+    public IIOManager getIOManager() {
+        return ioManager;
+    }
+
+    @Override
+    public Map<String, NodeControllerInfo> getNodeControllerInfos() throws Exception {
+        return null; // NOTE(review): returns null rather than an empty map — confirm callers tolerate this
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java
new file mode 100644
index 0000000..ce5e989
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.test.support;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Executors;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.io.IODeviceHandle;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.smi.TransientFileMapManager;
+
+public class TestStorageManagerComponentHolder { // lazily-built storage components shared by all tests in the JVM
+    private static IBufferCache bufferCache;
+    private static IFileMapProvider fileMapProvider;
+    private static IndexRegistry<IIndex> indexRegistry;
+    private static IOManager ioManager;
+
+    private static int pageSize;
+    private static int numPages;
+    private static int maxOpenFiles;
+
+    public static void init(int pageSize, int numPages, int maxOpenFiles) { // NOTE(review): not synchronized, unlike the getters — call before any concurrent use
+        TestStorageManagerComponentHolder.pageSize = pageSize;
+        TestStorageManagerComponentHolder.numPages = numPages;
+        TestStorageManagerComponentHolder.maxOpenFiles = maxOpenFiles;
+        bufferCache = null;
+        fileMapProvider = null;
+        indexRegistry = null; // NOTE(review): ioManager is not reset here — confirm that is intentional
+    }
+
+    public synchronized static IBufferCache getBufferCache(IHyracksTaskContext ctx) { // lazy singleton; class lock guards all getters
+        if (bufferCache == null) {
+            ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+            IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
+            IFileMapProvider fileMapProvider = getFileMapProvider(ctx); // shadows the static field of the same name
+            bufferCache = new BufferCache(ctx.getIOManager(), allocator, prs, new DelayPageCleanerPolicy(1000),
+                    (IFileMapManager) fileMapProvider, pageSize, numPages, maxOpenFiles);
+        }
+        return bufferCache;
+    }
+
+    public synchronized static IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) { // ctx unused
+        if (fileMapProvider == null) {
+            fileMapProvider = new TransientFileMapManager();
+        }
+        return fileMapProvider;
+    }
+
+    public synchronized static IndexRegistry<IIndex> getIndexRegistry(IHyracksTaskContext ctx) { // ctx unused
+        if (indexRegistry == null) {
+            indexRegistry = new IndexRegistry<IIndex>();
+        }
+        return indexRegistry;
+    }
+
+    public synchronized static IOManager getIOManager() throws HyracksException { // lazy; device dir lives under java.io.tmpdir
+        if (ioManager == null) {
+            List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
+            devices.add(new IODeviceHandle(new File(System.getProperty("java.io.tmpdir")), "iodev_test_wa"));
+            ioManager = new IOManager(devices, Executors.newCachedThreadPool());
+        }
+        return ioManager;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerInterface.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerInterface.java
new file mode 100644
index 0000000..4059ef0
--- /dev/null
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerInterface.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.test.support;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public class TestStorageManagerInterface implements IStorageManagerInterface { // test double: both lookups delegate to the shared component holder
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
+        return TestStorageManagerComponentHolder.getBufferCache(ctx);
+    }
+
+    @Override
+    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
+        return TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+    }
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
similarity index 100%
rename from hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
rename to hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestUtils.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestUtils.java
similarity index 100%
rename from hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestUtils.java
rename to hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestUtils.java
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
new file mode 100644
index 0000000..ebd8bcc
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -0,0 +1,50 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-btree-test</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-am-btree-test</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-tests</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-btree</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-test-support</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java
new file mode 100644
index 0000000..11c47c7
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexBulkLoadTest;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class BTreeBulkLoadTest extends OrderedIndexBulkLoadTest { // runs the generic bulk-load suite against a BTree
+
+    public BTreeBulkLoadTest() {
+        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST, 1); // second arg presumably the bulk-load round count — TODO confirm against superclass
+    }
+
+    private final BTreeTestHarness harness = new BTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+            BTreeLeafFrameType leafType) throws Exception { // factory hook: builds a BTree-backed context from the harness state
+        return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+                leafType);
+    }
+
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java
new file mode 100644
index 0000000..0205540
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexDeleteTest;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class BTreeDeleteTest extends OrderedIndexDeleteTest { // runs the generic delete suite against a BTree
+
+    public BTreeDeleteTest() {
+        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST); // one run per supported leaf frame type
+    }
+
+    private final BTreeTestHarness harness = new BTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+            BTreeLeafFrameType leafType) throws Exception { // factory hook: builds a BTree-backed context from the harness state
+        return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+                leafType);
+    }
+
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
new file mode 100644
index 0000000..f4f8b12f
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexExamplesTest;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+public class BTreeExamplesTest extends OrderedIndexExamplesTest { // runs the generic examples suite against a regular-NSM BTree
+    private final BTreeTestHarness harness = new BTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+    
+    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories) throws TreeIndexException { // NOTE(review): likely overrides a base-class hook but lacks @Override — confirm
+        return BTreeUtils.createBTree(harness.getBufferCache(), harness.getOpCallback(), typeTraits, cmpFactories,
+                BTreeLeafFrameType.REGULAR_NSM);
+    }
+    
+    protected int getIndexFileId() { // NOTE(review): same — @Override missing; confirm superclass signature
+        return harness.getBTreeFileId();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java
new file mode 100644
index 0000000..0b6cf4d
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexInsertTest;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+/**
+ * Tests the BTree insert operation with strings and integer fields using
+ * various numbers of key and payload fields.
+ * 
+ * Each tests first fills a BTree with randomly generated tuples. We compare the
+ * following operations against expected results: 1. Point searches for all
+ * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
+ * search for composite keys).
+ * 
+ */
+@SuppressWarnings("rawtypes")
+public class BTreeInsertTest extends OrderedIndexInsertTest { // runs the generic insert suite against a BTree
+
+    public BTreeInsertTest() {
+        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST); // one run per supported leaf frame type
+    }
+
+    private final BTreeTestHarness harness = new BTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+            BTreeLeafFrameType leafType) throws Exception { // factory hook: builds a BTree-backed context from the harness state
+        return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+                leafType);
+    }
+
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java
new file mode 100644
index 0000000..4003cf1
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java
@@ -0,0 +1,421 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Random;
+import java.util.TreeSet;
+import java.util.logging.Level;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class BTreeSearchCursorTest extends AbstractBTreeTest { // range-search cursor tests over int-keyed BTrees
+    // Declare fields
+    int fieldCount = 2;
+    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+
+    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits); // NOTE(review): captures typeTraits before setUp() fills it — works only because the array reference is shared
+    ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+    ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+    Random rnd = new Random(50); // fixed seed for reproducible key sequences
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        super.setUp();
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+    }
+
+    @Test
+    public void uniqueIndexTest() throws Exception { // 50 distinct int keys, single-field key; searches over [-100, 100]
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
+        }
+
+        IBufferCache bufferCache = harness.getBufferCache();
+        int btreeFileId = harness.getBTreeFileId();
+        
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+
+        // generate keys
+        int numKeys = 50;
+        int maxKey = 1000;
+        TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
+        ArrayList<Integer> keys = new ArrayList<Integer>();
+        while (uniqueKeys.size() < numKeys) {
+            int key = rnd.nextInt() % maxKey; // % keeps the sign of nextInt(): keys range over (-maxKey, maxKey)
+            uniqueKeys.add(key);
+        }
+        for (Integer i : uniqueKeys) {
+            keys.add(i); // keys end up sorted ascending (TreeSet iteration order)
+        }
+
+        // insert keys into btree
+        for (int i = 0; i < keys.size(); i++) {
+
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+
+            try {
+                indexAccessor.insert(tuple);
+            } catch (BTreeException e) { // NOTE(review): silently swallowed — keys are unique here, so this would mask a real failure; confirm
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
+        int minSearchKey = -100;
+        int maxSearchKey = 100;
+
+        // forward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false); // NOTE(review): repeats the (true, true) case — (false, false) was likely intended
+
+        btree.close(); // NOTE(review): not in a finally block — skipped if a search throws
+    }
+
+    @Test
+    public void nonUniqueIndexTest() throws Exception { // 50 keys drawn from a small domain so duplicates occur; composite 2-field key
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
+        }
+
+        IBufferCache bufferCache = harness.getBufferCache();
+        int btreeFileId = harness.getBTreeFileId();
+        
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY); // second key field disambiguates duplicate first fields
+
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+
+        // generate keys
+        int numKeys = 50;
+        int maxKey = 10;
+        ArrayList<Integer> keys = new ArrayList<Integer>();
+        for (int i = 0; i < numKeys; i++) {
+            int k = rnd.nextInt() % maxKey; // % keeps the sign of nextInt(): values range over (-maxKey, maxKey)
+            keys.add(k);
+        }
+        Collections.sort(keys);
+
+        // insert keys into btree
+        for (int i = 0; i < keys.size(); i++) {
+
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i); // i as second field makes each composite key unique
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+
+            try {
+                indexAccessor.insert(tuple);
+            } catch (BTreeException e) { // NOTE(review): silently swallowed — composite keys are unique, so this would mask a real failure; confirm
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
+        int minSearchKey = -100;
+        int maxSearchKey = 100;
+
+        // forward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false); // NOTE(review): repeats the (true, true) case — (false, false) was likely intended
+
+        btree.close(); // NOTE(review): not in a finally block — skipped if a search throws
+    }
+
+    /**
+     * Range-search cursor test over a non-unique composite-key index: 50 keys
+     * drawn from a small domain (so duplicate first-column values are expected)
+     * are made unique by a second key column carrying the insertion ordinal.
+     *
+     * NOTE(review): despite the method name and log banner, this builds plain
+     * NSM frames (BTreeNSMLeafFrameFactory) rather than field-prefix compressed
+     * frames -- confirm whether a field-prefix frame factory was intended.
+     */
+    @Test
+    public void nonUniqueFieldPrefixIndexTest() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
+        }
+
+        IBufferCache bufferCache = harness.getBufferCache();
+        int btreeFileId = harness.getBTreeFileId();
+
+        // Declare keys: both fields participate in the composite key.
+        int keyFieldCount = 2;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+
+        // Generate 50 sorted keys; rnd.nextInt() % 10 yields values in (-10, 10),
+        // so duplicates in the first key column are intended.
+        int numKeys = 50;
+        int maxKey = 10;
+        ArrayList<Integer> keys = new ArrayList<Integer>();
+        for (int i = 0; i < numKeys; i++) {
+            int k = rnd.nextInt() % maxKey;
+            keys.add(k);
+        }
+        Collections.sort(keys);
+
+        // Insert (key, ordinal) tuples; the ordinal keeps each composite key unique.
+        for (int i = 0; i < keys.size(); i++) {
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+            try {
+                indexAccessor.insert(tuple);
+            } catch (BTreeException e) {
+                // Tolerate duplicate-key rejections from the tree; any other
+                // exception now propagates and fails the test instead of being
+                // silently printed (the original swallowed-and-printed them).
+            }
+        }
+
+        int minSearchKey = -100;
+        int maxSearchKey = 100;
+
+        // Cross-check the range cursor against the brute-force oracle for several
+        // inclusiveness combinations, and actually fail the test on a mismatch
+        // (the original discarded the boolean results).
+        // NOTE(review): the fourth call repeats (true, true); it was probably
+        // meant to be (false, false) to cover the exclusive/exclusive case.
+        boolean ok = true;
+        ok &= performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
+        ok &= performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false);
+        ok &= performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false);
+        ok &= performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
+        if (!ok) {
+            throw new AssertionError("Range search results did not match expected results.");
+        }
+
+        btree.close();
+    }
+
+    /**
+     * Builds a single-column integer {@link RangePredicate} covering [lk, hk]
+     * with the given end-point inclusiveness. The same one-field comparator is
+     * used for both the low-key and the high-key comparison.
+     */
+    public RangePredicate createRangePredicate(int lk, int hk, boolean lowKeyInclusive,
+            boolean highKeyInclusive) throws HyracksDataException {
+        // Serialize the two integer bounds into single-field search-key tuples.
+        ITupleReference low = TupleUtils.createIntegerTuple(lk);
+        ITupleReference high = TupleUtils.createIntegerTuple(hk);
+
+        // One-column comparator over the integer key field.
+        MultiComparator cmp = new MultiComparator(new IBinaryComparator[] {
+                PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator() });
+
+        return new RangePredicate(low, high, lowKeyInclusive, highKeyInclusive, cmp, cmp);
+    }
+
+    /**
+     * Brute-force oracle for a range search over [lk, hk] with configurable
+     * end-point inclusiveness: linearly scans the (sorted) key list and appends
+     * every matching key to expectedResults. Duplicates in keys are preserved.
+     */
+    public void getExpectedResults(ArrayList<Integer> expectedResults, ArrayList<Integer> keys, int lk, int hk,
+            boolean lowKeyInclusive, boolean highKeyInclusive) {
+        // Empty range: inverted bounds, or equal bounds with an open end point.
+        if (lk > hk || (lk == hk && !(lowKeyInclusive && highKeyInclusive))) {
+            return;
+        }
+
+        for (int key : keys) {
+            boolean onLowBound = (key == lk && lowKeyInclusive);
+            boolean onHighBound = (key == hk && highKeyInclusive);
+            boolean strictlyInside = (lk < key && key < hk);
+            if (onLowBound || onHighBound || strictlyInside) {
+                expectedResults.add(key);
+            }
+        }
+    }
+
+    /**
+     * Exhaustively cross-checks the BTree range-search cursor against the
+     * brute-force oracle {@code getExpectedResults}.
+     *
+     * For every (lowKey, highKey) pair on the grid [minKey, maxKey) x
+     * [minKey, maxKey), a range search is executed and the first-field values it
+     * returns are compared, in order, against the expected list.
+     *
+     * @param keys                 sorted list of keys previously inserted into the tree
+     * @param btree                the tree under test
+     * @param leafFrame            leaf frame used to construct the range cursor
+     * @param interiorFrame        not used by this method; kept for caller symmetry
+     * @param minKey               inclusive lower bound of the probed key grid
+     * @param maxKey               exclusive upper bound of the probed key grid
+     * @param lowKeyInclusive      whether each probed range includes its low end point
+     * @param highKeyInclusive     whether each probed range includes its high end point
+     * @param printExpectedResults when true, logs each non-empty expected result set
+     * @return true if every probed range matched; false on the first mismatch
+     *         (details are logged at INFO level)
+     */
+    public boolean performSearches(ArrayList<Integer> keys, BTree btree, IBTreeLeafFrame leafFrame,
+            IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean lowKeyInclusive,
+            boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
+
+        // Scratch lists, reused (cleared) for every probed range.
+        ArrayList<Integer> results = new ArrayList<Integer>();
+        ArrayList<Integer> expectedResults = new ArrayList<Integer>();
+
+        for (int i = minKey; i < maxKey; i++) {
+            for (int j = minKey; j < maxKey; j++) {
+
+                results.clear();
+                expectedResults.clear();
+
+                int lowKey = i;
+                int highKey = j;
+
+                // Fresh cursor + predicate + accessor per probe.
+                ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
+                RangePredicate rangePred = createRangePredicate(lowKey, highKey, lowKeyInclusive,
+                        highKeyInclusive);
+                ITreeIndexAccessor indexAccessor = btree.createAccessor();
+                indexAccessor.search(rangeCursor, rangePred);
+
+                try {
+                    // Drain the cursor, deserializing only the first (key) field.
+                    while (rangeCursor.hasNext()) {
+                        rangeCursor.next();
+                        ITupleReference frameTuple = rangeCursor.getTuple();
+                        ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(0),
+                                frameTuple.getFieldStart(0), frameTuple.getFieldLength(0));
+                        DataInput dataIn = new DataInputStream(inStream);
+                        Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
+                        results.add(res);
+                    }
+                } catch (Exception e) {
+                    // NOTE(review): swallowing here means a cursor failure only
+                    // surfaces as a result-count mismatch below -- consider rethrowing.
+                    e.printStackTrace();
+                } finally {
+                    rangeCursor.close();
+                }
+
+                // Compute the brute-force expected answer for this range.
+                getExpectedResults(expectedResults, keys, lowKey, highKey, lowKeyInclusive, highKeyInclusive);
+
+                // Optional diagnostic dump of the expected result set.
+                if (printExpectedResults) {
+                    if (expectedResults.size() > 0) {
+                        char l, u;
+
+                        if (lowKeyInclusive)
+                            l = '[';
+                        else
+                            l = '(';
+
+                        if (highKeyInclusive)
+                            u = ']';
+                        else
+                            u = ')';
+
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
+                        }
+                        StringBuilder strBuilder = new StringBuilder();
+                        for (Integer r : expectedResults) {
+                            strBuilder.append(r + " ");
+                        }
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(strBuilder.toString());
+                        }
+                    }
+                }
+
+                // Element-by-element comparison; log and bail out on first mismatch.
+                if (results.size() == expectedResults.size()) {
+                    for (int k = 0; k < results.size(); k++) {
+                        if (!results.get(k).equals(expectedResults.get(k))) {
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
+                                LOGGER.info(results.get(k) + " " + expectedResults.get(k));
+                            }
+                            return false;
+                        }
+                    }
+                } else {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
+                        LOGGER.info("RESULTS: " + results.size());
+                        LOGGER.info("EXPECTED RESULTS: " + expectedResults.size());
+                    }
+                    return false;
+                }
+            }
+        }
+
+        return true;
+    }
+}
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
new file mode 100644
index 0000000..c33b4e9
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -0,0 +1,163 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexBufferCacheWarmup;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+/**
+ * Builds a BTree of random (int key, int payload) records and then runs the
+ * tree-stats gatherer and the buffer-cache warmup utility against it. The
+ * gathered statistics are only logged; the test passes as long as nothing
+ * throws.
+ */
+@SuppressWarnings("rawtypes")
+public class BTreeStatsTest extends AbstractBTreeTest {
+    private static final int PAGE_SIZE = 4096;
+    private static final int NUM_PAGES = 1000;
+    private static final int MAX_OPEN_FILES = 10;
+    // Deliberately tiny task frame; each appended frame carries a single tuple.
+    private static final int HYRACKS_FRAME_SIZE = 128;
+    private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+    @Test
+    public void test01() throws Exception {
+
+        // Provision a buffer cache and register/open the backing file.
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(harness.getFileName()));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields: one integer key, one integer payload
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // declare keys: only the first field is a key
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // Frame factories for leaf, interior, and metadata pages.
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
+        btree.open(fileId);
+
+        // Fixed seed keeps the generated workload reproducible across runs.
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        long start = System.currentTimeMillis();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("INSERTING INTO TREE");
+        }
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+        // Insert 100,000 random (key, 5) records; duplicate keys are rejected
+        // by the tree and silently tolerated below.
+        for (int i = 0; i < 100000; i++) {
+
+            // rnd.nextInt() % 100000 can be negative; that is fine for this test.
+            int f0 = rnd.nextInt() % 100000;
+            int f1 = 5;
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            // Stage the tuple in a frame and point the tuple reference at it.
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            // Progress log every 10,000 inserts.
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 10000 == 0) {
+                    long end = System.currentTimeMillis();
+                    LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
+                }
+            }
+
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+                // Expected for duplicate keys; deliberately ignored.
+            } catch (Exception e) {
+                // NOTE(review): unexpected failures are only printed here, so the
+                // test would still pass -- consider rethrowing.
+                e.printStackTrace();
+            }
+        }
+
+        // Gather and log page/tuple statistics for the whole tree.
+        TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
+                btree.getRootPageId());
+        TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("\n" + stats.toString());
+        }
+
+        // Exercise the buffer-cache warmup path over the same file.
+        TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager,
+                fileId);
+        bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });
+
+        // Release resources in reverse order of acquisition.
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
new file mode 100644
index 0000000..2b03a6a
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
@@ -0,0 +1,154 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+/**
+ * Tests in-place update of BTree tuples through an exclusive-latching range
+ * scan ("update scan") over fixed-length (int key, int value) records, then
+ * logs an ordered re-scan of the tree.
+ *
+ * NOTE(review): the verification scan only logs the tuples; it does not assert
+ * that every value field was actually changed to 10.
+ */
+@SuppressWarnings("rawtypes")
+public class BTreeUpdateSearchTest extends AbstractBTreeTest {
+
+    // Update scan test on fixed-length tuples.
+    @Test
+    public void test01() throws Exception {
+        IBufferCache bufferCache = harness.getBufferCache();
+        int btreeFileId = harness.getBTreeFileId();
+
+        // declare fields: one integer key, one integer value
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // declare keys: only the first field is a key
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // Serde pair used only for printing tuples in the verification scan.
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        // Fixed seed keeps the generated workload reproducible across runs.
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        long start = System.currentTimeMillis();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("INSERTING INTO TREE");
+        }
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference insertTuple = new ArrayTupleReference();
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+
+        // Insert 10,000 random (key, 5) records; duplicate keys are rejected
+        // by the tree and silently tolerated below.
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            int f0 = rnd.nextInt() % 10000;
+            int f1 = 5;
+            TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 10000 == 0) {
+                    long end = System.currentTimeMillis();
+                    LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
+                }
+            }
+
+            try {
+                indexAccessor.insert(insertTuple);
+            } catch (TreeIndexException e) {
+                // Expected for duplicate keys; deliberately ignored.
+            } catch (Exception e) {
+                // NOTE(review): unexpected failures are only printed here, so the
+                // test would still pass -- consider rethrowing.
+                e.printStackTrace();
+            }
+        }
+        long end = System.currentTimeMillis();
+        long duration = end - start;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("DURATION: " + duration);
+        }
+
+        // Update scan: overwrite each tuple's value field in place.
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("UPDATE SCAN:");
+        }
+        // Set the cursor to X latch nodes.
+        ITreeIndexCursor updateScanCursor = new BTreeRangeSearchCursor(leafFrame, true);
+        RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
+        indexAccessor.search(updateScanCursor, nullPred);
+        try {
+            while (updateScanCursor.hasNext()) {
+                updateScanCursor.next();
+                ITupleReference tuple = updateScanCursor.getTuple();
+                // Change the value field (field 1); the key field is left intact.
+                IntegerSerializerDeserializer.putInt(10, tuple.getFieldData(1), tuple.getFieldStart(1));
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            updateScanCursor.close();
+        }
+
+        // Ordered scan to verify the values.
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("ORDERED SCAN:");
+        }
+        // Set the cursor to X latch nodes.
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame, true);
+        indexAccessor.search(scanCursor, nullPred);
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference tuple = scanCursor.getTuple();
+                String rec = TupleUtils.printTuple(tuple, recDescSers);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+        btree.close();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java
new file mode 100644
index 0000000..c3b56d5
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexUpdateTest;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+/**
+ * Runs the shared {@code OrderedIndexUpdateTest} suite against a BTree for
+ * every leaf-frame type exposed by the test harness.
+ */
+@SuppressWarnings("rawtypes")
+public class BTreeUpdateTest extends OrderedIndexUpdateTest {
+
+    /** Per-test harness that provisions the buffer cache and the BTree file. */
+    private final BTreeTestHarness harness = new BTreeTestHarness();
+
+    public BTreeUpdateTest() {
+        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
+    }
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    /** Builds a BTree-backed test context for the requested leaf-frame type. */
+    @Override
+    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+            BTreeLeafFrameType leafType) throws Exception {
+        return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+                leafType);
+    }
+
+    /** Exposes the harness's seeded random source to the shared suite. */
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java
new file mode 100644
index 0000000..6e14607
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+/**
+ * Runs the shared {@code OrderedIndexUpsertTest} suite against a BTree for
+ * every leaf-frame type exposed by the test harness, exercising the upsert
+ * operation with various key/payload field combinations.
+ *
+ * (The previous javadoc described the insert test; it was a stale copy-paste.)
+ */
+@SuppressWarnings("rawtypes")
+public class BTreeUpsertTest extends OrderedIndexUpsertTest {
+
+    public BTreeUpsertTest() {
+        super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
+    }
+
+    /** Per-test harness that provisions the buffer cache and the BTree file. */
+    private final BTreeTestHarness harness = new BTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    /** Builds a BTree-backed test context for the requested leaf-frame type. */
+    @Override
+    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+            BTreeLeafFrameType leafType) throws Exception {
+        return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+                leafType);
+    }
+
+    /** Exposes the harness's seeded random source to the shared suite. */
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
new file mode 100644
index 0000000..d61d16a
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
@@ -0,0 +1,225 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+/**
+ * Stress-tests the field-prefix-compressed NSM leaf frame: inserts and then
+ * deletes randomly generated 3-integer tuples, interleaving random calls to
+ * compact() and compress(), and asserts that neither operation changes the
+ * logical tuple contents of the frame.
+ */
+@SuppressWarnings("rawtypes")
+public class FieldPrefixNSMTest extends AbstractBTreeTest {
+
+    private static final int PAGE_SIZE = 32768; // 32K
+    private static final int NUM_PAGES = 40;
+    private static final int MAX_OPEN_FILES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 128;
+
+    public FieldPrefixNSMTest() {
+        super(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES, HYRACKS_FRAME_SIZE);
+    }
+
+    /**
+     * Builds a 3-field integer tuple (f0, f1, f2) inside a freshly allocated
+     * frame and returns a reference to it. When print is true, the field
+     * values are logged at INFO level.
+     */
+    private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
+            throws HyracksDataException {
+        if (print) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
+            }
+        }
+
+        ByteBuffer buf = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(buf);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        tb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+        tb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+        tb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+        tb.addFieldEndOffset();
+
+        appender.reset(buf, true);
+        appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+        // The tuple reference points into the frame; it stays valid only until
+        // the frame is reused.
+        tuple.reset(accessor, 0);
+
+        return tuple;
+    }
+
+    @Test
+    public void test01() throws Exception {
+
+        // declare fields
+        int fieldCount = 3;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+
+        // declare keys; all three fields are part of the key
+        int keyFieldCount = 3;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        cmps[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        MultiComparator cmp = new MultiComparator(cmps);
+
+        // just for printing
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // Fixed seed so failures are reproducible.
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        IBufferCache bufferCache = harness.getBufferCache();
+        int btreeFileId = harness.getBTreeFileId();
+        IHyracksTaskContext ctx = harness.getHyracksTaskContext();
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), false);
+        try {
+
+            ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+            BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);
+            frame.setPage(page);
+            frame.initBuffer((byte) 0);
+            frame.setMultiComparator(cmp);
+            frame.setPrefixTupleCount(0);
+
+            // Snapshots of the frame contents before/after compact/compress.
+            String before = "";
+            String after = "";
+
+            int compactFreq = 5;
+            int compressFreq = 5;
+            int smallMax = 10;
+            int numRecords = 1000;
+
+            // Remember the inserted fields so the same tuples can be deleted below.
+            int[][] savedFields = new int[numRecords][3];
+
+            // insert records with random calls to compact and compress
+            for (int i = 0; i < numRecords; i++) {
+
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if ((i + 1) % 100 == 0) {
+                        LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
+                    }
+                }
+
+                int a = rnd.nextInt() % smallMax;
+                int b = rnd.nextInt() % smallMax;
+                int c = i;
+
+                ITupleReference tuple = createTuple(ctx, a, b, c, false);
+                try {
+                    int targetTupleIndex = frame.findInsertTupleIndex(tuple);
+                    frame.insert(tuple, targetTupleIndex);
+                } catch (Exception e) {
+                    // Log and continue; the compact/compress invariant checks
+                    // below still validate the frame's remaining contents.
+                    e.printStackTrace();
+                }
+
+                savedFields[i][0] = a;
+                savedFields[i][1] = b;
+                savedFields[i][2] = c;
+
+                if (rnd.nextInt() % compactFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compact();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
+
+                if (rnd.nextInt() % compressFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compress();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
+
+            }
+
+            // delete records with random calls to compact and compress
+            for (int i = 0; i < numRecords; i++) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if ((i + 1) % 100 == 0) {
+                        LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
+                    }
+                }
+
+                ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
+                try {
+                    int tupleIndex = frame.findDeleteTupleIndex(tuple);
+                    frame.delete(tuple, tupleIndex);
+                } catch (Exception e) {
+                    // Previously swallowed silently; surface failures so broken
+                    // deletes are visible in the test output (mirrors the insert path).
+                    e.printStackTrace();
+                }
+
+                if (rnd.nextInt() % compactFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compact();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
+
+                if (rnd.nextInt() % compressFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compress();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
+            }
+
+        } finally {
+            bufferCache.unpin(page);
+        }
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
new file mode 100644
index 0000000..9ec64b5
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+import edu.uci.ics.hyracks.storage.common.sync.LatchType;
+
+/**
+ * Exercises the buffer cache's pin/latch/unpin and open/close protocols via
+ * {@link FileAccessWorker} runnables that randomly pin, latch, unpin, and
+ * close pages of a single file.
+ */
+public class StorageManagerTest extends AbstractBTreeTest {
+    // A pinned page together with the latch mode (or null) it was acquired under.
+    public class PinnedLatchedPage {
+        public final ICachedPage page;
+        public final LatchType latch;
+        public final int pageId;
+
+        public PinnedLatchedPage(ICachedPage page, int pageId, LatchType latch) {
+            this.page = page;
+            this.pageId = pageId;
+            this.latch = latch;
+        }
+    }
+
+    // Determines which latch mode (if any) a worker acquires when pinning.
+    public enum FileAccessType {
+        FTA_READONLY, FTA_WRITEONLY, FTA_MIXED, FTA_UNLATCHED
+    }
+
+    /**
+     * A runnable that repeatedly pins a random batch of pages, sleeps, unpins
+     * a random subset, and occasionally closes and reopens the file.
+     */
+    public class FileAccessWorker implements Runnable {
+        private int workerId;
+        private final IBufferCache bufferCache;
+        private final int maxPages;
+        private final int fileId;
+        private final long thinkTime;
+        private final int maxLoopCount;
+        private final int maxPinnedPages;
+        private final int closeFileChance;
+        private final FileAccessType fta;
+        private int loopCount = 0;
+        private boolean fileIsOpen = false;
+        // Fixed seed so runs are reproducible.
+        private Random rnd = new Random(50);
+        // Pages currently held by this worker; all are released before closeFile().
+        private List<PinnedLatchedPage> pinnedPages = new LinkedList<PinnedLatchedPage>();
+
+        public FileAccessWorker(int workerId, IBufferCache bufferCache, FileAccessType fta, int fileId, int maxPages,
+                int maxPinnedPages, int maxLoopCount, int closeFileChance, long thinkTime) {
+            this.bufferCache = bufferCache;
+            this.fileId = fileId;
+            this.maxPages = maxPages;
+            this.maxLoopCount = maxLoopCount;
+            this.maxPinnedPages = maxPinnedPages;
+            this.thinkTime = thinkTime;
+            this.closeFileChance = closeFileChance;
+            this.workerId = workerId;
+            this.fta = fta;
+        }
+
+        // Pins a random page, latching it according to the worker's access type,
+        // and records it in pinnedPages so it can be released later.
+        private void pinRandomPage() {
+            int pageId = Math.abs(rnd.nextInt() % maxPages);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(workerId + " PINNING PAGE: " + pageId);
+            }
+
+            try {
+                ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                LatchType latch = null;
+
+                switch (fta) {
+
+                    case FTA_UNLATCHED: {
+                        latch = null;
+                    }
+                        break;
+
+                    case FTA_READONLY: {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " S LATCHING: " + pageId);
+                        }
+                        page.acquireReadLatch();
+                        latch = LatchType.LATCH_S;
+                    }
+                        break;
+
+                    case FTA_WRITEONLY: {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " X LATCHING: " + pageId);
+                        }
+                        page.acquireWriteLatch();
+                        latch = LatchType.LATCH_X;
+                    }
+                        break;
+
+                    case FTA_MIXED: {
+                        // Randomly alternate between shared and exclusive latches.
+                        if (rnd.nextInt() % 2 == 0) {
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info(workerId + " S LATCHING: " + pageId);
+                            }
+                            page.acquireReadLatch();
+                            latch = LatchType.LATCH_S;
+                        } else {
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info(workerId + " X LATCHING: " + pageId);
+                            }
+                            page.acquireWriteLatch();
+                            latch = LatchType.LATCH_X;
+                        }
+                    }
+                        break;
+
+                }
+
+                PinnedLatchedPage plPage = new PinnedLatchedPage(page, pageId, latch);
+                pinnedPages.add(plPage);
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        // Releases a randomly chosen pinned page: unlatch first, then unpin.
+        // Callers must ensure pinnedPages is non-empty.
+        private void unpinRandomPage() {
+            int index = Math.abs(rnd.nextInt() % pinnedPages.size());
+            try {
+                PinnedLatchedPage plPage = pinnedPages.get(index);
+
+                if (plPage.latch != null) {
+                    if (plPage.latch == LatchType.LATCH_S) {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " S UNLATCHING: " + plPage.pageId);
+                        }
+                        plPage.page.releaseReadLatch();
+                    } else {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " X UNLATCHING: " + plPage.pageId);
+                        }
+                        plPage.page.releaseWriteLatch();
+                    }
+                }
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(workerId + " UNPINNING PAGE: " + plPage.pageId);
+                }
+
+                bufferCache.unpin(plPage.page);
+                pinnedPages.remove(index);
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        private void openFile() {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(workerId + " OPENING FILE: " + fileId);
+            }
+            try {
+                bufferCache.openFile(fileId);
+                fileIsOpen = true;
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        private void closeFile() {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(workerId + " CLOSING FILE: " + fileId);
+            }
+            try {
+                bufferCache.closeFile(fileId);
+                fileIsOpen = false;
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        @Override
+        public void run() {
+
+            openFile();
+
+            while (loopCount < maxLoopCount) {
+                loopCount++;
+
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
+                }
+
+                if (fileIsOpen) {
+
+                    // pin some pages
+                    // NOTE: pagesToPin < maxPinnedPages - pinnedPages.size(), so the
+                    // pinned count always stays strictly below maxPinnedPages and the
+                    // modulus here never sees a zero divisor.
+                    int pagesToPin = Math.abs(rnd.nextInt()) % (maxPinnedPages - pinnedPages.size());
+                    for (int i = 0; i < pagesToPin; i++) {
+                        pinRandomPage();
+                    }
+
+                    // do some thinking
+                    try {
+                        Thread.sleep(thinkTime);
+                    } catch (InterruptedException e) {
+                        e.printStackTrace();
+                    }
+
+                    // unpin some pages
+                    if (!pinnedPages.isEmpty()) {
+                        int pagesToUnpin = Math.abs(rnd.nextInt()) % pinnedPages.size();
+                        for (int i = 0; i < pagesToUnpin; i++) {
+                            unpinRandomPage();
+                        }
+                    }
+
+                    // possibly close file; all pinned pages are released first
+                    int closeFileCheck = Math.abs(rnd.nextInt()) % closeFileChance;
+                    if (pinnedPages.isEmpty() || closeFileCheck == 0) {
+                        int numPinnedPages = pinnedPages.size();
+                        for (int i = 0; i < numPinnedPages; i++) {
+                            unpinRandomPage();
+                        }
+                        closeFile();
+                    }
+                } else {
+                    openFile();
+                }
+            }
+
+            // Final cleanup: release any remaining pages before closing.
+            if (fileIsOpen) {
+                int numPinnedPages = pinnedPages.size();
+                for (int i = 0; i < numPinnedPages; i++) {
+                    unpinRandomPage();
+                }
+                closeFile();
+            }
+        }
+    }
+
+    // Smoke test: a single unlatched worker against a single file.
+    @Test
+    public void oneThreadOneFileTest() throws Exception { 
+		Thread worker = new Thread(new FileAccessWorker(0,
+				harness.getBufferCache(), FileAccessType.FTA_UNLATCHED,
+				harness.getBTreeFileId(), 10, 10, 100, 10, 0));
+        worker.start();
+        worker.join();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
new file mode 100644
index 0000000..596fa31
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.multithread;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexMultiThreadTest;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+/**
+ * Runs the shared ordered-index multithread test suite against a regular-NSM
+ * BTree, using {@link BTreeTestWorker} instances to execute the workloads.
+ */
+public class BTreeMultiThreadTest extends OrderedIndexMultiThreadTest {
+
+    private BTreeTestHarness harness = new BTreeTestHarness();
+    
+    private BTreeTestWorkerFactory workerFactory = new BTreeTestWorkerFactory();
+    
+    @Override
+    protected void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @Override
+    protected void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    // Creates the BTree under test with regular (non-prefix-compressed) NSM leaves.
+    @Override
+    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories) throws TreeIndexException {
+        return BTreeUtils.createBTree(harness.getBufferCache(), harness.getOpCallback(), typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM);
+    }
+
+    @Override
+    protected ITreeIndexTestWorkerFactory getWorkerFactory() {
+        return workerFactory;
+    }
+
+    // Defines the operation mixes to run; each uses uniform operation probabilities.
+    @Override
+    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
+        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
+        
+        // Insert only workload.
+        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
+        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
+        
+        // Inserts mixed with point searches and scans.
+        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
+        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
+        
+        // Inserts, updates, deletes, and upserts.        
+        TestOperation[] insertDeleteUpdateUpsertOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.UPSERT };
+        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateUpsertOps, getUniformOpProbs(insertDeleteUpdateUpsertOps)));
+        
+        // All operations mixed.
+        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.UPSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
+        workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
+        
+        return workloadConfs;
+    }
+
+    @Override
+    protected int getFileId() {
+        return harness.getBTreeFileId();
+    }
+
+    // Used by the base class for logging/reporting.
+    @Override
+    protected String getIndexTypeName() {
+        return "BTree";
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
new file mode 100644
index 0000000..7d8de7d
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.multithread;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Executes randomly selected index operations (insert, delete, update, upsert,
+ * point search, scan, disk-order scan) against a BTree on behalf of the
+ * multithreaded test driver. Expected benign exceptions caused by random
+ * tuples (duplicate/missing keys) are ignored.
+ */
+public class BTreeTestWorker extends AbstractTreeIndexTestWorker {
+    
+    private final BTree btree;
+    private final int numKeyFields;
+    // Reused to build key-only tuples for DELETE without reallocating per op.
+    private final ArrayTupleBuilder deleteTb;
+    private final ArrayTupleReference deleteTuple = new ArrayTupleReference();
+    
+    public BTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+        super(dataGen, opSelector, index, numBatches);
+        btree = (BTree) index;
+        numKeyFields = btree.getComparatorFactories().length;
+        deleteTb = new ArrayTupleBuilder(numKeyFields);
+    }
+    
+    // Dispatches a single test operation on the given tuple.
+    @Override
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, TreeIndexException {        
+        BTree.BTreeAccessor accessor = (BTree.BTreeAccessor) indexAccessor;
+        ITreeIndexCursor searchCursor = accessor.createSearchCursor();
+        ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
+        MultiComparator cmp = accessor.getOpContext().cmp;
+        // Default predicate is an inclusive point range on the given tuple;
+        // SCAN widens it to (null, null) below.
+        RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
+        
+        switch (op) {
+            case INSERT:
+                try {
+                    accessor.insert(tuple);
+                } catch (BTreeDuplicateKeyException e) {
+                    // Ignore duplicate keys, since we get random tuples.
+                }
+                break;
+                
+            case DELETE:
+                // Create a tuple reference with only key fields.
+                deleteTb.reset();
+                for (int i = 0; i < numKeyFields; i++) {
+                    deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+                }
+                deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
+                try {
+                    accessor.delete(deleteTuple);
+                } catch (BTreeNonExistentKeyException e) {
+                    // Ignore non-existent keys, since we get random tuples.
+                }
+                break;
+                
+            case UPDATE:
+                try {
+                    accessor.update(tuple);
+                } catch (BTreeNonExistentKeyException e) {
+                    // Ignore non-existent keys, since we get random tuples.
+                } catch (BTreeNotUpdateableException e) {
+                    // Ignore not updateable exception due to numKeys == numFields.
+                }
+                break;
+                
+            case UPSERT:
+                accessor.upsert(tuple);
+                // Upsert should not throw. If it does, there's 
+                // a bigger problem and the test should fail.
+                break;
+                
+            case POINT_SEARCH: 
+                searchCursor.reset();
+                rangePred.setLowKey(tuple, true);
+                rangePred.setHighKey(tuple, true);
+                accessor.search(searchCursor, rangePred);
+                consumeCursorTuples(searchCursor);
+                break;
+                
+            case SCAN:
+                // Null low/high keys make the range unbounded (full scan).
+                searchCursor.reset();
+                rangePred.setLowKey(null, true);
+                rangePred.setHighKey(null, true);
+                accessor.search(searchCursor, rangePred);
+                consumeCursorTuples(searchCursor);
+                break;
+                
+            case DISKORDER_SCAN:
+                diskOrderScanCursor.reset();
+                accessor.diskOrderScan(diskOrderScanCursor);
+                consumeCursorTuples(diskOrderScanCursor);
+                break;                            
+            
+            default:
+                throw new HyracksDataException("Op " + op.toString() + " not supported.");
+        }
+    }
+    
+    // Drains the cursor, always closing it even if iteration throws.
+    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
+        try {
+            while(cursor.hasNext()) {
+                cursor.next();
+            }
+        } finally {
+            cursor.close();
+        }
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java
new file mode 100644
index 0000000..dc4d883
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.multithread;
+
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
+public class BTreeTestWorkerFactory implements ITreeIndexTestWorkerFactory {
+    @Override
+    public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
+            ITreeIndex index, int numBatches) {
+        return new BTreeTestWorker(dataGen, opSelector, index, numBatches);
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
new file mode 100644
index 0000000..f4eca1b
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public abstract class AbstractBTreeTest {
+	protected final Logger LOGGER = Logger.getLogger(BTreeTestHarness.class.getName());
+	protected final BTreeTestHarness harness;
+	
+	public AbstractBTreeTest() {
+		harness = new BTreeTestHarness();
+    }
+    
+    public AbstractBTreeTest(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+    	harness = new BTreeTestHarness(pageSize, numPages, maxOpenFiles, hyracksFrameSize);
+    }
+	
+	@Before
+	public void setUp() throws HyracksDataException {
+		harness.setUp();
+    }
+	
+	@After
+    public void tearDown() throws HyracksDataException {
+		harness.tearDown();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
new file mode 100644
index 0000000..b820f93
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+@SuppressWarnings("rawtypes")
+public class BTreeTestContext extends OrderedIndexTestContext {
+    
+    public BTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+        super(fieldSerdes, treeIndex);
+    }
+
+    @Override
+    public int getKeyFieldCount() {
+        BTree btree = (BTree) treeIndex;
+        return btree.getComparatorFactories().length;
+    }
+    
+    @Override
+    public IBinaryComparatorFactory[] getComparatorFactories() {
+        BTree btree = (BTree) treeIndex;
+        return btree.getComparatorFactories();
+    }
+    
+    public static BTreeTestContext create(IBufferCache bufferCache, int btreeFileId, ISerializerDeserializer[] fieldSerdes, int numKeyFields, BTreeLeafFrameType leafType) throws Exception {        
+        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
+        BTree btree = BTreeUtils.createBTree(bufferCache, NoOpOperationCallback.INSTANCE, typeTraits, cmpFactories, leafType);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+        BTreeTestContext testCtx = new BTreeTestContext(fieldSerdes, btree);
+        return testCtx;
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java
new file mode 100644
index 0000000..1b450d8
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BTreeTestHarness {    
+    public static final BTreeLeafFrameType[] LEAF_FRAMES_TO_TEST = new BTreeLeafFrameType[] {
+        BTreeLeafFrameType.REGULAR_NSM, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM };
+    
+    private static final long RANDOM_SEED = 50;
+    private static final int DEFAULT_PAGE_SIZE = 256;
+    private static final int DEFAULT_NUM_PAGES = 100;
+    private static final int DEFAULT_MAX_OPEN_FILES = 10;
+    private static final int DEFAULT_HYRACKS_FRAME_SIZE = 128;
+    
+    protected final int pageSize;
+    protected final int numPages;
+    protected final int maxOpenFiles;
+    protected final int hyracksFrameSize;
+        
+    protected IHyracksTaskContext ctx; 
+    protected IBufferCache bufferCache;
+    protected int btreeFileId;
+    
+    protected final Random rnd = new Random();
+    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final String tmpDir = System.getProperty("java.io.tmpdir");
+    protected final String sep = System.getProperty("file.separator");
+    protected String fileName;
+    
+    public BTreeTestHarness() {
+    	this.pageSize = DEFAULT_PAGE_SIZE;
+    	this.numPages = DEFAULT_NUM_PAGES;
+    	this.maxOpenFiles = DEFAULT_MAX_OPEN_FILES;
+    	this.hyracksFrameSize = DEFAULT_HYRACKS_FRAME_SIZE;
+    }
+    
+    public BTreeTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+    	this.pageSize = pageSize;
+    	this.numPages = numPages;
+    	this.maxOpenFiles = maxOpenFiles;
+    	this.hyracksFrameSize = hyracksFrameSize;
+    }
+    
+    public void setUp() throws HyracksDataException {
+        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+        ctx = TestUtils.create(getHyracksFrameSize());
+        TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
+        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        btreeFileId = fmp.lookupFileId(file);
+        bufferCache.openFile(btreeFileId);
+        rnd.setSeed(RANDOM_SEED);
+    }
+    
+    public void tearDown() throws HyracksDataException {
+        bufferCache.closeFile(btreeFileId);
+        bufferCache.close();
+        File f = new File(fileName);
+        f.deleteOnExit();
+    }
+    
+    public IHyracksTaskContext getHyracksTaskContext() {
+    	return ctx;
+    }
+    
+    public IBufferCache getBufferCache() {
+    	return bufferCache;
+    }
+    
+    public int getBTreeFileId() {
+    	return btreeFileId;
+    }
+    
+    public String getFileName() {
+        return fileName;
+    }
+    
+    public Random getRandom() {
+    	return rnd;
+    }
+    
+    public int getPageSize() {
+        return pageSize;
+    }
+    
+    public int getNumPages() {
+        return numPages;
+    }
+    
+    public int getHyracksFrameSize() {
+        return hyracksFrameSize;
+    }
+    
+    public int getMaxOpenFiles() {
+        return maxOpenFiles;
+    }
+    
+    public IOperationCallback getOpCallback() {
+        return NoOpOperationCallback.INSTANCE;
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
new file mode 100644
index 0000000..bba6a0e
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
@@ -0,0 +1,51 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-invertedindex-test</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-am-invertedindex-test</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-tests</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+          <encoding>UTF-8</encoding>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-invertedindex</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-test-support</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
new file mode 100644
index 0000000..e086af6
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.util.InvertedIndexUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
/**
 * Base fixture for inverted-index search tests. Creates a BTree (the token
 * dictionary) and an InvertedIndex over a shared buffer cache, plus reusable
 * tuple builders/references for insert and query records. Subclasses must
 * supply the tokenizer via {@link #setTokenizer()}.
 */
public abstract class AbstractInvIndexSearchTest extends AbstractInvIndexTest {
    protected final int PAGE_SIZE = 32768;
    protected final int NUM_PAGES = 100;
    protected final int MAX_OPEN_FILES = 10;
    protected final int HYRACKS_FRAME_SIZE = 32768;
    protected IHyracksTaskContext taskCtx = TestUtils.create(HYRACKS_FRAME_SIZE);

    protected IBufferCache bufferCache;
    protected IFileMapProvider fmp;

    // --- BTREE ---

    // create file refs (file names come from AbstractInvIndexTest)
    protected FileReference btreeFile = new FileReference(new File(btreeFileName));
    protected int btreeFileId;

    // declare token type traits; the btree traits are derived from them
    protected ITypeTraits[] tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
    protected ITypeTraits[] btreeTypeTraits = InvertedIndexUtils.getBTreeTypeTraits(tokenTypeTraits);

    // declare btree keys (single UTF8 token key; factory assigned in start())
    protected int btreeKeyFieldCount = 1;
    protected IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];

    // btree frame factories
    protected TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(btreeTypeTraits);
    protected ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    protected ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    protected ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();

    // btree frames
    protected ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
    protected ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();

    protected IFreePageManager freePageManager;

    protected BTree btree;

    // --- INVERTED INDEX ---

    protected FileReference invListsFile = new FileReference(new File(invListsFileName));
    protected int invListsFileId;

    // single integer element per inverted-list entry (traits/factory assigned in start())
    protected int invListFields = 1;
    protected ITypeTraits[] invListTypeTraits = new ITypeTraits[invListFields];

    protected int invListKeys = 1;
    protected IBinaryComparatorFactory[] invListCmpFactories = new IBinaryComparatorFactory[invListKeys];

    protected InvertedIndex invIndex;

    protected Random rnd = new Random();

    protected ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
    protected ArrayTupleReference tuple = new ArrayTupleReference();

    protected ISerializerDeserializer[] insertSerde = { UTF8StringSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE };
    protected RecordDescriptor insertRecDesc = new RecordDescriptor(insertSerde);

    protected ArrayList<ArrayList<Integer>> checkInvLists = new ArrayList<ArrayList<Integer>>();

    // scanCountArray is indexed by element id, so maxId bounds the ids tests may generate
    protected int maxId = 1000000;
    protected int[] scanCountArray = new int[maxId];
    protected ArrayList<Integer> expectedResults = new ArrayList<Integer>();

    protected ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
    protected RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);

    protected ArrayTupleBuilder queryTb = new ArrayTupleBuilder(querySerde.length);
    protected ArrayTupleReference queryTuple = new ArrayTupleReference();

    protected ITokenFactory tokenFactory;
    protected IBinaryTokenizer tokenizer;

    protected IIndexCursor resultCursor;

    // Subclass hook: must initialize tokenFactory/tokenizer; called from start()
    // after the btree is open and before the inverted index is built.
    protected abstract void setTokenizer();
    
    /**
     * Creates and opens the BTree and inverted index on fresh files, and seeds
     * the random generator deterministically (seed 50) so runs are repeatable.
     * Note: this does not generate data or bulk load; concrete tests do that.
     */
    @Before
    public void start() throws Exception {
        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
        bufferCache = TestStorageManagerComponentHolder.getBufferCache(taskCtx);
        fmp = TestStorageManagerComponentHolder.getFileMapProvider(taskCtx);

        // --- BTREE ---

        bufferCache.createFile(btreeFile);
        btreeFileId = fmp.lookupFileId(btreeFile);
        bufferCache.openFile(btreeFileId);

        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);

        freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);

        btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, btreeTypeTraits.length, btreeCmpFactories,
                freePageManager, interiorFrameFactory, leafFrameFactory);
        btree.create(btreeFileId);
        btree.open(btreeFileId);

        // --- INVERTED INDEX ---

        setTokenizer();
        
        bufferCache.createFile(invListsFile);
        invListsFileId = fmp.lookupFileId(invListsFile);
        bufferCache.openFile(invListsFileId);

        invListTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
        invListCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);

        IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
        invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmpFactories, invListBuilder);
        invIndex.open(invListsFileId);

        rnd.setSeed(50);
    }

    /**
     * Closes the indexes and buffer cache, and schedules the backing files for
     * deletion at JVM exit (via AbstractInvIndexTest.tearDown()).
     */
    @After
    public void deinit() throws HyracksDataException {
        AbstractInvIndexTest.tearDown();
        btree.close();
        invIndex.close();
        bufferCache.closeFile(btreeFileId);
        bufferCache.closeFile(invListsFileId);
        bufferCache.close();
    }
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java
new file mode 100644
index 0000000..cc8ab15
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.logging.Logger;
+
/**
 * Base class for inverted-index tests: derives timestamped temp-file names for
 * the btree and inverted-lists files and cleans them up at JVM exit.
 */
public abstract class AbstractInvIndexTest {

    protected static final Logger LOGGER = Logger.getLogger(AbstractInvIndexTest.class.getName());

    // Timestamped base name keeps repeated runs from colliding on disk.
    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
    protected final static String sep = System.getProperty("file.separator");
    protected final static String baseFileName = tmpDir + sep + simpleDateFormat.format(new Date());
    protected final static String btreeFileName = baseFileName + "btree";
    protected final static String invListsFileName = baseFileName + "invlists";

    /** Schedules both on-disk index files for deletion when the JVM exits. */
    public static void tearDown() {
        new File(btreeFileName).deleteOnExit();
        new File(invListsFileName).deleteOnExit();
    }
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
new file mode 100644
index 0000000..9fdc1c4
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
@@ -0,0 +1,294 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import junit.framework.Assert;
+
+import org.junit.AfterClass;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.util.InvertedIndexUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+/**
+ * Bulk-load test for the inverted index: generates pre-sorted
+ * <token, id> pairs, bulk loads them into the index, and then verifies
+ * every inverted list (including that unknown tokens yield empty lists)
+ * by reading the index back through its cursors.
+ */
+public class BulkLoadTest extends AbstractInvIndexTest {
+
+    private static final int PAGE_SIZE = 32768;
+    private static final int NUM_PAGES = 100;
+    private static final int MAX_OPEN_FILES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 32768;
+    private IHyracksTaskContext stageletCtx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+    /**
+     * This test generates a list of <word-token, id> pairs which are pre-sorted
+     * on the token. Those pairs form the input to an inverted-index bulk load.
+     * The contents of the inverted lists are verified against the generated
+     * data.
+     */
+    @Test
+    public void singleFieldPayloadTest() throws Exception {
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(stageletCtx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(stageletCtx);
+
+        // create file refs for the BTree (token dictionary) and the
+        // inverted-lists file
+        FileReference btreeFile = new FileReference(new File(btreeFileName));
+        bufferCache.createFile(btreeFile);
+        int btreeFileId = fmp.lookupFileId(btreeFile);
+        bufferCache.openFile(btreeFileId);
+
+        FileReference invListsFile = new FileReference(new File(invListsFileName));
+        bufferCache.createFile(invListsFile);
+        int invListsFileId = fmp.lookupFileId(invListsFile);
+        bufferCache.openFile(invListsFileId);
+
+        // Declare token type traits, and compute BTree type traits.
+        ITypeTraits[] tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
+        ITypeTraits[] btreeTypeTraits = InvertedIndexUtils.getBTreeTypeTraits(tokenTypeTraits);
+
+        // declare btree keys (single UTF8 string token key)
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(btreeTypeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, btreeTypeTraits.length, cmpFactories,
+                freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        // Inverted-list elements are single 32-bit integer ids.
+        int invListFields = 1;
+        ITypeTraits[] invListTypeTraits = new ITypeTraits[invListFields];
+        invListTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+
+        int invListKeys = 1;
+        IBinaryComparatorFactory[] invListCmpFactories = new IBinaryComparatorFactory[invListKeys];
+        invListCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
+        InvertedIndex invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmpFactories, invListBuilder);
+        invIndex.open(invListsFileId);
+
+        // Fixed seed keeps the generated <token, id> data deterministic.
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ByteBuffer frame = stageletCtx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(stageletCtx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] insertSerde = { UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor insertRecDesc = new RecordDescriptor(insertSerde);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), insertRecDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        // Tokens are listed in sorted order, as required for a bulk load.
+        List<String> tokens = new ArrayList<String>();
+        tokens.add("compilers");
+        tokens.add("computer");
+        tokens.add("databases");
+        tokens.add("fast");
+        tokens.add("hyracks");
+        tokens.add("major");
+        tokens.add("science");
+        tokens.add("systems");
+        tokens.add("university");
+
+        // Shadow copy of the ids inserted per token, used for verification.
+        ArrayList<ArrayList<Integer>> checkListElements = new ArrayList<ArrayList<Integer>>();
+        for (int i = 0; i < tokens.size(); i++) {
+            checkListElements.add(new ArrayList<Integer>());
+        }
+
+        int maxId = 1000000;
+        int addProb = 0;
+        int addProbStep = 10;
+
+        IIndexBulkLoadContext ctx = invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);
+
+        for (int i = 0; i < tokens.size(); i++) {
+
+            // addProb grows per token, so later tokens are loaded with a
+            // lower selection probability (shorter inverted lists).
+            addProb += addProbStep * (i + 1);
+            for (int j = 0; j < maxId; j++) {
+                if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
+
+                    tb.reset();
+                    UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), dos);
+                    tb.addFieldEndOffset();
+                    IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
+                    tb.addFieldEndOffset();
+
+                    checkListElements.get(i).add(j);
+
+                    appender.reset(frame, true);
+                    appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+                    tuple.reset(accessor, 0);
+
+                    try {
+                        invIndex.bulkLoadAddTuple(tuple, ctx);
+                    } catch (Exception e) {
+                        // NOTE(review): a failure here is only printed, not
+                        // propagated, so a failed add would surface later as
+                        // a verification mismatch rather than failing fast.
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }
+        invIndex.endBulkLoad(ctx);
+
+        // ------- START VERIFICATION -----------
+
+        ITreeIndexCursor btreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame, false);
+        FrameTupleReference searchKey = new FrameTupleReference();
+        MultiComparator btreeCmp = MultiComparator.create(cmpFactories);
+        RangePredicate btreePred = new RangePredicate(searchKey, searchKey, true, true, btreeCmp, btreeCmp);
+
+        IInvertedListCursor invListCursor = new FixedSizeElementInvertedListCursor(bufferCache, invListsFileId,
+                invListTypeTraits);
+
+        ISerializerDeserializer[] tokenSerde = { UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor tokenRecDesc = new RecordDescriptor(tokenSerde);
+        FrameTupleAppender tokenAppender = new FrameTupleAppender(stageletCtx.getFrameSize());
+        ArrayTupleBuilder tokenTupleBuilder = new ArrayTupleBuilder(1);
+        DataOutput tokenDos = tokenTupleBuilder.getDataOutput();
+        IFrameTupleAccessor tokenAccessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), tokenRecDesc);
+        tokenAccessor.reset(frame);
+
+        ITreeIndexAccessor btreeAccessor = invIndex.getBTree().createAccessor();
+
+        // verify created inverted lists one-by-one against checkListElements
+        for (int i = 0; i < tokens.size(); i++) {
+
+            tokenTupleBuilder.reset();
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), tokenDos);
+            tokenTupleBuilder.addFieldEndOffset();
+
+            tokenAppender.reset(frame, true);
+            tokenAppender.append(tokenTupleBuilder.getFieldEndOffsets(), tokenTupleBuilder.getByteArray(), 0,
+                    tokenTupleBuilder.getSize());
+
+            searchKey.reset(tokenAccessor, 0);
+
+            invIndex.openCursor(btreeCursor, btreePred, btreeAccessor, invListCursor);
+
+            invListCursor.pinPagesSync();
+            int checkIndex = 0;
+            while (invListCursor.hasNext()) {
+                invListCursor.next();
+                ITupleReference invListTuple = invListCursor.getTuple();
+                int invListElement = IntegerSerializerDeserializer.getInt(invListTuple.getFieldData(0),
+                        invListTuple.getFieldStart(0));
+                int checkInvListElement = checkListElements.get(i).get(checkIndex).intValue();
+                Assert.assertEquals(invListElement, checkInvListElement);
+                checkIndex++;
+            }
+            invListCursor.unpinPages();
+            // The list must contain exactly the inserted ids, no extras.
+            Assert.assertEquals(checkIndex, checkListElements.get(i).size());
+        }
+
+        // check that non-existing tokens have an empty inverted list
+        List<String> nonExistingTokens = new ArrayList<String>();
+        nonExistingTokens.add("watermelon");
+        nonExistingTokens.add("avocado");
+        nonExistingTokens.add("lemon");
+
+        for (int i = 0; i < nonExistingTokens.size(); i++) {
+
+            tokenTupleBuilder.reset();
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(nonExistingTokens.get(i), tokenDos);
+            tokenTupleBuilder.addFieldEndOffset();
+
+            tokenAppender.reset(frame, true);
+            tokenAppender.append(tokenTupleBuilder.getFieldEndOffsets(), tokenTupleBuilder.getByteArray(), 0,
+                    tokenTupleBuilder.getSize());
+
+            searchKey.reset(tokenAccessor, 0);
+
+            invIndex.openCursor(btreeCursor, btreePred, btreeAccessor, invListCursor);
+
+            invListCursor.pinPagesSync();
+            Assert.assertEquals(invListCursor.hasNext(), false);
+            invListCursor.unpinPages();
+        }
+
+        // Release on-disk resources.
+        btree.close();
+        bufferCache.closeFile(btreeFileId);
+        bufferCache.closeFile(invListsFileId);
+        bufferCache.close();
+    }
+
+    // Deletes the test's backing files on JVM exit (see base class).
+    @AfterClass
+    public static void deinit() {
+        AbstractInvIndexTest.tearDown();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java
new file mode 100644
index 0000000..9c7ec09
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Random;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeFrameTupleAccessor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeFrameTupleAppender;
+
+/**
+ * Unit test for the FixedSizeFrameTupleAppender/Accessor pair, which store
+ * fixed-size tuples in a frame without per-tuple or per-field slots.
+ */
+public class FixedSizeFrameTupleTest {
+
+    private static int FRAME_SIZE = 4096;
+
+    // Fixed seed so the generated data (and thus the test) is deterministic.
+    private Random rnd = new Random(50);
+
+    /**
+     * This test verifies the correct behavior of the FixedSizeFrameTuple class.
+     * Frames containing FixedSizeFrameTuple's require neither tuple slots nor
+     * field slots. The test inserts generated data into a frame until the
+     * frame is full, and then verifies the frame's contents.
+     * 
+     */
+    @Test
+    public void singleFieldTest() throws Exception {
+        ByteBuffer buffer = ByteBuffer.allocate(FRAME_SIZE);
+
+        // Single integer field per tuple.
+        ITypeTraits[] fields = new ITypeTraits[1];
+        fields[0] = IntegerPointable.TYPE_TRAITS;
+
+        FixedSizeFrameTupleAppender ftapp = new FixedSizeFrameTupleAppender(FRAME_SIZE, fields);
+        FixedSizeFrameTupleAccessor ftacc = new FixedSizeFrameTupleAccessor(FRAME_SIZE, fields);
+
+        boolean frameHasSpace = true;
+
+        // Shadow copy of the appended values, used for verification below.
+        ArrayList<Integer> check = new ArrayList<Integer>();
+
+        // Append random ints until the frame reports it is full.
+        ftapp.reset(buffer, true);
+        while (frameHasSpace) {
+            int val = rnd.nextInt();
+            frameHasSpace = ftapp.append(val);
+            if (frameHasSpace) {
+                check.add(val);
+                ftapp.incrementTupleCount(1);
+            }
+        }
+
+        // Re-read the frame through the accessor and compare element-wise
+        // against the shadow list.
+        ftacc.reset(buffer);
+        for (int i = 0; i < ftacc.getTupleCount(); i++) {
+            int val = IntegerSerializerDeserializer.getInt(ftacc.getBuffer().array(), ftacc.getTupleStartOffset(i));
+            Assert.assertEquals(check.get(i).intValue(), val);
+        }
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
new file mode 100644
index 0000000..3fb6407
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
@@ -0,0 +1,237 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.AbstractUTF8Token;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.HashedUTF8NGramTokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.NGramUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8NGramTokenFactory;
+
+/**
+ * Tests for the n-gram (gramLength = 3) UTF-8 string tokenizers, covering
+ * plain, hashed, and counted-hashed token variants, each with and without
+ * pre/post padding grams.
+ */
+public class NGramTokenizerTest {
+
+    // Padding characters used when prePost gram generation is enabled.
+    private char PRECHAR = '#';
+    private char POSTCHAR = '$';
+
+    // Input deliberately contains a non-ASCII character ('ü') to exercise
+    // multi-byte UTF-8 handling.
+    private String str = "Jürgen S. Generic's Car";
+    private byte[] inputBuffer;
+
+    private int gramLength = 3;
+
+    // Computes the expected grams of s (lower-cased; padded with
+    // gramLength-1 PRECHARs/POSTCHARs when prePost is true) for comparison
+    // with the tokenizer's output.
+    private void getExpectedGrams(String s, int gramLength, ArrayList<String> grams, boolean prePost) {
+
+        String tmp = s.toLowerCase();
+        if (prePost) {
+            StringBuilder preBuilder = new StringBuilder();
+            for (int i = 0; i < gramLength - 1; i++) {
+                preBuilder.append(PRECHAR);
+            }
+            String pre = preBuilder.toString();
+
+            StringBuilder postBuilder = new StringBuilder();
+            for (int i = 0; i < gramLength - 1; i++) {
+                postBuilder.append(POSTCHAR);
+            }
+            String post = postBuilder.toString();
+
+            tmp = pre + s.toLowerCase() + post;
+        }
+
+        // Slide a window of gramLength over the (possibly padded) string.
+        for (int i = 0; i < tmp.length() - gramLength + 1; i++) {
+            String gram = tmp.substring(i, i + gramLength);
+            grams.add(gram);
+        }
+    }
+
+    @Before
+    public void init() throws Exception {
+        // serialize string into bytes (modified-UTF8, as the tokenizer expects)
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        DataOutput dos = new DataOutputStream(baos);
+        dos.writeUTF(str);
+        inputBuffer = baos.toByteArray();
+    }
+
+    // Counted-hashed variant: repeated grams hash differently because the
+    // occurrence count is folded into the hash.
+    void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost) throws IOException {
+        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false,
+                false, tokenFactory);
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+        ArrayList<String> expectedGrams = new ArrayList<String>();
+        getExpectedGrams(str, gramLength, expectedGrams, prePost);
+        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+        HashMap<String, Integer> gramCounts = new HashMap<String, Integer>();
+        for (String s : expectedGrams) {
+            Integer count = gramCounts.get(s);
+            if (count == null) {
+                count = 1;
+                gramCounts.put(s, count);
+            } else {
+                count++;
+                // Fix: write the incremented count back; otherwise the map
+                // stays at 1 and every later occurrence of the same gram
+                // would incorrectly be expected with count 2.
+                gramCounts.put(s, count);
+            }
+
+            int hash = tokenHash(s, count);
+            expectedHashedGrams.add(hash);
+        }
+
+        int tokenCount = 0;
+
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
+
+            // serialize hashed token
+            GrowableArray tokenStorage = new GrowableArray();
+
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
+
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
+
+            Integer hashedGram = in.readInt();
+
+            // System.out.println(hashedGram);
+
+            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+
+            tokenCount++;
+        }
+        // System.out.println("---------");
+    }
+
+    // Hashed variant: every gram is hashed with a fixed count of 1.
+    void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost) throws IOException {
+        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
+                tokenFactory);
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+        ArrayList<String> expectedGrams = new ArrayList<String>();
+        getExpectedGrams(str, gramLength, expectedGrams, prePost);
+        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+        for (String s : expectedGrams) {
+            int hash = tokenHash(s, 1);
+            expectedHashedGrams.add(hash);
+        }
+
+        int tokenCount = 0;
+
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
+
+            // serialize hashed token
+            GrowableArray tokenStorage = new GrowableArray();
+
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
+
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
+
+            Integer hashedGram = in.readInt();
+
+            // System.out.println(hashedGram);
+
+            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+
+            tokenCount++;
+        }
+        // System.out.println("---------");
+    }
+
+    // Plain variant: tokens are serialized as UTF-8 strings, not hashes.
+    void runTestNGramTokenizerWithUTF8Tokens(boolean prePost) throws IOException {
+        UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
+        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
+                tokenFactory);
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+        ArrayList<String> expectedGrams = new ArrayList<String>();
+        getExpectedGrams(str, gramLength, expectedGrams, prePost);
+
+        int tokenCount = 0;
+
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
+
+            // serialize token
+            GrowableArray tokenStorage = new GrowableArray();
+
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
+
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
+
+            String strGram = in.readUTF();
+
+            // System.out.println("\"" + strGram + "\"");
+
+            Assert.assertEquals(expectedGrams.get(tokenCount), strGram);
+
+            tokenCount++;
+        }
+        // System.out.println("---------");
+    }
+
+    @Test
+    public void testNGramTokenizerWithCountedHashedUTF8Tokens() throws Exception {
+        runTestNGramTokenizerWithCountedHashedUTF8Tokens(false);
+        runTestNGramTokenizerWithCountedHashedUTF8Tokens(true);
+    }
+
+    @Test
+    public void testNGramTokenizerWithHashedUTF8Tokens() throws Exception {
+        runTestNGramTokenizerWithHashedUTF8Tokens(false);
+        runTestNGramTokenizerWithHashedUTF8Tokens(true);
+    }
+
+    @Test
+    public void testNGramTokenizerWithUTF8Tokens() throws IOException {
+        runTestNGramTokenizerWithUTF8Tokens(false);
+        runTestNGramTokenizerWithUTF8Tokens(true);
+    }
+
+    // Reference implementation of the token hash, presumably mirroring the
+    // hashing in AbstractUTF8Token (it uses that class's GOLDEN_RATIO_32
+    // constant) — TODO confirm against AbstractUTF8Token's implementation.
+    public int tokenHash(String token, int tokenCount) {
+        int h = AbstractUTF8Token.GOLDEN_RATIO_32;
+        for (int i = 0; i < token.length(); i++) {
+            h ^= token.charAt(i);
+            h *= AbstractUTF8Token.GOLDEN_RATIO_32;
+        }
+        return h + tokenCount;
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
new file mode 100644
index 0000000..33bf0c5
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
@@ -0,0 +1,303 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex.InvertedIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchPredicate;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.JaccardSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+
/**
 * The purpose of this test is to evaluate the performance of searches against
 * an inverted index. First, we generate random <token, id> pairs sorted on
 * token, which are bulk loaded into an inverted index. Next, we build random
 * queries from a list of predefined tokens in the index, and measure the
 * performance of executing them with different search modifiers. We test the
 * ConjunctiveSearchModifier and the JaccardSearchModifier.
 */
public class SearchPerfTest extends AbstractInvIndexSearchTest {

    // Fixed vocabulary used both for building inverted lists and for
    // composing random queries.
    protected List<String> tokens = new ArrayList<String>();

    @Override
    protected void setTokenizer() {
        // Word-level tokenizer; the boolean flags configure the delimited
        // tokenizer (presumably ignore-token-count / type-tag handling --
        // TODO confirm against DelimitedUTF8StringBinaryTokenizer).
        tokenFactory = new UTF8WordTokenFactory();
        tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
                tokenFactory);
    }

    @Before
    public void start() throws Exception {
        super.start();
        loadData();
    }

    /**
     * Bulk-loads the inverted index with randomly generated <token, id>
     * pairs, mirroring every inserted id in checkInvLists so the searcher's
     * results can be verified against them later.
     */
    public void loadData() throws HyracksDataException, TreeIndexException {
        tokens.add("compilers");
        tokens.add("computer");
        tokens.add("databases");
        tokens.add("fast");
        tokens.add("hyracks");
        tokens.add("major");
        tokens.add("science");
        tokens.add("systems");
        tokens.add("university");

        // One check-list per token, parallel to the index's inverted lists.
        for (int i = 0; i < tokens.size(); i++) {
            checkInvLists.add(new ArrayList<Integer>());
        }

        // for generating length-skewed inverted lists: each id is added with
        // probability 1/addProb, and addProb grows with the token index, so
        // later tokens get progressively shorter inverted lists.
        int addProb = 0;
        int addProbStep = 10;

        IIndexBulkLoadContext ctx = invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);

        for (int i = 0; i < tokens.size(); i++) {

            addProb += addProbStep * (i + 1);
            for (int j = 0; j < maxId; j++) {
                if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
                    // Build the <token, id> tuple and bulk-load it.
                    tb.reset();
                    UTF8StringSerializerDeserializer.INSTANCE.serialize(
                            tokens.get(i), tb.getDataOutput());
                    tb.addFieldEndOffset();
                    IntegerSerializerDeserializer.INSTANCE.serialize(j, tb.getDataOutput());
                    tb.addFieldEndOffset();
                    tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
                    checkInvLists.get(i).add(j);
                    try {
                        invIndex.bulkLoadAddTuple(tuple, ctx);
                    } catch (Exception e) {
                        // NOTE(review): load failures are swallowed and only
                        // printed; consider rethrowing so the test fails fast.
                        e.printStackTrace();
                    }
                }
            }
        }
        invIndex.endBulkLoad(ctx);
    }

    /**
     * Determine the expected results with the ScanCount algorithm. The
     * ScanCount algorithm is very simple, so we can be confident the results
     * are correct.
     */
    protected void fillExpectedResults(int[] queryTokenIndexes,
            int numQueryTokens, int occurrenceThreshold) {
        // reset scan count array
        for (int i = 0; i < maxId; i++) {
            scanCountArray[i] = 0;
        }

        // count, for every id, in how many of the query's inverted lists it occurs
        for (int i = 0; i < numQueryTokens; i++) {
            ArrayList<Integer> list = checkInvLists.get(queryTokenIndexes[i]);
            for (int j = 0; j < list.size(); j++) {
                scanCountArray[list.get(j)]++;
            }
        }

        // an id is an expected result iff it meets the occurrence threshold
        expectedResults.clear();
        for (int i = 0; i < maxId; i++) {
            if (scanCountArray[i] >= occurrenceThreshold) {
                expectedResults.add(i);
            }
        }
    }

    /**
     * Generates a specified number of queries. Each query consists of a set of
     * randomly chosen tokens that are picked from the pre-defined set of
     * tokens. We run each query, measure its time, and verify its results
     * against the results produced by ScanCount, implemented in
     * fillExpectedResults().
     */
    private void runQueries(IInvertedIndexSearchModifier searchModifier,
            int numQueries) throws Exception {

        // Fixed seed so every invocation generates the same query workload.
        rnd.setSeed(50);

        InvertedIndexAccessor accessor = (InvertedIndexAccessor) invIndex.createAccessor();
        InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);

        // generate random queries
        int[] queryTokenIndexes = new int[tokens.size()];
        for (int i = 0; i < numQueries; i++) {

            // 1..tokens.size() randomly picked tokens (duplicates possible).
            int numQueryTokens = Math.abs(rnd.nextInt() % tokens.size()) + 1;
            for (int j = 0; j < numQueryTokens; j++) {
                queryTokenIndexes[j] = Math.abs(rnd.nextInt() % tokens.size());
            }

            // Space-separated query string from the chosen tokens.
            StringBuilder strBuilder = new StringBuilder();
            for (int j = 0; j < numQueryTokens; j++) {
                strBuilder.append(tokens.get(queryTokenIndexes[j]));
                if (j + 1 != numQueryTokens) {
                    strBuilder.append(" ");
                }
            }

            String queryString = strBuilder.toString();

            // Serialize query.
            queryTb.reset();
            UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
                    queryTb.getDataOutput());
            queryTb.addFieldEndOffset();
            queryTuple.reset(queryTb.getFieldEndOffsets(), queryTb.getByteArray());

            // Set query tuple in search predicate.
            searchPred.setQueryTuple(queryTuple);
            searchPred.setQueryFieldIndex(0);

            // Set when the searcher aborts via its panic exception; such
            // queries are timed but their results are not verified.
            boolean panic = false;

            resultCursor = accessor.createSearchCursor();
            // Average over 'repeats' runs (currently a single run).
            int repeats = 1;
            double totalTime = 0;
            for (int j = 0; j < repeats; j++) {
                long timeStart = System.currentTimeMillis();
                try {
                    resultCursor.reset();
                    accessor.search(resultCursor, searchPred);
                } catch (OccurrenceThresholdPanicException e) {
                    panic = true;
                }
                long timeEnd = System.currentTimeMillis();
                totalTime += timeEnd - timeStart;
            }
            double avgTime = totalTime / (double) repeats;
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info(i + ": " + "\"" + queryString + "\": " + avgTime
                        + "ms");
            }

            if (!panic) {
                TOccurrenceSearcher searcher = (TOccurrenceSearcher) accessor.getSearcher();
                fillExpectedResults(queryTokenIndexes, numQueryTokens,
                        searcher.getOccurrenceThreshold());
                // verify results: ids must come back in the same order as the
                // ScanCount-derived expected list
                int checkIndex = 0;
                while (resultCursor.hasNext()) {
                    resultCursor.next();
                    ITupleReference resultTuple = resultCursor.getTuple();
                    int id = IntegerSerializerDeserializer.getInt(
                            resultTuple.getFieldData(0),
                            resultTuple.getFieldStart(0));
                    Assert.assertEquals(expectedResults.get(checkIndex)
                            .intValue(), id);
                    checkIndex++;
                }

                // On a size mismatch, log the full expected list before the
                // assertion below fails, to ease debugging.
                if (expectedResults.size() != checkIndex) {
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("CHECKING");
                    }
                    StringBuilder expectedStrBuilder = new StringBuilder();
                    for (Integer x : expectedResults) {
                        expectedStrBuilder.append(x + " ");
                    }
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info(expectedStrBuilder.toString());
                    }
                }

                Assert.assertEquals(expectedResults.size(), checkIndex);
            }
        }
    }

    /**
     * Runs 50 random conjunctive search queries to test the
     * ConjunctiveSearchModifier.
     */
    @Test
    public void conjunctiveKeywordQueryTest() throws Exception {
        IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
        runQueries(searchModifier, 50);
    }

    /**
     * Runs 50 random jaccard-based search queries with thresholds 1.0, 0.9,
     * 0.8, 0.7, 0.6, 0.5. Tests the JaccardSearchModifier.
     */
    @Test
    public void jaccardKeywordQueryTest() throws Exception {
        JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 1.0f);
        }
        searchModifier.setJaccThresh(1.0f);
        runQueries(searchModifier, 50);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.9f);
        }
        searchModifier.setJaccThresh(0.9f);
        runQueries(searchModifier, 50);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.8f);
        }
        searchModifier.setJaccThresh(0.8f);
        runQueries(searchModifier, 50);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.7f);
        }
        searchModifier.setJaccThresh(0.7f);
        runQueries(searchModifier, 50);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.6f);
        }
        searchModifier.setJaccThresh(0.6f);
        runQueries(searchModifier, 50);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.5f);
        }
        searchModifier.setJaccThresh(0.5f);
        runQueries(searchModifier, 50);
    }
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
new file mode 100644
index 0000000..47a068b
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.logging.Level;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex.InvertedIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchPredicate;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.EditDistanceSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.JaccardSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.NGramUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8NGramTokenFactory;
+
/**
 * Tests inverted-index searches over n-gram tokens of generated person names,
 * using conjunctive, Jaccard, and edit-distance search modifiers. Queries are
 * randomly drawn from the indexed strings themselves; results are logged, not
 * verified against an oracle.
 */
public class SearchTest extends AbstractInvIndexSearchTest {

    // All "firstName lastName" combinations; also serves as the query workload.
    protected List<String> dataStrings = new ArrayList<String>();
    protected List<String> firstNames = new ArrayList<String>();
    protected List<String> lastNames = new ArrayList<String>();

    // Comparators for the BTree key fields; index 0 compares serialized tokens.
    protected IBinaryComparator[] btreeBinCmps;

    @Override
    protected void setTokenizer() {
        // 3-gram tokenizer; the boolean flags presumably control pre/post
        // fixing and token-count usage -- TODO confirm against
        // NGramUTF8StringBinaryTokenizer.
        tokenFactory = new UTF8NGramTokenFactory();
        tokenizer = new NGramUTF8StringBinaryTokenizer(3, false, true, false,
                tokenFactory);
    }

    @Before
    public void start() throws Exception {
        super.start();
        btreeBinCmps = new IBinaryComparator[btreeCmpFactories.length];
        for (int i = 0; i < btreeCmpFactories.length; i++) {
            btreeBinCmps[i] = btreeCmpFactories[i].createBinaryComparator();
        }
        generateDataStrings();
        loadData();
    }

    /**
     * Fills dataStrings with every 'firstName lastName' combination. The name
     * lists are chosen to contain many near-duplicates (small edit distance),
     * which makes them useful for similarity queries.
     */
    public void generateDataStrings() {
        firstNames.add("Kathrin");
        firstNames.add("Cathrin");
        firstNames.add("Kathryn");
        firstNames.add("Cathryn");
        firstNames.add("Kathrine");
        firstNames.add("Cathrine");
        firstNames.add("Kathryne");
        firstNames.add("Cathryne");
        firstNames.add("Katherin");
        firstNames.add("Catherin");
        firstNames.add("Katheryn");
        firstNames.add("Catheryn");
        firstNames.add("Katherine");
        firstNames.add("Catherine");
        firstNames.add("Katheryne");
        firstNames.add("Catheryne");
        firstNames.add("John");
        firstNames.add("Jack");
        firstNames.add("Jonathan");
        firstNames.add("Nathan");

        lastNames.add("Miller");
        lastNames.add("Myller");
        lastNames.add("Keller");
        lastNames.add("Ketler");
        lastNames.add("Muller");
        lastNames.add("Fuller");
        lastNames.add("Smith");
        lastNames.add("Smyth");
        lastNames.add("Smithe");
        lastNames.add("Smythe");

        // Generate all 'firstName lastName' combinations as data strings
        for (String f : firstNames) {
            for (String l : lastNames) {
                dataStrings.add(f + " " + l);
            }
        }
    }

    /**
     * A serialized <token, id> pair, ordered by token bytes (via the BTree's
     * token comparator), then by id, matching the sort order the inverted
     * index's bulk load expects.
     */
    private class TokenIdPair implements Comparable<TokenIdPair> {
        public final GrowableArray tokenStorage = new GrowableArray();
        public int id;

        TokenIdPair(IToken token, int id) throws IOException {
            token.serializeToken(tokenStorage);
            this.id = id;
        }

        @Override
        public int compareTo(TokenIdPair o) {
            // NOTE(review): this compares using the backing array's full
            // length; if GrowableArray ever over-allocates, unused trailing
            // bytes would participate in the comparison -- consider using the
            // array's logical length instead. TODO confirm GrowableArray's
            // allocation behavior.
            int cmp = btreeBinCmps[0].compare(tokenStorage.getByteArray(), 0,
                    tokenStorage.getByteArray().length, o.tokenStorage.getByteArray(), 0,
                    o.tokenStorage.getByteArray().length);
            if (cmp == 0) {
                return id - o.id;
            } else {
                return cmp;
            }
        }
    }

    /**
     * Tokenizes every data string into <token, id> pairs, sorts them into the
     * index's expected order, and bulk-loads them into the inverted index.
     */
    public void loadData() throws IOException, TreeIndexException {
        List<TokenIdPair> pairs = new ArrayList<TokenIdPair>();
        // generate pairs for subsequent sorting and bulk-loading
        int id = 0;
        for (String s : dataStrings) {
            ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
            DataOutputStream dos = new DataOutputStream(baaos);
            UTF8StringSerializerDeserializer.INSTANCE.serialize(s, dos);
            tokenizer.reset(baaos.getByteArray(), 0, baaos.size());
            while (tokenizer.hasNext()) {
                tokenizer.next();
                IToken token = tokenizer.getToken();
                pairs.add(new TokenIdPair(token, id));
            }
            ++id;
        }
        Collections.sort(pairs);

        // bulk load index
        IIndexBulkLoadContext ctx = invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);

        for (TokenIdPair t : pairs) {
            tb.reset();
            tb.addField(t.tokenStorage.getByteArray(), 0,
                    t.tokenStorage.getByteArray().length);
            IntegerSerializerDeserializer.INSTANCE.serialize(t.id, tb.getDataOutput());
            tb.addFieldEndOffset();
            tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());

            try {
                invIndex.bulkLoadAddTuple(tuple, ctx);
            } catch (Exception e) {
                // NOTE(review): load failures are swallowed and only printed;
                // consider rethrowing so the test fails fast.
                e.printStackTrace();
            }
        }
        invIndex.endBulkLoad(ctx);
    }

    /**
     * Runs a specified number of randomly picked strings from dataStrings as
     * queries. We run each query, measure its time, and print its results.
     */
    private void runQueries(IInvertedIndexSearchModifier searchModifier,
            int numQueries) throws Exception {

        // Fixed seed so every invocation picks the same query strings.
        rnd.setSeed(50);

        InvertedIndexAccessor accessor = (InvertedIndexAccessor) invIndex.createAccessor();
        InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);

        for (int i = 0; i < numQueries; i++) {

            int queryIndex = Math.abs(rnd.nextInt() % dataStrings.size());
            String queryString = dataStrings.get(queryIndex);

            // Serialize query.
            queryTb.reset();
            UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
                    queryTb.getDataOutput());
            queryTb.addFieldEndOffset();
            queryTuple.reset(queryTb.getFieldEndOffsets(), queryTb.getByteArray());

            // Set query tuple in search predicate.
            searchPred.setQueryTuple(queryTuple);
            searchPred.setQueryFieldIndex(0);

            resultCursor = accessor.createSearchCursor();

            // Average over 'repeats' runs (currently a single run).
            int repeats = 1;
            double totalTime = 0;
            for (int j = 0; j < repeats; j++) {
                long timeStart = System.currentTimeMillis();
                try {
                    resultCursor.reset();
                    accessor.search(resultCursor, searchPred);
                } catch (OccurrenceThresholdPanicException e) {
                    // ignore panic queries
                }
                long timeEnd = System.currentTimeMillis();
                totalTime += timeEnd - timeStart;
            }
            double avgTime = totalTime / (double) repeats;
            StringBuilder strBuilder = new StringBuilder();
            strBuilder.append(i + ": " + "\"" + queryString + "\": " + avgTime
                    + "ms" + "\n");
            strBuilder.append("CANDIDATE RESULTS:\n");
            while (resultCursor.hasNext()) {
                resultCursor.next();
                ITupleReference resultTuple = resultCursor.getTuple();
                int id = IntegerSerializerDeserializer.getInt(
                        resultTuple.getFieldData(0),
                        resultTuple.getFieldStart(0));
                strBuilder.append(id + " " + dataStrings.get(id));
                strBuilder.append('\n');
            }
            // remove trailing newline
            strBuilder.deleteCharAt(strBuilder.length() - 1);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info(strBuilder.toString());
            }
        }
    }

    /**
     * Runs 5 random conjunctive search queries to test the
     * ConjunctiveSearchModifier.
     */
    @Test
    public void conjunctiveQueryTest() throws Exception {
        IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
        runQueries(searchModifier, 5);
    }

    /**
     * Runs 5 random jaccard-based search queries with thresholds 0.9, 0.8, 0.7.
     * Tests the JaccardSearchModifier.
     */
    @Test
    public void jaccardQueryTest() throws Exception {
        JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.9f);
        }
        searchModifier.setJaccThresh(0.9f);
        runQueries(searchModifier, 5);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.8f);
        }
        searchModifier.setJaccThresh(0.8f);
        runQueries(searchModifier, 5);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("JACCARD: " + 0.7f);
        }
        searchModifier.setJaccThresh(0.7f);
        runQueries(searchModifier, 5);
    }

    /**
     * Runs 5 random edit-distance based search queries with thresholds 1, 2, 3.
     * Tests the EditDistanceSearchModifier.
     */
    @Test
    public void editDistanceQueryTest() throws Exception {
        EditDistanceSearchModifier searchModifier = new EditDistanceSearchModifier(
                3, 0);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("EDIT DISTANCE: " + 1);
        }
        searchModifier.setEdThresh(1);
        runQueries(searchModifier, 5);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("EDIT DISTANCE: " + 2);
        }
        searchModifier.setEdThresh(2);
        runQueries(searchModifier, 5);

        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("EDIT DISTANCE: " + 3);
        }
        searchModifier.setEdThresh(3);
        runQueries(searchModifier, 5);
    }
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java
new file mode 100644
index 0000000..810c5f5
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java
@@ -0,0 +1,219 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ * 
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import junit.framework.Assert;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.AbstractUTF8Token;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.HashedUTF8WordTokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+
+public class WordTokenizerTest {
+
+    private String text = "Hello World, I would like to inform you of the importance of Foo Bar. Yes, Foo Bar. Jürgen.";
+    private byte[] inputBuffer;
+
+    private ArrayList<String> expectedUTF8Tokens = new ArrayList<String>();
+    private ArrayList<Integer> expectedHashedUTF8Tokens = new ArrayList<Integer>();
+    private ArrayList<Integer> expectedCountedHashedUTF8Tokens = new ArrayList<Integer>();
+
+    private boolean isSeparator(char c) {
+        return !(Character.isLetterOrDigit(c) || Character.getType(c) == Character.OTHER_LETTER || Character.getType(c) == Character.OTHER_NUMBER);
+    }
+    
+    private void tokenize(String text, ArrayList<String> tokens) {
+    	String lowerCaseText = text.toLowerCase();
+    	int startIx = 0;
+    	
+    	// Skip separators at beginning of string.
+    	while(isSeparator(lowerCaseText.charAt(startIx))) {
+    		startIx++;
+    	}
+    	while(startIx < lowerCaseText.length()) {
+    		while(startIx < lowerCaseText.length() && isSeparator(lowerCaseText.charAt(startIx))) {
+        	    startIx++;
+        	}
+    		int tokenStart = startIx;
+    		
+    		while(startIx < lowerCaseText.length() && !isSeparator(lowerCaseText.charAt(startIx))) {
+        	    startIx++;
+        	}
+    		int tokenEnd = startIx;
+    		
+    		// Emit token.
+    		String token = lowerCaseText.substring(tokenStart, tokenEnd);
+    		
+    		tokens.add(token);
+    	}
+    }
+    
+    @Before
+    public void init() throws IOException {
+        // serialize text into bytes
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        DataOutput dos = new DataOutputStream(baos);
+        dos.writeUTF(text);
+        inputBuffer = baos.toByteArray();
+        
+        // init expected string tokens
+        tokenize(text, expectedUTF8Tokens);
+        
+        // hashed tokens ignoring token count
+        for (int i = 0; i < expectedUTF8Tokens.size(); i++) {
+            int hash = tokenHash(expectedUTF8Tokens.get(i), 1);
+            expectedHashedUTF8Tokens.add(hash);
+        }
+
+        // hashed tokens using token count
+        HashMap<String, Integer> tokenCounts = new HashMap<String, Integer>();
+        for (int i = 0; i < expectedUTF8Tokens.size(); i++) {
+            Integer count = tokenCounts.get(expectedUTF8Tokens.get(i));
+            if (count == null) {
+                count = 1;
+                tokenCounts.put(expectedUTF8Tokens.get(i), count);
+            } else {
+                count++;
+            }
+
+            int hash = tokenHash(expectedUTF8Tokens.get(i), count);
+            expectedCountedHashedUTF8Tokens.add(hash);
+        }
+    }
+
+    @Test
+    public void testWordTokenizerWithCountedHashedUTF8Tokens() throws IOException {
+
+        HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
+        DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(false, false,
+                tokenFactory);
+
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+        int tokenCount = 0;
+
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
+
+            // serialize token
+            GrowableArray tokenStorage = new GrowableArray();
+
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
+
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
+
+            Integer hashedToken = in.readInt();
+
+            Assert.assertEquals(hashedToken, expectedCountedHashedUTF8Tokens.get(tokenCount));
+
+            tokenCount++;
+        }
+    }
+
+    @Test
+    public void testWordTokenizerWithHashedUTF8Tokens() throws IOException {
+
+        HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
+        DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
+
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+        int tokenCount = 0;
+
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
+
+            // serialize token
+            GrowableArray tokenStorage = new GrowableArray();
+
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
+
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
+
+            Integer hashedToken = in.readInt();
+
+            Assert.assertEquals(expectedHashedUTF8Tokens.get(tokenCount), hashedToken);
+
+            tokenCount++;
+        }
+    }
+
+    @Test
+    public void testWordTokenizerWithUTF8Tokens() throws IOException {
+
+        UTF8WordTokenFactory tokenFactory = new UTF8WordTokenFactory();
+        DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
+
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+        int tokenCount = 0;
+
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
+
+            // serialize hashed token
+            GrowableArray tokenStorage = new GrowableArray();
+
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
+
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
+
+            String strToken = in.readUTF();
+
+            Assert.assertEquals(expectedUTF8Tokens.get(tokenCount), strToken);
+
+            tokenCount++;
+        }
+    }
+
+    // JAQL Hash
+    public int tokenHash(String token, int tokenCount) {
+        int h = AbstractUTF8Token.GOLDEN_RATIO_32;
+        for (int i = 0; i < token.length(); i++) {
+        	h ^= token.charAt(i);
+            h *= AbstractUTF8Token.GOLDEN_RATIO_32;
+        }
+        return h + tokenCount;
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
new file mode 100644
index 0000000..72d9a78
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -0,0 +1,50 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <!-- Test module for the R-tree access method; depends on the R-tree
       implementation and the shared hyracks-test-support harness. -->
  <groupId>edu.uci.ics.hyracks</groupId>
  <artifactId>hyracks-storage-am-rtree-test</artifactId>
  <version>0.2.3-SNAPSHOT</version>
  <name>hyracks-storage-am-rtree-test</name>

  <parent>
    <groupId>edu.uci.ics.hyracks</groupId>
    <artifactId>hyracks-tests</artifactId>
    <version>0.2.3-SNAPSHOT</version>
  </parent>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <!-- NOTE(review): 2.0.2 is very old for a 1.7 source/target; sibling
             modules pin the same version, so upgrade in lock-step if at all. -->
        <version>2.0.2</version>
        <configuration>
          <source>1.7</source>
          <target>1.7</target>
        </configuration>
      </plugin>
    </plugins>
  </build>
  <dependencies>
  	<dependency>
  		<groupId>junit</groupId>
  		<artifactId>junit</artifactId>
  		<version>4.8.1</version>
  		<type>jar</type>
  		<scope>test</scope>
  	</dependency>
  	<dependency>
  		<groupId>edu.uci.ics.hyracks</groupId>
  		<artifactId>hyracks-storage-am-rtree</artifactId>
  		<version>0.2.3-SNAPSHOT</version>
  		<type>jar</type>
  		<scope>compile</scope>
  	</dependency>
  	<dependency>
  		<groupId>edu.uci.ics.hyracks</groupId>
  		<artifactId>hyracks-test-support</artifactId>
  		<version>0.2.3-SNAPSHOT</version>
  		<type>jar</type>
  		<scope>test</scope>
  	</dependency>
  </dependencies>
</project>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
new file mode 100644
index 0000000..58bca10
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeBulkLoadTest;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class RTreeBulkLoadTest extends AbstractRTreeBulkLoadTest {
+
+    public RTreeBulkLoadTest() {
+        super(1);
+    }
+
+    private final RTreeTestHarness harness = new RTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception {
+        return RTreeTestContext.create(harness.getBufferCache(), harness.getTreeFileId(), fieldSerdes,
+                valueProviderFactories, numKeys);
+    }
+
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java
new file mode 100644
index 0000000..42e933e
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeDeleteTest;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class RTreeDeleteTest extends AbstractRTreeDeleteTest {
+
+    private final RTreeTestHarness harness = new RTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception {
+        return RTreeTestContext.create(harness.getBufferCache(), harness.getTreeFileId(), fieldSerdes,
+                valueProviderFactories, numKeys);
+    }
+
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java
new file mode 100644
index 0000000..c72338e
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeExamplesTest;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+public class RTreeExamplesTest extends AbstractRTreeExamplesTest {
+    private final RTreeTestHarness harness = new RTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories)
+            throws TreeIndexException {
+        return RTreeUtils.createRTree(harness.getBufferCache(), typeTraits,
+                valueProviderFactories, rtreeCmpFactories);
+    }
+
+    @Override
+    protected int getIndexFileId() {
+        return harness.getTreeFileId();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java
new file mode 100644
index 0000000..6efa620
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeInsertTest;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+/**
+ * Tests the BTree insert operation with strings and integer fields using
+ * various numbers of key and payload fields.
+ * 
+ * Each tests first fills a BTree with randomly generated tuples. We compare the
+ * following operations against expected results: 1. Point searches for all
+ * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
+ * search for composite keys).
+ * 
+ */
+@SuppressWarnings("rawtypes")
+public class RTreeInsertTest extends AbstractRTreeInsertTest {
+
+    private final RTreeTestHarness harness = new RTreeTestHarness();
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+            IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception {
+        return RTreeTestContext.create(harness.getBufferCache(), harness.getTreeFileId(), fieldSerdes,
+                valueProviderFactories, numKeys);
+    }
+
+    @Override
+    protected Random getRandom() {
+        return harness.getRandom();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
new file mode 100644
index 0000000..8332c11
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.ArrayList;
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.RTreeCheckTuple;
+import edu.uci.ics.hyracks.storage.am.rtree.RTreeTestUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.AbstractRTreeTest;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class RTreeSearchCursorTest extends AbstractRTreeTest {
+
+    private final RTreeTestUtils rTreeTestUtils;
+    private Random rnd = new Random(50);
+
+    public RTreeSearchCursorTest() {
+        this.rTreeTestUtils = new RTreeTestUtils();
+    }
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        super.setUp();
+    }
+
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    @Test
+    public void rangeSearchTest() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
+        }
+
+        IBufferCache bufferCache = harness.getBufferCache();
+        int rtreeFileId = harness.getTreeFileId();
+
+        // Declare fields.
+        int fieldCount = 5;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 4;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                cmpFactories.length, IntegerPointable.FACTORY);
+
+        RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
+        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
+
+        IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
+        IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+
+        RTree rtree = new RTree(bufferCache, fieldCount, cmpFactories, freePageManager, interiorFrameFactory,
+                leafFrameFactory);
+        rtree.create(rtreeFileId);
+        rtree.open(rtreeFileId);
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        ITreeIndexAccessor indexAccessor = rtree.createAccessor();
+        int numInserts = 10000;
+        ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<RTreeCheckTuple>();
+        for (int i = 0; i < numInserts; i++) {
+            int p1x = rnd.nextInt();
+            int p1y = rnd.nextInt();
+            int p2x = rnd.nextInt();
+            int p2y = rnd.nextInt();
+
+            int pk = rnd.nextInt();;
+
+            TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
+                    Math.max(p1y, p2y), pk);
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+            }
+            RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
+            checkTuple.add(Math.min(p1x, p2x));
+            checkTuple.add(Math.min(p1y, p2y));
+            checkTuple.add(Math.max(p1x, p2x));
+            checkTuple.add(Math.max(p1y, p2y));
+            checkTuple.add(pk);
+
+            checkTuples.add(checkTuple);
+        }
+
+        // Build key.
+        ArrayTupleBuilder keyTb = new ArrayTupleBuilder(keyFieldCount);
+        ArrayTupleReference key = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
+
+        MultiComparator cmp = MultiComparator.create(cmpFactories);
+        ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
+        SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
+
+        RTreeCheckTuple keyCheck = (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes,
+                keyFieldCount);
+        ArrayList<RTreeCheckTuple> expectedResult = rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
+
+        rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
+        indexAccessor.search(searchCursor, searchPredicate);
+
+        rTreeTestUtils.checkExpectedResults(searchCursor, expectedResult, fieldSerdes, keyFieldCount, null);
+
+        rtree.close();
+    }
+
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
new file mode 100644
index 0000000..7520793
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeMultiThreadTest;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+public class RTreeMultiThreadTest extends AbstractRTreeMultiThreadTest {
+
+    private RTreeTestHarness harness = new RTreeTestHarness();
+
+    private RTreeTestWorkerFactory workerFactory = new RTreeTestWorkerFactory();
+
+    @Override
+    protected void setUp() throws HyracksDataException {
+        harness.setUp();
+    }
+
+    @Override
+    protected void tearDown() throws HyracksDataException {
+        harness.tearDown();
+    }
+
+    @Override
+    protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories)
+            throws TreeIndexException {
+        return RTreeUtils.createRTree(harness.getBufferCache(), typeTraits,
+                valueProviderFactories, rtreeCmpFactories);
+
+    }
+
+    @Override
+    protected ITreeIndexTestWorkerFactory getWorkerFactory() {
+        return workerFactory;
+    }
+
+    @Override
+    protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
+        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
+
+        // Insert only workload.
+        TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
+        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
+
+        // Inserts mixed with scans.
+        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN,
+                TestOperation.DISKORDER_SCAN };
+        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
+
+        // Inserts and deletes.
+        TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
+        workloadConfs.add(new TestWorkloadConf(insertDeleteOps, getUniformOpProbs(insertDeleteOps)));
+
+        // All operations mixed.
+        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN,
+                TestOperation.DISKORDER_SCAN };
+        workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
+
+        return workloadConfs;
+    }
+
+    @Override
+    protected int getFileId() {
+        return harness.getTreeFileId();
+    }
+
+    @Override
+    protected String getIndexTypeName() {
+        return "RTree";
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
new file mode 100644
index 0000000..f5867e6
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+
+public class RTreeTestWorker extends AbstractTreeIndexTestWorker {
+
+    private final RTree rtree;
+    private final int numFields;
+    private final ArrayTupleReference rearrangedTuple = new ArrayTupleReference();
+    private final ArrayTupleBuilder rearrangedTb;
+
+    public RTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+        super(dataGen, opSelector, index, numBatches);
+        rtree = (RTree) index;
+        numFields = rtree.getFieldCount();
+        rearrangedTb = new ArrayTupleBuilder(numFields);
+    }
+
+    @Override
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+        RTree.RTreeAccessor accessor = (RTree.RTreeAccessor) indexAccessor;
+        IIndexCursor searchCursor = accessor.createSearchCursor();
+        ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
+        MultiComparator cmp = accessor.getOpContext().cmp;
+        SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
+
+        switch (op) {
+            case INSERT:
+                rearrangeTuple(tuple, cmp);
+                accessor.insert(rearrangedTuple);
+                break;
+
+            case DELETE:
+                rearrangeTuple(tuple, cmp);
+                accessor.delete(rearrangedTuple);
+                break;
+
+            case SCAN:
+                searchCursor.reset();
+                rangePred.setSearchKey(null);
+                accessor.search(searchCursor, rangePred);
+                consumeCursorTuples(searchCursor);
+                break;
+
+            case DISKORDER_SCAN:
+                diskOrderScanCursor.reset();
+                accessor.diskOrderScan(diskOrderScanCursor);
+                consumeCursorTuples(diskOrderScanCursor);
+                break;
+
+            default:
+                throw new HyracksDataException("Op " + op.toString() + " not supported.");
+        }
+    }
+
+    private void rearrangeTuple(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+        // Create a tuple with rearranged key values to make sure lower points
+        // have larger coordinates than high points.
+        rearrangedTb.reset();
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+            if (c > 0) {
+                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+            } else {
+                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+            }
+        }
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+            if (c > 0) {
+                rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+            } else {
+                rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+            }
+        }
+        for (int i = cmp.getKeyFieldCount(); i < numFields; i++) {
+            rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+        }
+        rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
new file mode 100644
index 0000000..d4f14ca
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
+public class RTreeTestWorkerFactory implements ITreeIndexTestWorkerFactory {
+    @Override
+    public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
+            ITreeIndex index, int numBatches) {
+        return new RTreeTestWorker(dataGen, opSelector, index, numBatches);
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
similarity index 100%
rename from hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
rename to hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java
new file mode 100644
index 0000000..039fb0b
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.utils;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+@SuppressWarnings("rawtypes")
+/**
+ * Concrete test context for the plain (non-LSM) {@link RTree}. Exposes the
+ * tree's key-field count and comparator factories to the shared R-tree test
+ * framework, and provides a static factory that builds, creates, and opens a
+ * ready-to-use R-tree for a test.
+ */
+public class RTreeTestContext extends AbstractRTreeTestContext {
+
+    public RTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+        super(fieldSerdes, treeIndex);
+    }
+
+    @Override
+    public int getKeyFieldCount() {
+        // The RTree's comparator factories cover exactly the key fields, so the
+        // array length doubles as the key-field count.
+        RTree rtree = (RTree) treeIndex;
+        return rtree.getComparatorFactories().length;
+    }
+
+    @Override
+    public IBinaryComparatorFactory[] getComparatorFactories() {
+        RTree rtree = (RTree) treeIndex;
+        return rtree.getComparatorFactories();
+    }
+
+    /**
+     * Builds an R-tree from the given serdes (type traits and comparators are
+     * derived via {@link SerdeUtils}), creates and opens it on rtreeFileId, and
+     * wraps it in a new RTreeTestContext.
+     *
+     * @param bufferCache            buffer cache backing the tree's pages
+     * @param rtreeFileId            file id the tree is created/opened on
+     * @param fieldSerdes            serializers for all fields (keys first)
+     * @param valueProviderFactories primitive value providers for the key fields
+     * @param numKeyFields           number of leading fields that form the key
+     * @throws Exception if serde conversion or tree create/open fails
+     */
+    public static RTreeTestContext create(IBufferCache bufferCache, int rtreeFileId,
+            ISerializerDeserializer[] fieldSerdes, IPrimitiveValueProviderFactory[] valueProviderFactories,
+            int numKeyFields) throws Exception {
+        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+        IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
+        RTree rtree = RTreeUtils
+                .createRTree(bufferCache, typeTraits, valueProviderFactories, cmpFactories);
+        // Create the on-disk structure first, then open it for use by the test.
+        rtree.create(rtreeFileId);
+        rtree.open(rtreeFileId);
+        RTreeTestContext testCtx = new RTreeTestContext(fieldSerdes, rtree);
+        return testCtx;
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
new file mode 100644
index 0000000..c0cec35
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.utils;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+/**
+ * Per-test harness for R-tree tests: wires up a Hyracks task context, a buffer
+ * cache sized by page size / page count / max open files, and a uniquely named
+ * backing file in the system temp directory. Call {@link #setUp()} before and
+ * {@link #tearDown()} after each test.
+ */
+public class RTreeTestHarness {
+
+    // Fixed seed so data generation is reproducible across runs.
+    private static final long RANDOM_SEED = 50;
+    private static final int DEFAULT_PAGE_SIZE = 256;
+    private static final int DEFAULT_NUM_PAGES = 1000;
+    private static final int DEFAULT_MAX_OPEN_FILES = 10;
+    private static final int DEFAULT_HYRACKS_FRAME_SIZE = 128;
+
+    protected final int pageSize;
+    protected final int numPages;
+    protected final int maxOpenFiles;
+    protected final int hyracksFrameSize;
+
+    // Initialized in setUp(); invalid before that call.
+    protected IHyracksTaskContext ctx;
+    protected IBufferCache bufferCache;
+    protected int treeFileId;
+
+    protected final Random rnd = new Random();
+    // Timestamp format used to build a unique temp file name per setUp() call.
+    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final String tmpDir = System.getProperty("java.io.tmpdir");
+    protected final String sep = System.getProperty("file.separator");
+    protected String fileName;
+
+    /** Creates a harness with the default page/buffer/frame configuration. */
+    public RTreeTestHarness() {
+        this.pageSize = DEFAULT_PAGE_SIZE;
+        this.numPages = DEFAULT_NUM_PAGES;
+        this.maxOpenFiles = DEFAULT_MAX_OPEN_FILES;
+        this.hyracksFrameSize = DEFAULT_HYRACKS_FRAME_SIZE;
+    }
+
+    /** Creates a harness with an explicit buffer-cache and frame configuration. */
+    public RTreeTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+        this.pageSize = pageSize;
+        this.numPages = numPages;
+        this.maxOpenFiles = maxOpenFiles;
+        this.hyracksFrameSize = hyracksFrameSize;
+    }
+
+    /**
+     * Allocates the task context and buffer cache, creates and opens the
+     * backing file, and reseeds the RNG so each test starts deterministically.
+     *
+     * @throws HyracksDataException if the buffer cache or file setup fails
+     */
+    public void setUp() throws HyracksDataException {
+        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+        ctx = TestUtils.create(getHyracksFrameSize());
+        TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
+        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        treeFileId = fmp.lookupFileId(file);
+        bufferCache.openFile(treeFileId);
+        rnd.setSeed(RANDOM_SEED);
+    }
+
+    /**
+     * Closes the backing file and buffer cache and schedules the temp file for
+     * deletion.
+     *
+     * @throws HyracksDataException if closing the file or cache fails
+     */
+    public void tearDown() throws HyracksDataException {
+        bufferCache.closeFile(treeFileId);
+        bufferCache.close();
+        File f = new File(fileName);
+        // NOTE(review): deleteOnExit() defers removal until JVM shutdown, so a
+        // long-lived test JVM accumulates temp files; f.delete() here would
+        // reclaim them immediately — confirm whether deferred cleanup is intended.
+        f.deleteOnExit();
+    }
+
+    public IHyracksTaskContext getHyracksTaskContext() {
+        return ctx;
+    }
+
+    public IBufferCache getBufferCache() {
+        return bufferCache;
+    }
+
+    public int getTreeFileId() {
+        return treeFileId;
+    }
+
+    public String getFileName() {
+        return fileName;
+    }
+
+    public Random getRandom() {
+        return rnd;
+    }
+
+    public int getPageSize() {
+        return pageSize;
+    }
+
+    public int getNumPages() {
+        return numPages;
+    }
+
+    public int getHyracksFrameSize() {
+        return hyracksFrameSize;
+    }
+
+    public int getMaxOpenFiles() {
+        return maxOpenFiles;
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
new file mode 100644
index 0000000..6063de5
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -0,0 +1,49 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-common-test</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <name>hyracks-storage-common-test</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-tests</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-common</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-test-support</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
similarity index 100%
rename from hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
rename to hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
diff --git a/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
similarity index 100%
rename from hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
rename to hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
diff --git a/hyracks/hyracks-tests/pom.xml b/hyracks/hyracks-tests/pom.xml
new file mode 100644
index 0000000..054ec68
--- /dev/null
+++ b/hyracks/hyracks-tests/pom.xml
@@ -0,0 +1,21 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-tests</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>hyracks-tests</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hyracks-storage-common-test</module>
+    <module>hyracks-storage-am-btree-test</module>
+    <module>hyracks-storage-am-invertedindex-test</module>
+    <module>hyracks-storage-am-rtree-test</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-yarn/hyracks-yarn-am/pom.xml b/hyracks/hyracks-yarn/hyracks-yarn-am/pom.xml
new file mode 100644
index 0000000..d33ddc5
--- /dev/null
+++ b/hyracks/hyracks-yarn/hyracks-yarn-am/pom.xml
@@ -0,0 +1,74 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-yarn-am</artifactId>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-yarn</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.yarn.am.HyracksYarnApplicationMaster</mainClass>
+                  <name>hyracks-yarn-am</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>args4j</groupId>
+  	<artifactId>args4j</artifactId>
+  	<version>2.0.16</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-yarn-common</artifactId>
+  	<version>0.2.1-SNAPSHOT</version>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/assembly/binary-assembly.xml
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/HyracksYarnApplicationMaster.java b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/HyracksYarnApplicationMaster.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/HyracksYarnApplicationMaster.java
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/HyracksYarnApplicationMaster.java
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/AbstractProcess.java b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/AbstractProcess.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/AbstractProcess.java
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/AbstractProcess.java
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ClusterController.java b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ClusterController.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ClusterController.java
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ClusterController.java
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ContainerSpecification.java b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ContainerSpecification.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ContainerSpecification.java
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ContainerSpecification.java
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/HyracksCluster.java b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/HyracksCluster.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/HyracksCluster.java
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/HyracksCluster.java
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ManifestParser.java b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ManifestParser.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ManifestParser.java
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/ManifestParser.java
diff --git a/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/NodeController.java b/hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/NodeController.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/NodeController.java
rename to hyracks/hyracks-yarn/hyracks-yarn-am/src/main/java/edu/uci/ics/hyracks/yarn/am/manifest/NodeController.java
diff --git a/hyracks/hyracks-yarn/hyracks-yarn-client/pom.xml b/hyracks/hyracks-yarn/hyracks-yarn-client/pom.xml
new file mode 100644
index 0000000..649aa6c
--- /dev/null
+++ b/hyracks/hyracks-yarn/hyracks-yarn-client/pom.xml
@@ -0,0 +1,85 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-yarn-client</artifactId>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-yarn</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.yarn.client.LaunchHyracksApplication</mainClass>
+                  <name>launch-hyracks-application</name>
+                </program>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.yarn.client.KillHyracksApplication</mainClass>
+                  <name>kill-hyracks-application</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>args4j</groupId>
+  	<artifactId>args4j</artifactId>
+  	<version>2.0.16</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-yarn-common</artifactId>
+  	<version>0.2.1-SNAPSHOT</version>
+  </dependency>
+  <dependency>
+  	<groupId>edu.uci.ics.hyracks</groupId>
+  	<artifactId>hyracks-yarn-am</artifactId>
+  	<version>0.2.1-SNAPSHOT</version>
+    <type>zip</type>
+    <classifier>binary-assembly</classifier>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-yarn/hyracks-yarn-client/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-yarn/hyracks-yarn-client/src/main/assembly/binary-assembly.xml
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-client/src/main/assembly/binary-assembly.xml
rename to hyracks/hyracks-yarn/hyracks-yarn-client/src/main/assembly/binary-assembly.xml
diff --git a/hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/KillHyracksApplication.java b/hyracks/hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/KillHyracksApplication.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/KillHyracksApplication.java
rename to hyracks/hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/KillHyracksApplication.java
diff --git a/hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/LaunchHyracksApplication.java b/hyracks/hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/LaunchHyracksApplication.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/LaunchHyracksApplication.java
rename to hyracks/hyracks-yarn/hyracks-yarn-client/src/main/java/edu/uci/ics/hyracks/yarn/client/LaunchHyracksApplication.java
diff --git a/hyracks/hyracks-yarn/hyracks-yarn-common/pom.xml b/hyracks/hyracks-yarn/hyracks-yarn-common/pom.xml
new file mode 100644
index 0000000..fe210fd
--- /dev/null
+++ b/hyracks/hyracks-yarn/hyracks-yarn-common/pom.xml
@@ -0,0 +1,40 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-yarn-common</artifactId>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-yarn</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  <dependency>
+  	<groupId>org.apache.hadoop</groupId>
+  	<artifactId>hadoop-yarn-api</artifactId>
+  	<version>2.0.0-alpha</version>
+  </dependency>
+  <dependency>
+  	<groupId>org.apache.hadoop</groupId>
+  	<artifactId>hadoop-yarn-common</artifactId>
+  	<version>2.0.0-alpha</version>
+  </dependency>
+  <dependency>
+  	<groupId>org.apache.hadoop</groupId>
+  	<artifactId>hadoop-common</artifactId>
+  	<version>2.0.0-alpha</version>
+  </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/amrm/AMRMConnection.java b/hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/amrm/AMRMConnection.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/amrm/AMRMConnection.java
rename to hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/amrm/AMRMConnection.java
diff --git a/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnApplication.java b/hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnApplication.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnApplication.java
rename to hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnApplication.java
diff --git a/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnClientRMConnection.java b/hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnClientRMConnection.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnClientRMConnection.java
rename to hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/protocols/clientrm/YarnClientRMConnection.java
diff --git a/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/LocalResourceHelper.java b/hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/LocalResourceHelper.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/LocalResourceHelper.java
rename to hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/LocalResourceHelper.java
diff --git a/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/ResourceHelper.java b/hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/ResourceHelper.java
similarity index 100%
rename from hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/ResourceHelper.java
rename to hyracks/hyracks-yarn/hyracks-yarn-common/src/main/java/edu/uci/ics/hyracks/yarn/common/resources/ResourceHelper.java
diff --git a/hyracks/hyracks-yarn/pom.xml b/hyracks/hyracks-yarn/pom.xml
new file mode 100644
index 0000000..c6e24b5
--- /dev/null
+++ b/hyracks/hyracks-yarn/pom.xml
@@ -0,0 +1,18 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-yarn</artifactId>
+  <packaging>pom</packaging>
+  <name>hyracks-yarn</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hyracks-yarn-common</module>
+    <module>hyracks-yarn-client</module>
+    <module>hyracks-yarn-am</module>
+  </modules>
+</project>
diff --git a/hyracks/pom.xml b/hyracks/pom.xml
new file mode 100644
index 0000000..09925fb
--- /dev/null
+++ b/hyracks/pom.xml
@@ -0,0 +1,109 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>hyracks</name>
+
+  <properties>
+    <jvm.extraargs/>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-release-plugin</artifactId>
+        <version>2.0</version>
+        <configuration>
+            <goals>package source:jar javadoc:jar deploy:deploy</goals>
+        </configuration>
+      </plugin>
+      <plugin>
+      	<groupId>org.codehaus.mojo</groupId>
+      	<artifactId>versions-maven-plugin</artifactId>
+      	<version>1.2</version>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.13</version>
+        <configuration>
+            <forkMode>pertest</forkMode>
+            <argLine>-enableassertions -Djava.util.logging.config.file=${user.home}/logging.properties -Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=8000,suspend=n ${jvm.extraargs}</argLine>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <reporting>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-changelog-plugin</artifactId>
+        <version>2.2</version>
+      </plugin>
+    </plugins>
+  </reporting>
+
+  <distributionManagement>
+    <repository>
+      <id>hyracks-releases</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>hyracks-snapshots</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-snapshots/</url>
+    </snapshotRepository>
+  </distributionManagement>
+
+  <repositories>
+    <repository>
+      <id>hyracks-public</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
+    </repository>
+    <repository>
+      <id>jboss-public</id>
+      <url>https://repository.jboss.org/nexus/content/groups/public/</url>
+    </repository>
+  </repositories>
+
+  <pluginRepositories>
+    <pluginRepository>
+      <id>hyracks-public</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
+      <releases>
+        <updatePolicy>always</updatePolicy>
+      </releases>
+    </pluginRepository>
+  </pluginRepositories>
+
+  <modules>
+    <module>hyracks-ipc</module>
+    <module>hyracks-api</module>
+    <module>hyracks-dataflow-common</module>
+    <module>hyracks-dataflow-std</module>
+    <module>hyracks-dataflow-hadoop</module>
+    <module>hyracks-control</module>
+    <module>hyracks-net</module>
+    <module>hyracks-data</module>
+    <module>hyracks-cli</module>
+    <module>hyracks-storage-common</module>
+    <module>hyracks-storage-am-common</module>
+    <module>hyracks-storage-am-btree</module>
+    <module>hyracks-storage-am-invertedindex</module>
+    <module>hyracks-storage-am-rtree</module>
+    <module>hyracks-test-support</module>
+    <module>hyracks-tests</module>
+    <module>hyracks-server</module>
+    <module>hyracks-examples</module>
+    <module>hyracks-documentation</module>
+    <module>hyracks-hadoop-compat</module>
+    <!--module>hyracks-yarn</module-->
+    <module>hyracks-maven-plugins</module>
+    <module>hyracks-hdfs</module>
+    <module>hyracks-dist</module>
+  </modules>
+</project>
diff --git a/pom.xml b/pom.xml
index c642992..e4920b3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2,44 +2,33 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks</groupId>
-  <artifactId>hyracks</artifactId>
-  <version>0.2.2-SNAPSHOT</version>
+  <artifactId>fullstack</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
   <packaging>pom</packaging>
-
-  <properties>
-    <jvm.extraargs />
-  </properties>
+  <name>hyracks-ecosystem-full-stack</name>
 
   <build>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-release-plugin</artifactId>
-        <version>2.0</version>
+        <version>2.1</version>
         <configuration>
-          <goals>package source:jar javadoc:jar deploy:deploy</goals>
+            <goals>package source:jar javadoc:jar deploy:deploy</goals>
         </configuration>
       </plugin>
       <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>versions-maven-plugin</artifactId>
-        <version>1.2</version>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkMode>pertest</forkMode>
-          <argLine>-enableassertions -Djava.util.logging.config.file=${user.home}/logging.properties -Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=8000,suspend=n ${jvm.extraargs} -Xmx2048m</argLine>
-        </configuration>
+      	<groupId>org.codehaus.mojo</groupId>
+      	<artifactId>versions-maven-plugin</artifactId>
+      	<version>1.2</version>
       </plugin>
     </plugins>
   </build>
 
   <scm>
-    <connection>scm:svn:https://hyracks.googlecode.com/svn/trunk/hyracks</connection>
-    <developerConnection>scm:svn:https://hyracks.googlecode.com/svn/trunk/hyracks</developerConnection>
-    <url>http://code.google.com/p/hyracks/source/browse/#svn/trunk/hyracks</url>
+    <connection>scm:svn:https://hyracks.googlecode.com/svn/trunk/fullstack</connection>
+    <developerConnection>scm:svn:https://hyracks.googlecode.com/svn/trunk/fullstack</developerConnection>
+    <url>http://code.google.com/p/hyracks/source/browse/#svn/trunk/fullstack</url>
   </scm>
 
   <distributionManagement>
@@ -53,15 +42,6 @@
     </snapshotRepository>
   </distributionManagement>
 
-  <reporting>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-changelog-plugin</artifactId>
-      </plugin>
-    </plugins>
-  </reporting>
-
   <repositories>
     <repository>
       <id>hyracks-public</id>
@@ -84,32 +64,9 @@
   </pluginRepositories>
 
   <modules>
-    <module>hyracks-ipc</module>
-    <module>hyracks-api</module>
-    <module>hyracks-dataflow-common</module>
-    <module>hyracks-dataflow-std</module>
-    <module>hyracks-dataflow-hadoop</module>
-    <module>hyracks-control</module>
-    <module>hyracks-net</module>
-    <module>hyracks-data</module>
-    <module>hyracks-cli</module>
-    <module>hyracks-storage-common</module>
-    <module>hyracks-storage-am-common</module>
-    <module>hyracks-storage-am-bloomfilter</module>
-    <module>hyracks-storage-am-btree</module>
-    <module>hyracks-storage-am-lsm-invertedindex</module>
-    <module>hyracks-storage-am-lsm-common</module>
-    <module>hyracks-storage-am-lsm-btree</module>
-    <module>hyracks-storage-am-lsm-rtree</module>
-    <module>hyracks-storage-am-rtree</module>
-    <module>hyracks-test-support</module>
-    <module>hyracks-tests</module>
-    <module>hyracks-server</module>
-    <module>hyracks-examples</module>
-    <module>hyracks-documentation</module>
-    <module>hyracks-hadoop-compat</module>
-    <module>hyracks-algebricks</module>
-    <!--module>hyracks-yarn</module -->
-    <module>hyracks-maven-plugins</module>
+    <module>hyracks</module>
+    <module>algebricks</module>
+    <module>pregelix</module>
+    <module>hivesterix</module>
   </modules>
 </project>
diff --git a/pregelix/HyracksCodeFormatProfile.xml b/pregelix/HyracksCodeFormatProfile.xml
new file mode 100644
index 0000000..2cde66d
--- /dev/null
+++ b/pregelix/HyracksCodeFormatProfile.xml
@@ -0,0 +1,279 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<profiles version="11">
+<profile kind="CodeFormatterProfile" name="HyracksCodeFormatProfile" version="11">
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
+<setting id="org.eclipse.jdt.core.compiler.source" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
+<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="48"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="9999"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="49"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
+</profile>
+</profiles>
diff --git a/pregelix/pom.xml b/pregelix/pom.xml
new file mode 100644
index 0000000..7d08fb7
--- /dev/null
+++ b/pregelix/pom.xml
@@ -0,0 +1,108 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>pregelix</artifactId>
+  <version>0.2.3-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>pregelix</name>
+
+  <properties>
+    <jvm.extraargs />
+  </properties>
+
+  <profiles>
+    <profile>
+      <id>macosx</id>
+      <activation>
+        <os>
+          <name>mac os x</name>
+        </os>
+        <jdk>1.7</jdk>
+      </activation>
+      <properties>
+        <jvm.extraargs>-Djava.nio.channels.spi.SelectorProvider=sun.nio.ch.KQueueSelectorProvider</jvm.extraargs>
+      </properties>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-release-plugin</artifactId>
+        <version>2.0</version>
+        <configuration>
+            <goals>package source:jar javadoc:jar deploy:deploy</goals>
+        </configuration>
+      </plugin>
+      <plugin>
+      	<groupId>org.codehaus.mojo</groupId>
+      	<artifactId>versions-maven-plugin</artifactId>
+      	<version>1.2</version>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.13</version>
+        <configuration>
+            <forkMode>pertest</forkMode>
+            <argLine>-enableassertions -Djava.util.logging.config.file=${user.home}/logging.properties -Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=8000,suspend=n ${jvm.extraargs}</argLine>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <reporting>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-changelog-plugin</artifactId>
+        <version>2.2</version>
+      </plugin>
+    </plugins>
+  </reporting>
+
+  <distributionManagement>
+    <repository>
+      <id>hyracks-releases</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-releases/</url>
+    </repository>
+    <snapshotRepository>
+      <id>hyracks-snapshots</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-snapshots/</url>
+    </snapshotRepository>
+  </distributionManagement>
+
+  <repositories>
+    <repository>
+      <id>hyracks-public</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
+    </repository>
+    <repository>
+      <id>jboss-public</id>
+      <url>https://repository.jboss.org/nexus/content/groups/public/</url>
+    </repository>
+  </repositories>
+
+  <pluginRepositories>
+    <pluginRepository>
+      <id>hyracks-public</id>
+      <url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
+      <releases>
+        <updatePolicy>always</updatePolicy>
+      </releases>
+    </pluginRepository>
+  </pluginRepositories>
+
+  <modules>
+    <module>pregelix-api</module>
+    <module>pregelix-dataflow-std-base</module>
+    <module>pregelix-dataflow-std</module>
+    <module>pregelix-dataflow</module>
+    <module>pregelix-runtime</module>
+    <module>pregelix-core</module>
+    <module>pregelix-example</module>
+    <module>pregelix-dist</module>
+  </modules>
+</project>
diff --git a/pregelix/pregelix-api/pom.xml b/pregelix/pregelix-api/pom.xml
new file mode 100644
index 0000000..fe98b92
--- /dev/null
+++ b/pregelix/pregelix-api/pom.xml
@@ -0,0 +1,87 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>pregelix-api</artifactId>
+	<name>pregelix-api</name>
+
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx512m -Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>teststore*</include>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+								<include>edu.uci.*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>3.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+                        <groupId>edu.uci.ics.hyracks</groupId>
+                        <artifactId>hyracks-hdfs-core</artifactId>
+                        <version>0.2.3-SNAPSHOT</version>
+                        <type>jar</type>
+                        <scope>compile</scope>
+                </dependency>
+	</dependencies>
+</project>
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Edge.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Edge.java
new file mode 100644
index 0000000..e5f42fe
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Edge.java
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.graph;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.pregelix.api.util.BspUtils;
+
+/**
+ * The Edge class, represent an outgoing edge inside an {@link Vertex} object.
+ * 
+ * @param <I>
+ *            Vertex index
+ * @param <E>
+ *            Edge value
+ */
+@SuppressWarnings("rawtypes")
+public class Edge<I extends WritableComparable, E extends Writable> implements Writable, Configurable, Comparable {
+    /** Destination vertex id */
+    private I destVertexId = null;
+    /** Edge value */
+    private E edgeValue = null;
+    /** Configuration - Used to instantiate classes */
+    private Configuration conf = null;
+    /** Whether the edgeValue field is not null */
+    private boolean hasEdgeValue = false;
+
+    /**
+     * Constructor for reflection
+     */
+    public Edge() {
+    }
+
+    /**
+     * Create the edge with final values
+     * 
+     * @param destVertexId
+     * @param edgeValue
+     */
+    public Edge(I destVertexId, E edgeValue) {
+        this.destVertexId = destVertexId;
+        this.edgeValue = edgeValue;
+        if (edgeValue != null)
+            hasEdgeValue = true;
+    }
+
+    /**
+     * Get the destination vertex index of this edge
+     * 
+     * @return Destination vertex index of this edge
+     */
+    public I getDestVertexId() {
+        return destVertexId;
+    }
+
+    /**
+     * set the destination vertex id
+     * 
+     * @param destVertexId
+     */
+    public void setDestVertexId(I destVertexId) {
+        this.destVertexId = destVertexId;
+    }
+
+    /**
+     * Get the edge value of the edge
+     * 
+     * @return Edge value of this edge
+     */
+    public E getEdgeValue() {
+        return edgeValue;
+    }
+
+    /**
+     * set the edge of value
+     * 
+     * @param edgeValue
+     */
+    public void setEdgeValue(E edgeValue) {
+        this.edgeValue = edgeValue;
+        if (edgeValue != null)
+            hasEdgeValue = true;
+    }
+
+    @Override
+    public String toString() {
+        return "(DestVertexIndex = " + destVertexId + ", edgeValue = " + edgeValue + ")";
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        if (destVertexId == null)
+            destVertexId = (I) BspUtils.createVertexIndex(getConf());
+        destVertexId.readFields(input);
+        hasEdgeValue = input.readBoolean();
+        if (hasEdgeValue) {
+            if (edgeValue == null) {
+                edgeValue = (E) BspUtils.createEdgeValue(getConf());
+            }
+            edgeValue.readFields(input);
+        }
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        if (destVertexId == null) {
+            throw new IllegalStateException("write: Null destination vertex index");
+        }
+        destVertexId.write(output);
+        output.writeBoolean(hasEdgeValue);
+        if (hasEdgeValue) {
+            edgeValue.write(output);
+        }
+    }
+
+    @Override
+    public Configuration getConf() {
+        return conf;
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+        this.conf = conf;
+    }
+
+    public boolean equals(Edge<I, E> edge) {
+        return this.destVertexId.equals(edge.getDestVertexId());
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public int compareTo(Object o) {
+        Edge<I, E> edge = (Edge<I, E>) o;
+        return destVertexId.compareTo(edge.getDestVertexId());
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/GlobalAggregator.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/GlobalAggregator.java
new file mode 100644
index 0000000..cb27249
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/GlobalAggregator.java
@@ -0,0 +1,64 @@
+package edu.uci.ics.pregelix.api.graph;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * This is the abstract class to implement for aggregating the state of all the vertices globally in the graph.
+ * <p>
+ * The global aggregation of vertices in a distributed cluster include two phase:
+ * 1. a local phase which aggregates vertices sent from a single machine and produces
+ * the partially aggregated state;
+ * 2. a final phase which aggregates all partially aggregated states
+ * 
+ * @param <I extends Writable> vertex identifier type
+ * @param <V extends Writable> vertex value type
+ * @param <E extends Writable> edge type
+ * @param <M extends Writable> message type
+ * @param <P extends Writable>
+ *        the type of the partial aggregate state
+ * @param <F extends Writable> the type of the final aggregate value
+ */
+
+@SuppressWarnings("rawtypes")
+public abstract class GlobalAggregator<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable, P extends Writable, F extends Writable> {
+    /**
+     * initialize aggregator
+     */
+    public abstract void init();
+
+    /**
+     * step through all vertex at each slave partition
+     * 
+     * @param vertexIndex
+     * @param msg
+     * @throws IOException
+     */
+    public abstract void step(Vertex<I, V, E, M> v) throws HyracksDataException;
+
+    /**
+     * step through all intermediate aggregate result
+     * 
+     * @param partialResult
+     *            partial aggregate value
+     */
+    public abstract void step(P partialResult);
+
+    /**
+     * finish partial aggregate
+     * 
+     * @return the final aggregate value
+     */
+    public abstract P finishPartial();
+
+    /**
+     * finish final aggregate
+     * 
+     * @return the final aggregate value
+     */
+    public abstract F finishFinal();
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MessageCombiner.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MessageCombiner.java
new file mode 100644
index 0000000..e4f8ef9
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MessageCombiner.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.graph;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * This is the abstract class to implement for combining of messages that are sent to the same vertex.
+ * <p>
+ * This is similar to the concept of Combiner in Hadoop. The combining of messages in a distributed
+ * cluster include two phase:
+ * 1. a local phase which combines messages sent from a single machine and produces
+ * the partially combined message;
+ * 2. a final phase which combines messages at each receiver machine after the repartitioning (shuffling)
+ * and produces the final combined message
+ * 
+ * @param <I extends Writable> vertex identifier
+ * @param <M extends Writable> message body type
+ * @param <P extends Writable>
+ *        the type of the partially combined messages
+ */
+@SuppressWarnings("rawtypes")
+public abstract class MessageCombiner<I extends WritableComparable, M extends Writable, P extends Writable> {
+
+    /**
+     * initialize combiner
+     * 
+     * @param providedMsgList
+     *            the provided msg list for user implementation to update, which *should* be returned
+     *            by the finishFinal() method
+     */
+    public abstract void init(MsgList providedMsgList);
+
+    /**
+     * step call for local combiner
+     * 
+     * @param vertexIndex
+     *            the receiver vertex identifier
+     * @param msg
+     *            a single message body
+     * @throws HyracksDataException
+     */
+    public abstract void stepPartial(I vertexIndex, M msg) throws HyracksDataException;
+
+    /**
+     * step call for global combiner
+     * 
+     * @param vertexIndex
+     *            the receiver vertex identifier
+     * @param partialAggregate
+     *            the partial aggregate value
+     * @throws HyracksDataException
+     */
+    public abstract void stepFinal(I vertexIndex, P partialAggregate) throws HyracksDataException;
+
+    /**
+     * finish partial combiner
+     * 
+     * @return the intermediate combined message of type P
+     */
+    public abstract P finishPartial();
+
+    /**
+     * finish final combiner
+     * 
+     * @return the final message List
+     */
+    public abstract MsgList<M> finishFinal();
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MsgList.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MsgList.java
new file mode 100644
index 0000000..8d3d4c6
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MsgList.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.graph;
+
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.pregelix.api.util.ArrayListWritable;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+
+/**
+ * Wrapper around {@link ArrayListWritable} that allows the message class to be
+ * set prior to calling readFields().
+ * 
+ * @param <M>
+ *            message type
+ */
+public class MsgList<M extends Writable> extends ArrayListWritable<M> {
+    /** Defining a layout version for a serializable class. */
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Default constructor.
+     */
+    public MsgList() {
+        super();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public void setClass() {
+        setClass((Class<M>) BspUtils.getMessageValueClass(getConf()));
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Vertex.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Vertex.java
new file mode 100644
index 0000000..a8cd3db
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Vertex.java
@@ -0,0 +1,547 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.graph;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.api.util.SerDeUtils;
+
+/**
+ * User applications should all inherit {@link Vertex}, and implement their own
+ * *compute* method.
+ * 
+ * @param <I>
+ *            Vertex identifier type
+ * @param <V>
+ *            Vertex value type
+ * @param <E>
+ *            Edge value type
+ * @param <M>
+ *            Message value type
+ */
+@SuppressWarnings("rawtypes")
+public abstract class Vertex<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable>
+        implements Writable {
+    private static long superstep = 0;
+    /** Class-wide number of vertices */
+    private static long numVertices = -1;
+    /** Class-wide number of edges */
+    private static long numEdges = -1;
+    /** Vertex id */
+    private I vertexId = null;
+    /** Vertex value */
+    private V vertexValue = null;
+    /** Map of destination vertices and their edge values */
+    private final List<Edge<I, E>> destEdgeList = new ArrayList<Edge<I, E>>();
+    /** If true, do not do anymore computation on this vertex. */
+    boolean halt = false;
+    /** List of incoming messages from the previous superstep */
+    private final List<M> msgList = new ArrayList<M>();
+    /** map context */
+    private static TaskAttemptContext context = null;
+    /** a delegate for hyracks stuff */
+    private VertexDelegate<I, V, E, M> delegate = new VertexDelegate<I, V, E, M>(this);
+    /** this vertex is updated or not */
+    private boolean updated = false;
+    /** has outgoing messages */
+    private boolean hasMessage = false;
+
+    /**
+     * use object pool for re-using objects
+     */
+    private List<Edge<I, E>> edgePool = new ArrayList<Edge<I, E>>();
+    private List<M> msgPool = new ArrayList<M>();
+    private List<V> valuePool = new ArrayList<V>();
+    private int usedEdge = 0;
+    private int usedMessage = 0;
+    private int usedValue = 0;
+
+    /**
+     * The key method that users need to implement
+     * 
+     * @param msgIterator
+     *            an iterator of incoming messages
+     */
+    public abstract void compute(Iterator<M> msgIterator);
+
+    /**
+     * Add an edge for the vertex.
+     * 
+     * @param targetVertexId
+     * @param edgeValue
+     * @return successful or not
+     */
+    public final boolean addEdge(I targetVertexId, E edgeValue) {
+        Edge<I, E> edge = this.allocateEdge();
+        edge.setDestVertexId(targetVertexId);
+        edge.setEdgeValue(edgeValue);
+        destEdgeList.add(edge);
+        return true;
+    }
+
+    /**
+     * Initialize a new vertex
+     * 
+     * @param vertexId
+     * @param vertexValue
+     * @param edges
+     * @param messages
+     */
+    public void initialize(I vertexId, V vertexValue, Map<I, E> edges, List<M> messages) {
+        if (vertexId != null) {
+            setVertexId(vertexId);
+        }
+        if (vertexValue != null) {
+            setVertexValue(vertexValue);
+        }
+        destEdgeList.clear();
+        if (edges != null && !edges.isEmpty()) {
+            for (Map.Entry<I, E> entry : edges.entrySet()) {
+                destEdgeList.add(new Edge<I, E>(entry.getKey(), entry.getValue()));
+            }
+        }
+        if (messages != null && !messages.isEmpty()) {
+            msgList.addAll(messages);
+        }
+    }
+
+    /**
+     * reset a vertex object: clear its internal states
+     */
+    public void reset() {
+        usedEdge = 0;
+        usedMessage = 0;
+        usedValue = 0;
+    }
+
+    /**
+     * Set the vertex id
+     * 
+     * @param vertexId
+     */
+    public final void setVertexId(I vertexId) {
+        this.vertexId = vertexId;
+        delegate.setVertexId(vertexId);
+    }
+
+    /**
+     * Get the vertex id
+     * 
+     * @return vertex id
+     */
+    public final I getVertexId() {
+        return vertexId;
+    }
+
+    /**
+     * Get the vertex value
+     * 
+     * @return the vertex value
+     */
+    public final V getVertexValue() {
+        return vertexValue;
+    }
+
+    /**
+     * Set the vertex value
+     * 
+     * @param vertexValue
+     */
+    public final void setVertexValue(V vertexValue) {
+        this.vertexValue = vertexValue;
+        this.updated = true;
+    }
+
+    /***
+     * Send a message to a specific vertex
+     * 
+     * @param id
+     *            the receiver vertex id
+     * @param msg
+     *            the message
+     */
+    public final void sendMsg(I id, M msg) {
+        if (msg == null) {
+            throw new IllegalArgumentException("sendMsg: Cannot send null message to " + id);
+        }
+        delegate.sendMsg(id, msg);
+        this.hasMessage = true;
+    }
+
+    /**
+     * Send a message to all direct outgoing neighbors
+     * 
+     * @param msg
+     *            the message
+     */
+    public final void sendMsgToAllEdges(M msg) {
+        if (msg == null) {
+            throw new IllegalArgumentException("sendMsgToAllEdges: Cannot send null message to all edges");
+        }
+        for (Edge<I, E> edge : destEdgeList) {
+            sendMsg(edge.getDestVertexId(), msg);
+        }
+    }
+
+    /**
+     * Vote to halt. Once all vertex vote to halt and no more messages, a
+     * Pregelix job will terminate.
+     */
+    public final void voteToHalt() {
+        halt = true;
+    }
+
+    /**
+     * @return the vertex is halted (true) or not (false)
+     */
+    public final boolean isHalted() {
+        return halt;
+    }
+
+    @Override
+    final public void readFields(DataInput in) throws IOException {
+        reset();
+        if (vertexId == null)
+            vertexId = BspUtils.<I> createVertexIndex(getContext().getConfiguration());
+        vertexId.readFields(in);
+        delegate.setVertexId(vertexId);
+        boolean hasVertexValue = in.readBoolean();
+
+        if (hasVertexValue) {
+            vertexValue = allocateValue();
+            vertexValue.readFields(in);
+            delegate.setVertex(this);
+        }
+        destEdgeList.clear();
+        long edgeMapSize = SerDeUtils.readVLong(in);
+        for (long i = 0; i < edgeMapSize; ++i) {
+            Edge<I, E> edge = allocateEdge();
+            edge.setConf(getContext().getConfiguration());
+            edge.readFields(in);
+            addEdge(edge);
+        }
+        msgList.clear();
+        long msgListSize = SerDeUtils.readVLong(in);
+        for (long i = 0; i < msgListSize; ++i) {
+            M msg = allocateMessage();
+            msg.readFields(in);
+            msgList.add(msg);
+        }
+        halt = in.readBoolean();
+        updated = false;
+        hasMessage = false;
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        vertexId.write(out);
+        out.writeBoolean(vertexValue != null);
+        if (vertexValue != null) {
+            vertexValue.write(out);
+        }
+        SerDeUtils.writeVLong(out, destEdgeList.size());
+        for (Edge<I, E> edge : destEdgeList) {
+            edge.write(out);
+        }
+        SerDeUtils.writeVLong(out, msgList.size());
+        for (M msg : msgList) {
+            msg.write(out);
+        }
+        out.writeBoolean(halt);
+    }
+
+    /**
+     * Get the list of incoming messages
+     * 
+     * @return the list of messages
+     */
+    public List<M> getMsgList() {
+        return msgList;
+    }
+
+    /**
+     * Get outgoing edge list
+     * 
+     * @return a list of outgoing edges
+     */
+    public List<Edge<I, E>> getEdges() {
+        return this.destEdgeList;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public String toString() {
+        Collections.sort(destEdgeList);
+        StringBuffer edgeBuffer = new StringBuffer();
+        edgeBuffer.append("(");
+        for (Edge<I, E> edge : destEdgeList) {
+            edgeBuffer.append(edge.getDestVertexId()).append(",");
+        }
+        edgeBuffer.append(")");
+        return "Vertex(id=" + getVertexId() + ",value=" + getVertexValue() + ", edges=" + edgeBuffer + ")";
+    }
+
+    /**
+     * Get the number of outgoing edges
+     * 
+     * @return the number of outgoing edges
+     */
+    public int getNumOutEdges() {
+        return destEdgeList.size();
+    }
+
+    /**
+     * Pregelix internal use only
+     * 
+     * @param writers
+     */
+    public void setOutputWriters(List<IFrameWriter> writers) {
+        delegate.setOutputWriters(writers);
+    }
+
+    /**
+     * Pregelix internal use only
+     * 
+     * @param writers
+     */
+    public void setOutputAppenders(List<FrameTupleAppender> appenders) {
+        delegate.setOutputAppenders(appenders);
+    }
+
+    /**
+     * Pregelix internal use only
+     * 
+     * @param writers
+     */
+    public void setOutputTupleBuilders(List<ArrayTupleBuilder> tbs) {
+        delegate.setOutputTupleBuilders(tbs);
+    }
+
+    /**
+     * Pregelix internal use only
+     * 
+     * @param writers
+     */
+    public void finishCompute() throws IOException {
+        delegate.finishCompute();
+    }
+
+    /**
+     * Pregelix internal use only
+     */
+    public boolean hasUpdate() {
+        return this.updated;
+    }
+
+    /**
+     * Pregelix internal use only
+     */
+    public boolean hasMessage() {
+        return this.hasMessage;
+    }
+
+    /**
+     * sort the edges
+     */
+    @SuppressWarnings("unchecked")
+    public void sortEdges() {
+        Collections.sort(destEdgeList);
+    }
+
+    /**
+     * Allocate a new edge from the edge pool
+     */
+    private Edge<I, E> allocateEdge() {
+        Edge<I, E> edge;
+        if (usedEdge < edgePool.size()) {
+            edge = edgePool.get(usedEdge);
+            usedEdge++;
+        } else {
+            edge = new Edge<I, E>();
+            edgePool.add(edge);
+            usedEdge++;
+        }
+        return edge;
+    }
+
+    /**
+     * Allocate a new message from the message pool
+     */
+    private M allocateMessage() {
+        M message;
+        if (usedMessage < msgPool.size()) {
+            message = msgPool.get(usedMessage);
+            usedMessage++;
+        } else {
+            message = BspUtils.<M> createMessageValue(getContext().getConfiguration());
+            msgPool.add(message);
+            usedMessage++;
+        }
+        return message;
+    }
+
+    /**
+     * Set the global superstep for all the vertices (internal use)
+     * 
+     * @param superstep
+     *            New superstep
+     */
+    public static final void setSuperstep(long superstep) {
+        Vertex.superstep = superstep;
+    }
+
+    /**
+     * Add an outgoing edge into the vertex
+     * 
+     * @param edge
+     *            the edge to be added
+     * @return true if the edge list changed as a result of this call
+     */
+    public boolean addEdge(Edge<I, E> edge) {
+        edge.setConf(getContext().getConfiguration());
+        return destEdgeList.add(edge);
+    }
+
+    /**
+     * remove an outgoing edge in the graph
+     * 
+     * @param edge
+     *            the edge to be removed
+     * @return true if the edge is in the edge list of the vertex
+     */
+    public boolean removeEdge(Edge<I, E> edge) {
+        return destEdgeList.remove(edge);
+    }
+
+    /**
+     * Add a new vertex into the graph
+     * 
+     * @param vertexId
+     *            the vertex id
+     * @param vertex
+     *            the vertex
+     */
+    public final void addVertex(I vertexId, V vertex) {
+        delegate.addVertex(vertexId, vertex);
+    }
+
+    /**
+     * Delete a vertex from id
+     * 
+     * @param vertexId
+     *            the vertex id
+     */
+    public final void deleteVertex(I vertexId) {
+        delegate.deleteVertex(vertexId);
+    }
+
+    /**
+     * Allocate a vertex value from the object pool
+     * 
+     * @return a vertex value instance
+     */
+    private V allocateValue() {
+        V value;
+        if (usedValue < valuePool.size()) {
+            value = valuePool.get(usedValue);
+            usedValue++;
+        } else {
+            value = BspUtils.<V> createVertexValue(getContext().getConfiguration());
+            valuePool.add(value);
+            usedValue++;
+        }
+        return value;
+    }
+
+    /**
+     * Get the current global superstep number
+     * 
+     * @return the current superstep number
+     */
+    public static final long getSuperstep() {
+        return superstep;
+    }
+
+    /**
+     * Set the total number of vertices from the last superstep.
+     * 
+     * @param numVertices
+     *            Aggregate vertices in the last superstep
+     */
+    public static final void setNumVertices(long numVertices) {
+        Vertex.numVertices = numVertices;
+    }
+
+    /**
+     * Get the number of vertices in the graph
+     * 
+     * @return the number of vertices in the graph
+     */
+    public static final long getNumVertices() {
+        return numVertices;
+    }
+
+    /**
+     * Set the total number of edges from the last superstep.
+     * 
+     * @param numEdges
+     *            Aggregate edges in the last superstep
+     */
+    public static void setNumEdges(long numEdges) {
+        Vertex.numEdges = numEdges;
+    }
+
+    /**
+     * Get the number of edges from this graph
+     * 
+     * @return the number of edges in the graph
+     */
+    public static final long getNumEdges() {
+        return numEdges;
+    }
+
+    /**
+     * Pregelix internal use only
+     */
+    public static final TaskAttemptContext getContext() {
+        return context;
+    }
+
+    /**
+     * Pregelix internal use only
+     * 
+     * @param context
+     */
+    public static final void setContext(TaskAttemptContext context) {
+        Vertex.context = context;
+    }
+
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/VertexDelegate.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/VertexDelegate.java
new file mode 100644
index 0000000..d949bc5
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/VertexDelegate.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.graph;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.pregelix.api.util.FrameTupleUtils;
+
+/**
+ * Helper a {@link Vertex} delegates its runtime output to: packages
+ * messages, inserted/deleted vertices, and "still alive" records into
+ * Hyracks frame tuples and pushes them to the corresponding writers.
+ * The writers, appenders and tuple builders are injected via the
+ * setOutput* methods before compute() runs.
+ */
+@SuppressWarnings("rawtypes")
+class VertexDelegate<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> {
+    /** Vertex id */
+    private I vertexId = null;
+    /** Vertex value */
+    private Vertex vertex = null;
+
+    /** message tuple builder */
+    private ArrayTupleBuilder message;
+    private IFrameWriter msgWriter;
+    private FrameTupleAppender appenderMsg;
+
+    /** alive tuple builder */
+    private ArrayTupleBuilder alive;
+    private IFrameWriter aliveWriter;
+    private FrameTupleAppender appenderAlive;
+
+    /** the tuple for insert */
+    private ArrayTupleBuilder insertTb;
+    private IFrameWriter insertWriter;
+    private FrameTupleAppender appenderInsert;
+
+    /** the tuple for delete */
+    private ArrayTupleBuilder deleteTb;
+    private IFrameWriter deleteWriter;
+    private FrameTupleAppender appenderDelete;
+
+    /** message list */
+    private MsgList dummyMessageList = new MsgList();
+    /** whether alive message should be pushed out */
+    private boolean pushAlive;
+
+    public VertexDelegate(Vertex vertex) {
+        this.vertex = vertex;
+    }
+
+    /**
+     * Called after the vertex's compute() finishes: emits an
+     * (id, empty-message-list) "alive" tuple for vertices that have not
+     * voted to halt, so they are scheduled again next superstep.
+     */
+    public void finishCompute() throws IOException {
+        // package alive info
+        if (pushAlive && !vertex.isHalted()) {
+            alive.reset();
+            DataOutput outputAlive = alive.getDataOutput();
+            vertexId.write(outputAlive);
+            alive.addFieldEndOffset();
+            dummyMessageList.write(outputAlive);
+            alive.addFieldEndOffset();
+            FrameTupleUtils.flushTuple(appenderAlive, alive, aliveWriter);
+        }
+    }
+
+    /**
+     * Send a message to another vertex: builds an (id, msg) tuple and
+     * flushes it through the message channel.
+     */
+    public final void sendMsg(I id, M msg) {
+        if (msg == null) {
+            throw new IllegalArgumentException("sendMsg: Cannot send null message to " + id);
+        }
+
+        /**
+         * send out message along message channel
+         */
+        try {
+            message.reset();
+            DataOutput outputMsg = message.getDataOutput();
+            id.write(outputMsg);
+            message.addFieldEndOffset();
+            msg.write(outputMsg);
+            message.addFieldEndOffset();
+            FrameTupleUtils.flushTuple(appenderMsg, message, msgWriter);
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public final void setVertex(Vertex vertex) {
+        this.vertex = vertex;
+    }
+
+    public final void setVertexId(I vertexId) {
+        this.vertexId = vertexId;
+    }
+
+    /** Emit an (id, value) tuple on the insert channel to add a vertex. */
+    public final void addVertex(I vertexId, V vertex) {
+        try {
+            insertTb.reset();
+            DataOutput outputInsert = insertTb.getDataOutput();
+            vertexId.write(outputInsert);
+            insertTb.addFieldEndOffset();
+            vertex.write(outputInsert);
+            insertTb.addFieldEndOffset();
+            FrameTupleUtils.flushTuple(appenderInsert, insertTb, insertWriter);
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    /** Emit an (id) tuple on the delete channel to remove a vertex. */
+    public final void deleteVertex(I vertexId) {
+        try {
+            deleteTb.reset();
+            DataOutput outputDelete = deleteTb.getDataOutput();
+            vertexId.write(outputDelete);
+            deleteTb.addFieldEndOffset();
+            FrameTupleUtils.flushTuple(appenderDelete, deleteTb, deleteWriter);
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    /**
+     * Wire up the output channels. Fixed order: msg, insert, delete; an
+     * optional trailing writer (when more than 3 are given) is the alive
+     * channel and enables alive-record emission.
+     */
+    public final void setOutputWriters(List<IFrameWriter> outputs) {
+        msgWriter = outputs.get(0);
+        insertWriter = outputs.get(1);
+        deleteWriter = outputs.get(2);
+        if (outputs.size() > 3) {
+            aliveWriter = outputs.get(outputs.size() - 1);
+            pushAlive = true;
+        }
+    }
+
+    /** Same ordering convention as {@link #setOutputWriters(List)}. */
+    public final void setOutputAppenders(List<FrameTupleAppender> appenders) {
+        appenderMsg = appenders.get(0);
+        appenderInsert = appenders.get(1);
+        appenderDelete = appenders.get(2);
+        if (appenders.size() > 3) {
+            appenderAlive = appenders.get(appenders.size() - 1);
+        }
+    }
+
+    /** Same ordering convention as {@link #setOutputWriters(List)}. */
+    public final void setOutputTupleBuilders(List<ArrayTupleBuilder> tbs) {
+        message = tbs.get(0);
+        insertTb = tbs.get(1);
+        deleteTb = tbs.get(2);
+        if (tbs.size() > 3) {
+            alive = tbs.get(tbs.size() - 1);
+        }
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/BasicGenInputSplit.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/BasicGenInputSplit.java
new file mode 100644
index 0000000..ea33691
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/BasicGenInputSplit.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.Serializable;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+
+/**
+ * This InputSplit will not give any ordering or location data. It is used
+ * internally by BspInputFormat (which determines how many tasks to run the
+ * application on). Users should not use this directly.
+ */
+public class BasicGenInputSplit extends FileSplit implements Writable, Serializable {
+    private static final long serialVersionUID = 1L;
+    /** Number of splits */
+    private int numSplits = -1;
+    /** Split index */
+    private int splitIndex = -1;
+
+    /** Default constructor, required for Writable deserialization. */
+    public BasicGenInputSplit() {
+        super(null, 0, 0, null);
+    }
+
+    /**
+     * @param splitIndex
+     *            zero-based index of this split
+     * @param numSplits
+     *            total number of generated splits
+     */
+    public BasicGenInputSplit(int splitIndex, int numSplits) {
+        super(null, 0, 0, null);
+        this.splitIndex = splitIndex;
+        this.numSplits = numSplits;
+    }
+
+    /** Generated splits carry no file data, so the length is always zero. */
+    @Override
+    public long getLength() {
+        return 0;
+    }
+
+    /** No locality preference: any worker may process this split. */
+    @Override
+    public String[] getLocations() throws IOException {
+        return new String[] {};
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+        splitIndex = in.readInt();
+        numSplits = in.readInt();
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        out.writeInt(splitIndex);
+        out.writeInt(numSplits);
+    }
+
+    public int getSplitIndex() {
+        return splitIndex;
+    }
+
+    public int getNumSplits() {
+        return numSplits;
+    }
+
+    @Override
+    public String toString() {
+        // Close the quote that the original left dangling.
+        return "'" + getClass().getCanonicalName() + ", index=" + getSplitIndex() + ", num=" + getNumSplits() + "'";
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexInputFormat.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexInputFormat.java
new file mode 100644
index 0000000..98076fd
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexInputFormat.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * Use this to load data for a BSP application. Note that the InputSplit must
+ * also implement Writable. The InputSplits will determine the partitioning of
+ * vertices across the mappers, so keep that in consideration when implementing
+ * getSplits().
+ * 
+ * @param <I>
+ *            Vertex id
+ * @param <V>
+ *            Vertex value
+ * @param <E>
+ *            Edge value
+ * @param <M>
+ *            Message data
+ */
+@SuppressWarnings("rawtypes")
+public abstract class VertexInputFormat<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> {
+    /**
+     * Logically split the vertices for a graph processing application.
+     * Each {@link InputSplit} is then assigned to a worker for processing.
+     * <p>
+     * <i>Note</i>: The split is a <i>logical</i> split of the inputs and the input files are not physically split into chunks. For e.g. a split could be <i>&lt;input-file-path, start, offset&gt;</i> tuple. The InputFormat also creates the {@link VertexReader} to read the {@link InputSplit}. Also, the number of workers is a hint given to the developer to try to intelligently determine how many splits to create (if this is adjustable) at runtime.
+     * 
+     * @param context
+     *            Context of the job
+     * @param numWorkers
+     *            Number of workers used for this job
+     * @return an array of {@link InputSplit}s for the job.
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    public abstract List<InputSplit> getSplits(JobContext context, int numWorkers) throws IOException,
+            InterruptedException;
+
+    /**
+     * Create a vertex reader for a given split. The framework will call {@link VertexReader#initialize(InputSplit, TaskAttemptContext)} before
+     * the split is used.
+     * 
+     * @param split
+     *            the split to be read
+     * @param context
+     *            the information about the task
+     * @return a new record reader
+     * @throws IOException
+     */
+    public abstract VertexReader<I, V, E, M> createVertexReader(InputSplit split, TaskAttemptContext context)
+            throws IOException;
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexOutputFormat.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexOutputFormat.java
new file mode 100644
index 0000000..6a761a6
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexOutputFormat.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.OutputCommitter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * Implement to output the graph after the computation. It is modeled directly
+ * after the Hadoop OutputFormat.
+ * 
+ * @param <I>
+ *            Vertex index value
+ * @param <V>
+ *            Vertex value
+ * @param <E>
+ *            Edge value
+ */
+@SuppressWarnings("rawtypes")
+public abstract class VertexOutputFormat<I extends WritableComparable, V extends Writable, E extends Writable> {
+    /**
+     * Create a vertex writer for a given split. The framework will call {@link VertexReader#initialize(InputSplit, TaskAttemptContext)} before
+     * the split is used.
+     * 
+     * @param context
+     *            the information about the task
+     * @return a new vertex writer
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    public abstract VertexWriter<I, V, E> createVertexWriter(TaskAttemptContext context) throws IOException,
+            InterruptedException;
+
+    /**
+     * Check for validity of the output-specification for the job. (Copied from
+     * Hadoop OutputFormat)
+     * <p>
+     * This validates the output specification for the job when the job is submitted. Typically it checks that the output does not already exist, throwing an exception when it does, so that output is not overwritten.
+     * </p>
+     * 
+     * @param context
+     *            information about the job
+     * @throws IOException
+     *             when output should not be attempted
+     */
+    public abstract void checkOutputSpecs(JobContext context) throws IOException, InterruptedException;
+
+    /**
+     * Get the output committer for this output format. This is responsible for
+     * ensuring the output is committed correctly. (Copied from Hadoop
+     * OutputFormat)
+     * 
+     * @param context
+     *            the task context
+     * @return an output committer
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    public abstract OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException,
+            InterruptedException;
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexReader.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexReader.java
new file mode 100644
index 0000000..3e899b8
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexReader.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+
+/**
+ * Analogous to {@link RecordReader} for vertices. Will read the vertices from
+ * an input split.
+ * 
+ * @param <I>
+ *            Vertex id
+ * @param <V>
+ *            Vertex data
+ * @param <E>
+ *            Edge data
+ * @param <M>
+ *            Message data
+ */
+@SuppressWarnings("rawtypes")
+public interface VertexReader<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> {
+    /**
+     * Use the input split and context to set up reading the vertices.
+     * Guaranteed to be called prior to any other function.
+     * 
+     * @param inputSplit
+     *            Input split to be used for reading vertices.
+     * @param context
+     *            Context from the task.
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    void initialize(InputSplit inputSplit, TaskAttemptContext context) throws IOException, InterruptedException;
+
+    /**
+     * @return false iff there are no more vertices
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    boolean nextVertex() throws IOException, InterruptedException;
+
+    /**
+     * Get the current vertex.
+     * 
+     * @return the current vertex which has been read. nextVertex() should be
+     *         called first.
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    Vertex<I, V, E, M> getCurrentVertex() throws IOException, InterruptedException;
+
+    /**
+     * Close this {@link VertexReader} to future operations.
+     * 
+     * @throws IOException
+     */
+    void close() throws IOException;
+
+    /**
+     * How much of the input has the {@link VertexReader} consumed, i.e. how
+     * much has been processed so far?
+     * 
+     * @return Progress from <code>0.0</code> to <code>1.0</code>.
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    float getProgress() throws IOException, InterruptedException;
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexWriter.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexWriter.java
new file mode 100644
index 0000000..fcad020
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/VertexWriter.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+
+/**
+ * Implement to output a vertex range of the graph after the computation.
+ * Analogous to a Hadoop RecordWriter for vertices.
+ * 
+ * @param <I>
+ *            Vertex id
+ * @param <V>
+ *            Vertex value
+ * @param <E>
+ *            Edge value
+ */
+@SuppressWarnings("rawtypes")
+public interface VertexWriter<I extends WritableComparable, V extends Writable, E extends Writable> {
+    /**
+     * Use the context to set up writing the vertices. Guaranteed to be called
+     * prior to any other function.
+     * 
+     * @param context
+     *            Context used to write the vertices.
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    void initialize(TaskAttemptContext context) throws IOException, InterruptedException;
+
+    /**
+     * Writes the next vertex and associated data
+     * 
+     * @param vertex
+     *            the vertex to write out
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    void writeVertex(Vertex<I, V, E, ?> vertex) throws IOException, InterruptedException;
+
+    /**
+     * Close this {@link VertexWriter} to future operations.
+     * 
+     * @param context
+     *            the context of the task
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    void close(TaskAttemptContext context) throws IOException, InterruptedException;
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/generated/GeneratedVertexInputFormat.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/generated/GeneratedVertexInputFormat.java
new file mode 100644
index 0000000..7e7825a
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/generated/GeneratedVertexInputFormat.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io.generated;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+
+import edu.uci.ics.pregelix.api.io.BasicGenInputSplit;
+import edu.uci.ics.pregelix.api.io.VertexInputFormat;
+
+/**
+ * This VertexInputFormat is meant for testing/debugging. It simply generates
+ * some vertex data that can be consumed by test applications.
+ */
+@SuppressWarnings("rawtypes")
+public abstract class GeneratedVertexInputFormat<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable>
+        extends VertexInputFormat<I, V, E, M> {
+
+    @Override
+    public List<InputSplit> getSplits(JobContext context, int numWorkers) throws IOException, InterruptedException {
+        // The splits carry no real data — the VertexReader synthesizes all
+        // test vertices — so we only need one placeholder split per worker.
+        List<InputSplit> splits = new ArrayList<InputSplit>();
+        for (int worker = 0; worker < numWorkers; ++worker) {
+            splits.add(new BasicGenInputSplit(worker, numWorkers));
+        }
+        return splits;
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/generated/GeneratedVertexReader.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/generated/GeneratedVertexReader.java
new file mode 100644
index 0000000..370583b
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/generated/GeneratedVertexReader.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io.generated;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.io.BasicGenInputSplit;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+
+/**
+ * Used by GeneratedVertexInputFormat to read some generated data
+ * 
+ * @param <I>
+ *            Vertex index value
+ * @param <V>
+ *            Vertex value
+ * @param <E>
+ *            Edge value
+ */
+@SuppressWarnings("rawtypes")
+public abstract class GeneratedVertexReader<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable>
+        implements VertexReader<I, V, E, M> {
+    /** Records read so far */
+    protected long recordsRead = 0;
+    /** Total records to read (on this split alone) */
+    protected long totalRecords = 0;
+    /** The input split from initialize(). */
+    protected BasicGenInputSplit inputSplit = null;
+    /** Reverse the id order? */
+    protected boolean reverseIdOrder;
+
+    protected Configuration configuration = null;
+
+    public static final String READER_VERTICES = "GeneratedVertexReader.reader_vertices";
+    public static final long DEFAULT_READER_VERTICES = 10;
+    public static final String REVERSE_ID_ORDER = "GeneratedVertexReader.reverseIdOrder";
+    /** Default for {@link #REVERSE_ID_ORDER}. */
+    public static final boolean DEFAULT_REVERSE_ID_ORDER = false;
+    /**
+     * @deprecated misspelled; kept for backward compatibility — use
+     *             {@link #DEFAULT_REVERSE_ID_ORDER} instead.
+     */
+    @Deprecated
+    public static final boolean DEAFULT_REVERSE_ID_ORDER = DEFAULT_REVERSE_ID_ORDER;
+
+    public GeneratedVertexReader() {
+    }
+
+    @Override
+    public final void initialize(InputSplit inputSplit, TaskAttemptContext context) throws IOException {
+        configuration = context.getConfiguration();
+        totalRecords = configuration.getLong(GeneratedVertexReader.READER_VERTICES,
+                GeneratedVertexReader.DEFAULT_READER_VERTICES);
+        reverseIdOrder = configuration.getBoolean(GeneratedVertexReader.REVERSE_ID_ORDER,
+                GeneratedVertexReader.DEFAULT_REVERSE_ID_ORDER);
+        this.inputSplit = (BasicGenInputSplit) inputSplit;
+    }
+
+    @Override
+    public void close() throws IOException {
+    }
+
+    @Override
+    public final float getProgress() throws IOException {
+        // The VertexReader contract requires progress in [0.0, 1.0]; the
+        // original returned a percentage (0-100). Also guard against a
+        // zero totalRecords configuration to avoid division by zero.
+        if (totalRecords <= 0) {
+            return 1.0f;
+        }
+        return Math.min(1.0f, recordsRead / (float) totalRecords);
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/text/TextVertexInputFormat.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/text/TextVertexInputFormat.java
new file mode 100644
index 0000000..9fcb1c6
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/text/TextVertexInputFormat.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io.text;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+
+import edu.uci.ics.pregelix.api.io.VertexInputFormat;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+
+/**
+ * Abstract class that users should subclass to use their own text based vertex
+ * input format.
+ * 
+ * @param <I>
+ *            Vertex index value
+ * @param <V>
+ *            Vertex value
+ * @param <E>
+ *            Edge value
+ * @param <M>
+ *            Message value
+ */
+@SuppressWarnings("rawtypes")
+public abstract class TextVertexInputFormat<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable>
+        extends VertexInputFormat<I, V, E, M> {
+    /** Uses the TextInputFormat to do everything */
+    protected TextInputFormat textInputFormat = new TextInputFormat();
+
+    /**
+     * Abstract class to be implemented by the user based on their specific
+     * vertex input. Easiest to ignore the key value separator and only use key
+     * instead.
+     * 
+     * @param <I>
+     *            Vertex index value
+     * @param <V>
+     *            Vertex value
+     * @param <E>
+     *            Edge value
+     * @param <M>
+     *            Message value
+     */
+    public static abstract class TextVertexReader<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable>
+            implements VertexReader<I, V, E, M> {
+        /** Internal line record reader */
+        private final RecordReader<LongWritable, Text> lineRecordReader;
+        /** Context passed to initialize */
+        private TaskAttemptContext context;
+
+        /**
+         * Initialize with the LineRecordReader.
+         * 
+         * @param lineRecordReader
+         *            Line record reader from TextInputFormat
+         */
+        public TextVertexReader(RecordReader<LongWritable, Text> lineRecordReader) {
+            this.lineRecordReader = lineRecordReader;
+        }
+
+        @Override
+        public void initialize(InputSplit inputSplit, TaskAttemptContext context) throws IOException,
+                InterruptedException {
+            lineRecordReader.initialize(inputSplit, context);
+            this.context = context;
+        }
+
+        @Override
+        public void close() throws IOException {
+            lineRecordReader.close();
+        }
+
+        @Override
+        public float getProgress() throws IOException, InterruptedException {
+            return lineRecordReader.getProgress();
+        }
+
+        /**
+         * Get the line record reader.
+         * 
+         * @return Record reader to be used for reading.
+         */
+        protected RecordReader<LongWritable, Text> getRecordReader() {
+            return lineRecordReader;
+        }
+
+        /**
+         * Get the context.
+         * 
+         * @return Context passed to initialize.
+         */
+        protected TaskAttemptContext getContext() {
+            return context;
+        }
+    }
+
+    @Override
+    public List<InputSplit> getSplits(JobContext context, int numWorkers) throws IOException, InterruptedException {
+        // Ignore the hint of numWorkers here since we are using TextInputFormat
+        // to do this for us
+        return textInputFormat.getSplits(context);
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/text/TextVertexOutputFormat.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/text/TextVertexOutputFormat.java
new file mode 100644
index 0000000..355b9f8
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/text/TextVertexOutputFormat.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.io.text;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.OutputCommitter;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+
+import edu.uci.ics.pregelix.api.io.VertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+
+/**
+ * Abstract class that users should subclass to use their own text based vertex
+ * output format.
+ * 
+ * @param <I>
+ *            Vertex index value
+ * @param <V>
+ *            Vertex value
+ * @param <E>
+ *            Edge value
+ */
+@SuppressWarnings("rawtypes")
+public abstract class TextVertexOutputFormat<I extends WritableComparable, V extends Writable, E extends Writable>
+        extends VertexOutputFormat<I, V, E> {
+    /** Uses the TextOutputFormat to do everything (spec checks, committer) */
+    protected TextOutputFormat<Text, Text> textOutputFormat = new TextOutputFormat<Text, Text>();
+
+    /**
+     * Abstract class to be implemented by the user based on their specific
+     * vertex output. Easiest to ignore the key value separator and only use key
+     * instead.
+     * 
+     * @param <I>
+     *            Vertex index value
+     * @param <V>
+     *            Vertex value
+     * @param <E>
+     *            Edge value
+     */
+    public static abstract class TextVertexWriter<I extends WritableComparable, V extends Writable, E extends Writable>
+            implements VertexWriter<I, V, E> {
+        /** Context passed to initialize */
+        private TaskAttemptContext context;
+        /** Internal line record writer */
+        private final RecordWriter<Text, Text> lineRecordWriter;
+
+        /**
+         * Initialize with the LineRecordWriter.
+         * 
+         * @param lineRecordWriter
+         *            Line record writer from TextOutputFormat
+         */
+        public TextVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            this.lineRecordWriter = lineRecordWriter;
+        }
+
+        /**
+         * Remember the task context. The record writer is supplied
+         * already-constructed, so nothing else is set up here.
+         */
+        @Override
+        public void initialize(TaskAttemptContext context) throws IOException {
+            this.context = context;
+        }
+
+        /** Close the underlying line record writer. */
+        @Override
+        public void close(TaskAttemptContext context) throws IOException, InterruptedException {
+            lineRecordWriter.close(context);
+        }
+
+        /**
+         * Get the line record writer.
+         * 
+         * @return Record writer to be used for writing.
+         */
+        public RecordWriter<Text, Text> getRecordWriter() {
+            return lineRecordWriter;
+        }
+
+        /**
+         * Get the context.
+         * 
+         * @return Context passed to initialize.
+         */
+        public TaskAttemptContext getContext() {
+            return context;
+        }
+    }
+
+    /** Delegates output validation to the wrapped TextOutputFormat. */
+    @Override
+    public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
+        textOutputFormat.checkOutputSpecs(context);
+    }
+
+    /** Delegates committer creation to the wrapped TextOutputFormat. */
+    @Override
+    public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
+        return textOutputFormat.getOutputCommitter(context);
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
new file mode 100644
index 0000000..8b6d1b6
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.job;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.Job;
+
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexInputFormat;
+import edu.uci.ics.pregelix.api.io.VertexOutputFormat;
+
+/**
+ * This class represents a Pregelix job: a Hadoop {@link Job} extended with
+ * typed setters for the Pregelix-specific configuration keys declared here.
+ */
+public class PregelixJob extends Job {
+    /** Vertex class - required */
+    public static final String VERTEX_CLASS = "pregelix.vertexClass";
+    /** VertexInputFormat class - required */
+    public static final String VERTEX_INPUT_FORMAT_CLASS = "pregelix.vertexInputFormatClass";
+    /** VertexOutputFormat class - optional */
+    public static final String VERTEX_OUTPUT_FORMAT_CLASS = "pregelix.vertexOutputFormatClass";
+    /** Message combiner class - optional */
+    public static final String MESSAGE_COMBINER_CLASS = "pregelix.combinerClass";
+    /**
+     * Message combiner class - optional.
+     * 
+     * @deprecated retained for source compatibility; the name violates the
+     *             UPPER_SNAKE_CASE constant convention. Use
+     *             {@link #MESSAGE_COMBINER_CLASS} instead.
+     */
+    @Deprecated
+    public static final String Message_COMBINER_CLASS = MESSAGE_COMBINER_CLASS;
+    /** Global aggregator class - optional */
+    public static final String GLOBAL_AGGREGATOR_CLASS = "pregelix.aggregatorClass";
+    /** Vertex resolver class - optional */
+    public static final String VERTEX_RESOLVER_CLASS = "pregelix.vertexResolverClass";
+    /** Vertex index class */
+    public static final String VERTEX_INDEX_CLASS = "pregelix.vertexIndexClass";
+    /** Vertex value class */
+    public static final String VERTEX_VALUE_CLASS = "pregelix.vertexValueClass";
+    /** Edge value class */
+    public static final String EDGE_VALUE_CLASS = "pregelix.edgeValueClass";
+    /** Message value class */
+    public static final String MESSAGE_VALUE_CLASS = "pregelix.messageValueClass";
+    /** Partial combiner value class */
+    public static final String PARTIAL_COMBINE_VALUE_CLASS = "pregelix.partialCombinedValueClass";
+    /** Partial aggregate value class */
+    public static final String PARTIAL_AGGREGATE_VALUE_CLASS = "pregelix.partialAggregateValueClass";
+    /** Final aggregate value class */
+    public static final String FINAL_AGGREGATE_VALUE_CLASS = "pregelix.finalAggregateValueClass";
+    /** num of vertices */
+    public static final String NUM_VERTICES = "pregelix.numVertices";
+    /**
+     * num of vertices
+     * 
+     * @deprecated misspelled; use {@link #NUM_VERTICES} instead.
+     */
+    @Deprecated
+    public static final String NUM_VERTICE = NUM_VERTICES;
+    /** num of edges */
+    public static final String NUM_EDGES = "pregelix.numEdges";
+    /** increase state length */
+    public static final String INCREASE_STATE_LENGTH = "pregelix.incStateLength";
+    /** job id */
+    public static final String JOB_ID = "pregelix.jobid";
+    /** frame size */
+    public static final String FRAME_SIZE = "pregelix.framesize";
+
+    /**
+     * Constructor that will instantiate the configuration
+     * 
+     * @param jobName
+     *            User-defined job name
+     * @throws IOException
+     */
+    public PregelixJob(String jobName) throws IOException {
+        super(new Configuration(), jobName);
+    }
+
+    /**
+     * Constructor.
+     * 
+     * @param conf
+     *            User-defined configuration
+     * @param jobName
+     *            User-defined job name
+     * @throws IOException
+     */
+    public PregelixJob(Configuration conf, String jobName) throws IOException {
+        super(conf, jobName);
+    }
+
+    /**
+     * Set the vertex class (required)
+     * 
+     * @param vertexClass
+     *            Runs vertex computation
+     */
+    final public void setVertexClass(Class<?> vertexClass) {
+        getConfiguration().setClass(VERTEX_CLASS, vertexClass, Vertex.class);
+    }
+
+    /**
+     * Set the vertex input format class (required)
+     * 
+     * @param vertexInputFormatClass
+     *            Determines how graph is input
+     */
+    final public void setVertexInputFormatClass(Class<?> vertexInputFormatClass) {
+        getConfiguration().setClass(VERTEX_INPUT_FORMAT_CLASS, vertexInputFormatClass, VertexInputFormat.class);
+    }
+
+    /**
+     * Set the vertex output format class (optional)
+     * 
+     * @param vertexOutputFormatClass
+     *            Determines how graph is output
+     */
+    final public void setVertexOutputFormatClass(Class<?> vertexOutputFormatClass) {
+        getConfiguration().setClass(VERTEX_OUTPUT_FORMAT_CLASS, vertexOutputFormatClass, VertexOutputFormat.class);
+    }
+
+    /**
+     * Set the vertex combiner class (optional)
+     * 
+     * @param vertexCombinerClass
+     *            Determines how vertex messages are combined
+     */
+    final public void setMessageCombinerClass(Class<?> vertexCombinerClass) {
+        getConfiguration().setClass(MESSAGE_COMBINER_CLASS, vertexCombinerClass, MessageCombiner.class);
+    }
+
+    /**
+     * Set the global aggregator class (optional)
+     * 
+     * @param globalAggregatorClass
+     *            Determines how messages are globally aggregated
+     */
+    final public void setGlobalAggregatorClass(Class<?> globalAggregatorClass) {
+        getConfiguration().setClass(GLOBAL_AGGREGATOR_CLASS, globalAggregatorClass, GlobalAggregator.class);
+    }
+
+    /**
+     * Set the job Id
+     * 
+     * @param jobId
+     *            the unique identifier of this job
+     */
+    final public void setJobId(String jobId) {
+        getConfiguration().set(JOB_ID, jobId);
+    }
+
+    /**
+     * Set whether the vertex state length can be dynamically increased
+     * 
+     * @param incStateLengthDynamically
+     *            true to allow the vertex state to grow at runtime
+     */
+    final public void setDynamicVertexValueSize(boolean incStateLengthDynamically) {
+        getConfiguration().setBoolean(INCREASE_STATE_LENGTH, incStateLengthDynamically);
+    }
+
+    /**
+     * Set the frame size for a job
+     * 
+     * @param frameSize
+     *            the desired frame size
+     */
+    final public void setFrameSize(int frameSize) {
+        getConfiguration().setInt(FRAME_SIZE, frameSize);
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ArrayListWritable.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ArrayListWritable.java
new file mode 100644
index 0000000..d2ba28d
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ArrayListWritable.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.util;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+
+/**
+ * A Writable for ListArray containing instances of a single class. Elements
+ * are materialized through a reflective object pool so that repeated
+ * deserialization reuses instances rather than allocating new ones.
+ */
+public abstract class ArrayListWritable<M extends Writable> extends ArrayList<M> implements Writable, Configurable {
+    /** Used for instantiation */
+    private Class<M> refClass = null;
+    /** Defining a layout version for a serializable class. */
+    private static final long serialVersionUID = 1L;
+    /** Configuration */
+    private Configuration conf;
+    /** content object pool */
+    private List<M> pool = new ArrayList<M>();
+    /** how many instances in the pool have been used */
+    private int used = 0;
+    /** intermediate buffer for copying a data element */
+    private final ArrayBackedValueStorage intermediateBuffer = new ArrayBackedValueStorage();
+    /** intermediate data output */
+    private final DataOutput intermediateOutput = intermediateBuffer.getDataOutput();
+    /** input stream */
+    private final ResetableByteArrayInputStream inputStream = new ResetableByteArrayInputStream();
+    /** data input */
+    private final DataInput dataInput = new DataInputStream(inputStream);
+
+    /**
+     * Using the default constructor requires that the user implement
+     * setClass(), guaranteed to be invoked prior to instantiation in
+     * readFields()
+     */
+    public ArrayListWritable() {
+    }
+
+    /**
+     * clear all the elements
+     */
+    public void clearElements() {
+        // resetting used to 0 lets pooled instances be recycled
+        this.used = 0;
+        this.clear();
+    }
+
+    /**
+     * Add all elements from another list
+     * 
+     * @param list
+     *            the list of M
+     * @return true if successful, else false
+     */
+    public boolean addAllElements(List<M> list) {
+        for (int i = 0; i < list.size(); i++) {
+            addElement(list.get(i));
+        }
+        return true;
+    }
+
+    /**
+     * Add a deep copy of an element. The element is serialized into an
+     * intermediate buffer and read back into a pooled instance, so the caller
+     * may freely reuse the argument afterwards.
+     * 
+     * @param element
+     *            M
+     * @return true if successful, else false
+     * @throws IllegalStateException
+     *             if the copy round-trip fails
+     */
+    public boolean addElement(M element) {
+        try {
+            intermediateBuffer.reset();
+            element.write(intermediateOutput);
+            inputStream.setByteArray(intermediateBuffer.getByteArray(), 0);
+            M value = allocateValue();
+            value.readFields(dataInput);
+            add(value);
+            return true;
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    /**
+     * This constructor allows setting the refClass during construction.
+     * 
+     * @param refClass
+     *            internal type class
+     */
+    public ArrayListWritable(Class<M> refClass) {
+        super();
+        this.refClass = refClass;
+    }
+
+    /**
+     * This is a one-time operation to set the class type
+     * 
+     * @param refClass
+     *            internal type class
+     */
+    public void setClass(Class<M> refClass) {
+        if (this.refClass != null) {
+            throw new RuntimeException("setClass: refClass is already set to " + this.refClass.getName());
+        }
+        this.refClass = refClass;
+    }
+
+    /**
+     * Subclasses must set the class type appropriately and can use
+     * setClass(Class<M> refClass) to do it.
+     */
+    public abstract void setClass();
+
+    /**
+     * Deserialize the list: an int element count followed by that many
+     * elements, each read into a pooled instance.
+     */
+    public void readFields(DataInput in) throws IOException {
+        if (this.refClass == null) {
+            setClass();
+        }
+        used = 0;
+        this.clear();
+        int numValues = in.readInt(); // read number of values
+        for (int i = 0; i < numValues; i++) {
+            M value = allocateValue();
+            value.readFields(in); // read a value
+            add(value); // store it in values
+        }
+    }
+
+    /**
+     * Serialize the list: an int element count followed by the elements in
+     * order.
+     */
+    public void write(DataOutput out) throws IOException {
+        int numValues = size();
+        out.writeInt(numValues); // write number of values
+        for (int i = 0; i < numValues; i++) {
+            get(i).write(out);
+        }
+    }
+
+    public final Configuration getConf() {
+        return conf;
+    }
+
+    public final void setConf(Configuration conf) {
+        this.conf = conf;
+        if (this.refClass == null) {
+            setClass();
+        }
+    }
+
+    /** Return the next pooled instance, growing the pool reflectively on demand. */
+    private M allocateValue() {
+        if (used >= pool.size()) {
+            M value = ReflectionUtils.newInstance(refClass, conf);
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            M value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+
+    /** Rewind the given reusable iterator onto this list. */
+    public void reset(ArrayIterator<M> iterator) {
+        iterator.reset(this);
+    }
+
+    /** A reusable, read-only iterator over a list; remove() is unsupported. */
+    public static class ArrayIterator<M> implements Iterator<M> {
+
+        private int pos = 0;
+        private List<M> list;
+
+        private void reset(List<M> list) {
+            this.list = list;
+            pos = 0;
+        }
+
+        @Override
+        public boolean hasNext() {
+            return pos < list.size();
+        }
+
+        @Override
+        public M next() {
+            M item = list.get(pos);
+            pos++;
+            return item;
+        }
+
+        @Override
+        public void remove() {
+            // Iterator contract: an unsupported remove() must throw
+            // UnsupportedOperationException, not IllegalStateException.
+            throw new UnsupportedOperationException("remove: not supported");
+        }
+
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
new file mode 100644
index 0000000..ff9724d
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
@@ -0,0 +1,435 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.util;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexInputFormat;
+import edu.uci.ics.pregelix.api.io.VertexOutputFormat;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+
+/**
+ * Help to use the configuration to get the appropriate classes or instantiate
+ * them.
+ */
+public class BspUtils {
+    private static Configuration defaultConf = null;
+
+    public static void setDefaultConfiguration(Configuration conf) {
+        defaultConf = conf;
+    }
+
+    /**
+     * Get the user's subclassed {@link VertexInputFormat}.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex input format class
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> Class<? extends VertexInputFormat<I, V, E, M>> getVertexInputFormatClass(
+            Configuration conf) {
+        return (Class<? extends VertexInputFormat<I, V, E, M>>) conf.getClass(PregelixJob.VERTEX_INPUT_FORMAT_CLASS,
+                null, VertexInputFormat.class);
+    }
+
+    /**
+     * Create a user vertex input format class
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex input format class
+     */
+    @SuppressWarnings("rawtypes")
+    public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> VertexInputFormat<I, V, E, M> createVertexInputFormat(
+            Configuration conf) {
+        Class<? extends VertexInputFormat<I, V, E, M>> vertexInputFormatClass = getVertexInputFormatClass(conf);
+        VertexInputFormat<I, V, E, M> inputFormat = ReflectionUtils.newInstance(vertexInputFormatClass, conf);
+        return inputFormat;
+    }
+
+    /**
+     * Get the user's subclassed {@link VertexOutputFormat}.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex output format class
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public static <I extends WritableComparable, V extends Writable, E extends Writable> Class<? extends VertexOutputFormat<I, V, E>> getVertexOutputFormatClass(
+            Configuration conf) {
+        return (Class<? extends VertexOutputFormat<I, V, E>>) conf.getClass(PregelixJob.VERTEX_OUTPUT_FORMAT_CLASS,
+                null, VertexOutputFormat.class);
+    }
+
+    /**
+     * Create a user vertex output format class
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex output format class
+     */
+    @SuppressWarnings("rawtypes")
+    public static <I extends WritableComparable, V extends Writable, E extends Writable> VertexOutputFormat<I, V, E> createVertexOutputFormat(
+            Configuration conf) {
+        Class<? extends VertexOutputFormat<I, V, E>> vertexOutputFormatClass = getVertexOutputFormatClass(conf);
+        return ReflectionUtils.newInstance(vertexOutputFormatClass, conf);
+    }
+
+    /**
+     * Get the user's subclassed {@link MessageCombiner}.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex combiner class
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public static <I extends WritableComparable, M extends Writable, P extends Writable> Class<? extends MessageCombiner<I, M, P>> getMessageCombinerClass(
+            Configuration conf) {
+        return (Class<? extends MessageCombiner<I, M, P>>) conf.getClass(PregelixJob.Message_COMBINER_CLASS,
+                DefaultMessageCombiner.class, MessageCombiner.class);
+    }
+
+    /**
+     * Get the user's subclassed {@link GlobalAggregator}.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex combiner class
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable, P extends Writable, F extends Writable> Class<? extends GlobalAggregator<I, V, E, M, P, F>> getGlobalAggregatorClass(
+            Configuration conf) {
+        return (Class<? extends GlobalAggregator<I, V, E, M, P, F>>) conf.getClass(PregelixJob.GLOBAL_AGGREGATOR_CLASS,
+                GlobalCountAggregator.class, GlobalAggregator.class);
+    }
+
+    public static String getJobId(Configuration conf) {
+        return conf.get(PregelixJob.JOB_ID);
+    }
+
+    /**
+     * Create a user vertex combiner class
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex combiner class
+     */
+    @SuppressWarnings("rawtypes")
+    public static <I extends WritableComparable, M extends Writable, P extends Writable> MessageCombiner<I, M, P> createMessageCombiner(
+            Configuration conf) {
+        Class<? extends MessageCombiner<I, M, P>> vertexCombinerClass = getMessageCombinerClass(conf);
+        return ReflectionUtils.newInstance(vertexCombinerClass, conf);
+    }
+
+    /**
+     * Create a global aggregator class
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex combiner class
+     */
+    @SuppressWarnings("rawtypes")
+    public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable, P extends Writable, F extends Writable> GlobalAggregator<I, V, E, M, P, F> createGlobalAggregator(
+            Configuration conf) {
+        Class<? extends GlobalAggregator<I, V, E, M, P, F>> globalAggregatorClass = getGlobalAggregatorClass(conf);
+        return ReflectionUtils.newInstance(globalAggregatorClass, conf);
+    }
+
+    /**
+     * Get the user's subclassed Vertex.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex class
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> Class<? extends Vertex<I, V, E, M>> getVertexClass(
+            Configuration conf) {
+        return (Class<? extends Vertex<I, V, E, M>>) conf.getClass(PregelixJob.VERTEX_CLASS, null, Vertex.class);
+    }
+
+    /**
+     * Create a user vertex
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex
+     */
+    @SuppressWarnings("rawtypes")
+    public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> Vertex<I, V, E, M> createVertex(
+            Configuration conf) {
+        Class<? extends Vertex<I, V, E, M>> vertexClass = getVertexClass(conf);
+        Vertex<I, V, E, M> vertex = ReflectionUtils.newInstance(vertexClass, conf);
+        return vertex;
+    }
+
+    /**
+     * Get the user's subclassed vertex index class.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex index class
+     */
+    @SuppressWarnings("unchecked")
+    public static <I extends Writable> Class<I> getVertexIndexClass(Configuration conf) {
+        if (conf == null)
+            conf = defaultConf;
+        return (Class<I>) conf.getClass(PregelixJob.VERTEX_INDEX_CLASS, WritableComparable.class);
+    }
+
+    /**
+     * Create a user vertex index
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex index
+     */
+    @SuppressWarnings("rawtypes")
+    public static <I extends WritableComparable> I createVertexIndex(Configuration conf) {
+        Class<I> vertexClass = getVertexIndexClass(conf);
+        try {
+            return vertexClass.newInstance();
+        } catch (InstantiationException e) {
+            throw new IllegalArgumentException("createVertexIndex: Failed to instantiate", e);
+        } catch (IllegalAccessException e) {
+            throw new IllegalArgumentException("createVertexIndex: Illegally accessed", e);
+        }
+    }
+
+    /**
+     * Get the user's subclassed vertex value class.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex value class
+     */
+    @SuppressWarnings("unchecked")
+    public static <V extends Writable> Class<V> getVertexValueClass(Configuration conf) {
+        return (Class<V>) conf.getClass(PregelixJob.VERTEX_VALUE_CLASS, Writable.class);
+    }
+
+    /**
+     * Create a user vertex value
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex value
+     */
+    public static <V extends Writable> V createVertexValue(Configuration conf) {
+        Class<V> vertexValueClass = getVertexValueClass(conf);
+        try {
+            return vertexValueClass.newInstance();
+        } catch (InstantiationException e) {
+            throw new IllegalArgumentException("createVertexValue: Failed to instantiate", e);
+        } catch (IllegalAccessException e) {
+            throw new IllegalArgumentException("createVertexValue: Illegally accessed", e);
+        }
+    }
+
+    /**
+     * Get the user's subclassed edge value class.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex edge value class
+     */
+    @SuppressWarnings("unchecked")
+    public static <E extends Writable> Class<E> getEdgeValueClass(Configuration conf) {
+        return (Class<E>) conf.getClass(PregelixJob.EDGE_VALUE_CLASS, Writable.class);
+    }
+
+    /**
+     * Create a user edge value
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user edge value
+     */
+    public static <E extends Writable> E createEdgeValue(Configuration conf) {
+        Class<E> edgeValueClass = getEdgeValueClass(conf);
+        try {
+            return edgeValueClass.newInstance();
+        } catch (InstantiationException e) {
+            throw new IllegalArgumentException("createEdgeValue: Failed to instantiate", e);
+        } catch (IllegalAccessException e) {
+            throw new IllegalArgumentException("createEdgeValue: Illegally accessed", e);
+        }
+    }
+
+    /**
+     * Get the user's subclassed vertex message value class.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's vertex message value class
+     */
+    @SuppressWarnings("unchecked")
+    public static <M extends Writable> Class<M> getMessageValueClass(Configuration conf) {
+        if (conf == null)
+            conf = defaultConf;
+        return (Class<M>) conf.getClass(PregelixJob.MESSAGE_VALUE_CLASS, Writable.class);
+    }
+
+    /**
+     * Get the user's subclassed global aggregator's partial aggregate value class.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's global aggregate value class
+     */
+    @SuppressWarnings("unchecked")
+    public static <M extends Writable> Class<M> getPartialAggregateValueClass(Configuration conf) {
+        if (conf == null)
+            conf = defaultConf;
+        return (Class<M>) conf.getClass(PregelixJob.PARTIAL_AGGREGATE_VALUE_CLASS, Writable.class);
+    }
+
+    /**
+     * Get the user's subclassed combiner's partial combine value class.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's global aggregate value class
+     */
+    @SuppressWarnings("unchecked")
+    public static <M extends Writable> Class<M> getPartialCombineValueClass(Configuration conf) {
+        if (conf == null)
+            conf = defaultConf;
+        return (Class<M>) conf.getClass(PregelixJob.PARTIAL_COMBINE_VALUE_CLASS, Writable.class);
+    }
+
+    /**
+     * Get the user's subclassed global aggregator's global value class.
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return User's global aggregate value class
+     */
+    @SuppressWarnings("unchecked")
+    public static <M extends Writable> Class<M> getFinalAggregateValueClass(Configuration conf) {
+        if (conf == null)
+            conf = defaultConf;
+        return (Class<M>) conf.getClass(PregelixJob.FINAL_AGGREGATE_VALUE_CLASS, Writable.class);
+    }
+
+    /**
+     * Create a user vertex message value
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user vertex message value
+     */
+    public static <M extends Writable> M createMessageValue(Configuration conf) {
+        Class<M> messageValueClass = getMessageValueClass(conf);
+        try {
+            return messageValueClass.newInstance();
+        } catch (InstantiationException e) {
+            throw new IllegalArgumentException("createMessageValue: Failed to instantiate", e);
+        } catch (IllegalAccessException e) {
+            throw new IllegalArgumentException("createMessageValue: Illegally accessed", e);
+        }
+    }
+
+    /**
+     * Create a user partial aggregate value
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user aggregate value
+     */
+    public static <M extends Writable> M createPartialAggregateValue(Configuration conf) {
+        Class<M> aggregateValueClass = getPartialAggregateValueClass(conf);
+        try {
+            return aggregateValueClass.newInstance();
+        } catch (InstantiationException e) {
+            throw new IllegalArgumentException("createMessageValue: Failed to instantiate", e);
+        } catch (IllegalAccessException e) {
+            throw new IllegalArgumentException("createMessageValue: Illegally accessed", e);
+        }
+    }
+
+    /**
+     * Create a user partial combine value
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user aggregate value
+     */
+    @SuppressWarnings("rawtypes")
+    public static <M extends Writable> M createPartialCombineValue(Configuration conf) {
+        Class<M> aggregateValueClass = getPartialCombineValueClass(conf);
+        try {
+            M instance = aggregateValueClass.newInstance();
+            if (instance instanceof MsgList) {
+                // set conf for msg list, if the value type is msglist
+                ((MsgList) instance).setConf(conf);
+            }
+            return instance;
+        } catch (InstantiationException e) {
+            throw new IllegalArgumentException("createMessageValue: Failed to instantiate", e);
+        } catch (IllegalAccessException e) {
+            throw new IllegalArgumentException("createMessageValue: Illegally accessed", e);
+        }
+    }
+
+    /**
+     * Create a user aggregate value
+     * 
+     * @param conf
+     *            Configuration to check
+     * @return Instantiated user aggregate value
+     */
+    public static <M extends Writable> M createFinalAggregateValue(Configuration conf) {
+        Class<M> aggregateValueClass = getFinalAggregateValueClass(conf);
+        try {
+            return aggregateValueClass.newInstance();
+        } catch (InstantiationException e) {
+            throw new IllegalArgumentException("createMessageValue: Failed to instantiate", e);
+        } catch (IllegalAccessException e) {
+            throw new IllegalArgumentException("createMessageValue: Illegally accessed", e);
+        }
+    }
+
+    /**
+     * Get the job configuration parameter whether the vertex states will increase dynamically
+     * 
+     * @param conf
+     *            the job configuration
+     * @return the boolean setting of the parameter, by default it is false
+     */
+    public static boolean getDynamicVertexValueSize(Configuration conf) {
+        return conf.getBoolean(PregelixJob.INCREASE_STATE_LENGTH, false);
+    }
+
+    /**
+     * Get the specified frame size
+     * 
+     * @param conf
+     *            the job configuration
+     * @return the specified frame size; -1 if it is not set by users
+     */
+    public static int getFrameSize(Configuration conf) {
+        return conf.getInt(PregelixJob.FRAME_SIZE, -1);
+    }
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/DefaultMessageCombiner.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/DefaultMessageCombiner.java
new file mode 100644
index 0000000..1468431
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/DefaultMessageCombiner.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.pregelix.api.util;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class DefaultMessageCombiner<I extends WritableComparable, M extends Writable> extends
+        MessageCombiner<I, M, MsgList> {
+    private MsgList<M> msgList;
+
+    @Override
+    public void init(MsgList providedMsgList) {
+        this.msgList = providedMsgList;
+        this.msgList.clearElements();
+    }
+
+    @Override
+    public void stepPartial(I vertexIndex, M msg) throws HyracksDataException {
+        msgList.addElement(msg);
+    }
+
+    @Override
+    public void stepFinal(I vertexIndex, MsgList partialAggregate) throws HyracksDataException {
+        msgList.addAllElements(partialAggregate);
+    }
+
+    @Override
+    public MsgList finishPartial() {
+        return msgList;
+    }
+
+    @Override
+    public MsgList<M> finishFinal() {
+        return msgList;
+    }
+
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/FrameTupleUtils.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/FrameTupleUtils.java
new file mode 100644
index 0000000..402249e
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/FrameTupleUtils.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.util;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+
+public class FrameTupleUtils {
+
+    public static void flushTuple(FrameTupleAppender appender, ArrayTupleBuilder tb, IFrameWriter writer)
+            throws HyracksDataException {
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            FrameUtils.flushFrame(appender.getBuffer(), writer);
+            appender.reset(appender.getBuffer(), true);
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    public static void flushTuplesFinal(FrameTupleAppender appender, IFrameWriter writer) throws HyracksDataException {
+        if (appender.getTupleCount() > 0) {
+            FrameUtils.flushFrame(appender.getBuffer(), writer);
+            appender.reset(appender.getBuffer(), true);
+        }
+    }
+
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/GlobalCountAggregator.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/GlobalCountAggregator.java
new file mode 100644
index 0000000..1c1fa92
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/GlobalCountAggregator.java
@@ -0,0 +1,42 @@
+package edu.uci.ics.pregelix.api.util;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+
+@SuppressWarnings("rawtypes")
+public class GlobalCountAggregator<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable>
+        extends GlobalAggregator<I, V, E, M, LongWritable, LongWritable> {
+
+    private LongWritable state = new LongWritable(0);
+
+    @Override
+    public void init() {
+        state.set(0);
+    }
+
+    @Override
+    public void step(Vertex<I, V, E, M> v) throws HyracksDataException {
+        state.set(state.get() + 1);
+    }
+
+    @Override
+    public void step(LongWritable partialResult) {
+        state.set(state.get() + partialResult.get());
+    }
+
+    @Override
+    public LongWritable finishPartial() {
+        return state;
+    }
+
+    @Override
+    public LongWritable finishFinal() {
+        return state;
+    }
+
+}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ReflectionUtils.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ReflectionUtils.java
new file mode 100644
index 0000000..1366df1
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ReflectionUtils.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.util;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.GenericArrayType;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.lang.reflect.TypeVariable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
/**
 * Helper methods to get type arguments to generic classes. Courtesy of Ian
 * Robertson (overstock.com). Make sure to use with abstract generic classes,
 * not interfaces.
 */
public class ReflectionUtils {
    /**
     * Do not instantiate.
     */
    private ReflectionUtils() {
    }

    /**
     * Get the underlying class for a type, or null if the type is a variable
     * type.
     * 
     * @param type
     *            the type
     * @return the underlying class, or null when it cannot be determined (e.g. type variables)
     */
    public static Class<?> getClass(Type type) {
        if (type instanceof Class) {
            return (Class<?>) type;
        } else if (type instanceof ParameterizedType) {
            return getClass(((ParameterizedType) type).getRawType());
        } else if (type instanceof GenericArrayType) {
            Type componentType = ((GenericArrayType) type).getGenericComponentType();
            Class<?> componentClass = getClass(componentType);
            if (componentClass != null) {
                // Obtain the array class by creating a zero-length array of the component type.
                return Array.newInstance(componentClass, 0).getClass();
            } else {
                return null;
            }
        } else {
            return null;
        }
    }

    /**
     * Get the actual type arguments a child class has used to extend a generic
     * base class.
     * 
     * @param <T>
     *            Type to evaluate.
     * @param baseClass
     *            the base class
     * @param childClass
     *            the child class
     * @return a list of the raw classes for the actual type arguments (null entries
     *         for arguments that cannot be resolved to a class)
     */
    public static <T> List<Class<?>> getTypeArguments(Class<T> baseClass, Class<? extends T> childClass) {
        Map<Type, Type> resolvedTypes = new HashMap<Type, Type>();
        Type type = childClass;
        // start walking up the inheritance hierarchy until we hit baseClass
        while (!getClass(type).equals(baseClass)) {
            if (type instanceof Class) {
                // there is no useful information for us in raw types,
                // so just keep going.
                type = ((Class<?>) type).getGenericSuperclass();
            } else {
                ParameterizedType parameterizedType = (ParameterizedType) type;
                Class<?> rawType = (Class<?>) parameterizedType.getRawType();

                // Record how this level binds its type variables so they can be
                // chased down to concrete classes afterwards.
                Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
                TypeVariable<?>[] typeParameters = rawType.getTypeParameters();
                for (int i = 0; i < actualTypeArguments.length; i++) {
                    resolvedTypes.put(typeParameters[i], actualTypeArguments[i]);
                }

                if (!rawType.equals(baseClass)) {
                    type = rawType.getGenericSuperclass();
                }
            }
        }

        // finally, for each actual type argument provided to baseClass,
        // determine (if possible)
        // the raw class for that type argument.
        Type[] actualTypeArguments;
        if (type instanceof Class) {
            actualTypeArguments = ((Class<?>) type).getTypeParameters();
        } else {
            actualTypeArguments = ((ParameterizedType) type).getActualTypeArguments();
        }
        List<Class<?>> typeArgumentsAsClasses = new ArrayList<Class<?>>();
        // resolve types by chasing down type variables.
        for (Type baseType : actualTypeArguments) {
            while (resolvedTypes.containsKey(baseType)) {
                baseType = resolvedTypes.get(baseType);
            }
            typeArgumentsAsClasses.add(getClass(baseType));
        }
        return typeArgumentsAsClasses;
    }

    /**
     * Try to directly set a (possibly private) field on an Object.
     * 
     * @param target
     *            Target to set the field on.
     * @param fieldname
     *            Name of field.
     * @param value
     *            Value to set on target.
     * @throws NoSuchFieldException
     *             when no matching field exists on target's class hierarchy
     * @throws IllegalAccessException
     *             when the field cannot be made accessible
     */
    public static void setField(Object target, String fieldname, Object value) throws NoSuchFieldException,
            IllegalAccessException {
        Field field = findDeclaredField(target.getClass(), fieldname);
        field.setAccessible(true);
        field.set(target, value);
    }

    /**
     * Find a declared field in a class or one of its super classes.
     * 
     * @param inClass
     *            Class to search for declared field.
     * @param fieldname
     *            Field name to search for
     * @return Field or will throw.
     * @throws NoSuchFieldException
     *             When field not found.
     */
    private static Field findDeclaredField(Class<?> inClass, String fieldname) throws NoSuchFieldException {
        // Null guard: getSuperclass() returns null for interfaces/Object, which the
        // original loop would dereference; treat that as "not found" instead of NPE.
        while (inClass != null && !Object.class.equals(inClass)) {
            for (Field field : inClass.getDeclaredFields()) {
                // NOTE(review): matching is case-insensitive here, apparently by design —
                // confirm callers rely on it before tightening to equals().
                if (field.getName().equalsIgnoreCase(fieldname)) {
                    return field;
                }
            }
            inClass = inClass.getSuperclass();
        }
        // Include the field name so the failure is diagnosable (was a bare exception).
        throw new NoSuchFieldException(fieldname);
    }
}
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ResetableByteArrayInputStream.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ResetableByteArrayInputStream.java
new file mode 100755
index 0000000..5702642
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ResetableByteArrayInputStream.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.api.util;
+
+import java.io.InputStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
public class ResetableByteArrayInputStream extends InputStream {
    private static final Logger LOGGER = Logger.getLogger(ResetableByteArrayInputStream.class.getName());

    // Caller-supplied backing buffer; reads advance position up to data.length.
    private byte[] data;
    private int position;

    public ResetableByteArrayInputStream() {
    }

    /**
     * Point this stream at a (new) backing array and start reading at the given offset.
     * The array is not copied; callers must not mutate it while reading.
     * 
     * @param data
     *            the backing byte array
     * @param position
     *            index of the first byte to read
     */
    public void setByteArray(byte[] data, int position) {
        this.data = data;
        this.position = position;
    }

    @Override
    public int read() {
        int remaining = data.length - position;
        int value = remaining > 0 ? (data[position++] & 0xff) : -1;
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("read(): value: " + value + " remaining: " + remaining + " position: " + position);
        }
        return value;
    }

    @Override
    public int read(byte[] bytes, int offset, int length) {
        int remaining = data.length - position;
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("read(bytes[], int, int): remaining: " + remaining + " offset: " + offset + " length: "
                    + length + " position: " + position);
        }
        if (length == 0) {
            // Per the InputStream contract, a zero-length read returns 0 (the
            // original returned -1 here when the buffer was exhausted).
            return 0;
        }
        if (remaining <= 0) {
            return -1;
        }
        int l = Math.min(length, remaining);
        System.arraycopy(data, position, bytes, offset, l);
        position += l;
        return l;
    }
}
\ No newline at end of file
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ResetableByteArrayOutputStream.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ResetableByteArrayOutputStream.java
new file mode 100755
index 0000000..307a3ae
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/ResetableByteArrayOutputStream.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.api.util;
+
+import java.io.OutputStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
public class ResetableByteArrayOutputStream extends OutputStream {
    private static final Logger LOGGER = Logger.getLogger(ResetableByteArrayOutputStream.class.getName());

    // Caller-supplied backing buffer; writes advance position up to data.length.
    private byte[] data;
    private int position;

    public ResetableByteArrayOutputStream() {
    }

    /**
     * Point this stream at a (new) destination array and start writing at the given offset.
     * The array is not copied; bytes are written directly into it.
     * 
     * @param data
     *            the destination byte array
     * @param position
     *            index at which the next byte is written
     */
    public void setByteArray(byte[] data, int position) {
        this.data = data;
        this.position = position;
    }

    @Override
    public void write(int b) {
        int remaining = data.length - position;
        // Off-by-one fix: the original check "position + 1 > data.length - 1"
        // wrongly rejected a write into the final byte of the buffer.
        if (position >= data.length)
            throw new IndexOutOfBoundsException();
        data[position] = (byte) b;
        position++;
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("write(): value: " + b + " remaining: " + remaining + " position: " + position);
        }
    }

    @Override
    public void write(byte[] bytes, int offset, int length) {
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("write(bytes[], int, int) offset: " + offset + " length: " + length + " position: "
                    + position);
        }
        // Off-by-one fix: allow the write to fill the buffer exactly to data.length
        // (original bound was "position + length > data.length - 1").
        if (position + length > data.length)
            throw new IndexOutOfBoundsException();
        System.arraycopy(bytes, offset, data, position, length);
        position += length;
    }
}
\ No newline at end of file
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/SerDeUtils.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/SerDeUtils.java
new file mode 100644
index 0000000..32c21ac
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/SerDeUtils.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.api.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+
+public class SerDeUtils {
+
+    public static byte[] serialize(Writable object) throws IOException {
+        ByteArrayOutputStream bbos = new ByteArrayOutputStream();
+        DataOutput output = new DataOutputStream(bbos);
+        object.write(output);
+        return bbos.toByteArray();
+    }
+
+    public static void deserialize(Writable object, byte[] buffer) throws IOException {
+        ByteArrayInputStream bbis = new ByteArrayInputStream(buffer);
+        DataInput input = new DataInputStream(bbis);
+        object.readFields(input);
+    }
+
+    public static long readVLong(DataInput in) throws IOException {
+        int vLen = 0;
+        long value = 0L;
+        while (true) {
+            byte b = (byte) in.readByte();
+            ++vLen;
+            value += (((long) (b & 0x7f)) << ((vLen - 1) * 7));
+            if ((b & 0x80) == 0) {
+                break;
+            }
+        }
+        return value;
+    }
+
+    public static void writeVLong(DataOutput out, long value) throws IOException {
+        long data = value;
+        do {
+            byte b = (byte) (data & 0x7f);
+            data >>= 7;
+            if (data != 0) {
+                b |= 0x80;
+            }
+            out.write(b);
+        } while (data != 0);
+    }
+
+    public static long readVLong(byte[] data, int start, int length) {
+        int vLen = 0;
+        long value = 0L;
+        while (true) {
+            byte b = (byte) data[start];
+            ++vLen;
+            value += (((long) (b & 0x7f)) << ((vLen - 1) * 7));
+            if ((b & 0x80) == 0) {
+                break;
+            }
+            ++start;
+        }
+        if (vLen != length)
+            throw new IllegalStateException("length mismatch -- vLen:" + vLen + " length:" + length);
+        return value;
+    }
+
+}
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part1.tbl b/pregelix/pregelix-core/data/tpch0.001/customer-part1.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part1.tbl
copy to pregelix/pregelix-core/data/tpch0.001/customer-part1.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part2.tbl b/pregelix/pregelix-core/data/tpch0.001/customer-part2.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer-part2.tbl
copy to pregelix/pregelix-core/data/tpch0.001/customer-part2.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl b/pregelix/pregelix-core/data/tpch0.001/customer.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/customer.tbl
copy to pregelix/pregelix-core/data/tpch0.001/customer.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/lineitem.tbl b/pregelix/pregelix-core/data/tpch0.001/lineitem.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/lineitem.tbl
copy to pregelix/pregelix-core/data/tpch0.001/lineitem.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/nation.tbl b/pregelix/pregelix-core/data/tpch0.001/nation.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/nation.tbl
copy to pregelix/pregelix-core/data/tpch0.001/nation.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part1.tbl b/pregelix/pregelix-core/data/tpch0.001/orders-part1.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part1.tbl
copy to pregelix/pregelix-core/data/tpch0.001/orders-part1.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part2.tbl b/pregelix/pregelix-core/data/tpch0.001/orders-part2.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders-part2.tbl
copy to pregelix/pregelix-core/data/tpch0.001/orders-part2.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders.tbl b/pregelix/pregelix-core/data/tpch0.001/orders.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/orders.tbl
copy to pregelix/pregelix-core/data/tpch0.001/orders.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/part.tbl b/pregelix/pregelix-core/data/tpch0.001/part.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/part.tbl
copy to pregelix/pregelix-core/data/tpch0.001/part.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/partsupp.tbl b/pregelix/pregelix-core/data/tpch0.001/partsupp.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/partsupp.tbl
copy to pregelix/pregelix-core/data/tpch0.001/partsupp.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/region.tbl b/pregelix/pregelix-core/data/tpch0.001/region.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/region.tbl
copy to pregelix/pregelix-core/data/tpch0.001/region.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/supplier.tbl b/pregelix/pregelix-core/data/tpch0.001/supplier.tbl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/supplier.tbl
copy to pregelix/pregelix-core/data/tpch0.001/supplier.tbl
diff --git a/hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/tpch.ddl b/pregelix/pregelix-core/data/tpch0.001/tpch.ddl
similarity index 100%
copy from hyracks-algebricks/hyracks-algebricks-tests/data/tpch0.001/tpch.ddl
copy to pregelix/pregelix-core/data/tpch0.001/tpch.ddl
diff --git a/pregelix/pregelix-core/data/webmap/webmap_link.txt b/pregelix/pregelix-core/data/webmap/webmap_link.txt
new file mode 100644
index 0000000..2bd5a0e
--- /dev/null
+++ b/pregelix/pregelix-core/data/webmap/webmap_link.txt
@@ -0,0 +1,20 @@
+0 1
+1 1 2
+2 1 2 3
+3 1 2 3 4
+4 1 2 3 4 5
+5 1 2 3 4 5 6
+6 1 2 3 4 5 6 7
+7 1 2 3 4 5 6 7 8
+8 1 2 3 4 5 6 7 8 9
+9 1 2 3 4 5 6 7 8 9 10
+10 11
+11 11 12
+12 11 12 13
+13 11 12 13 14
+14 11 12 13 14 15
+15 11 12 13 14 15 16
+16 11 12 13 14 15 16 17
+17 11 12 13 14 15 16 17 18
+18 11 12 13 14 15 16 17 18 19
+19 0 11 12 13 14 15 16 17 18 19
diff --git a/pregelix/pregelix-core/pom.xml b/pregelix/pregelix-core/pom.xml
new file mode 100644
index 0000000..ab163cb
--- /dev/null
+++ b/pregelix/pregelix-core/pom.xml
@@ -0,0 +1,313 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>pregelix-core</artifactId>
+	<packaging>jar</packaging>
+	<name>pregelix-core</name>
+
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-jar-plugin</artifactId>
+				<version>2.4</version>
+				<executions>
+					<execution>
+						<id>balancer</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+						<configuration>
+							<classifier>balancer</classifier>
+							<archive>
+								<manifest>
+									<mainClass>edu.uci.ics.pregelix.core.util.DataBalancer</mainClass>
+								</manifest>
+							</archive>
+							<includes>
+								<include>**/edu/uci/ics/pregelix/core/util/DataBalancer*</include>
+							</includes>
+						</configuration>
+					</execution>
+					<execution>
+						<id>generator</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+						<configuration>
+							<classifier>generator</classifier>
+							<archive>
+								<manifest>
+									<mainClass>edu.uci.ics.pregelix.core.util.DataGenerator</mainClass>
+								</manifest>
+							</archive>
+							<includes>
+								<include>**/edu/uci/ics/pregelix/core/util/DataGenerator*</include>
+							</includes>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>appassembler-maven-plugin</artifactId>
+				<version>1.3</version>
+				<executions>
+					<execution>
+						<configuration>
+							<programs>
+								<program>
+									<mainClass>org.apache.hadoop.util.RunJar</mainClass>
+									<name>pregelix-obselete</name>
+								</program>
+							</programs>
+							<repositoryLayout>flat</repositoryLayout>
+							<repositoryName>lib</repositoryName>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>assemble</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<version>2.2-beta-5</version>
+				<executions>
+					<execution>
+						<configuration>
+							<descriptors>
+								<descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+							</descriptors>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>attached</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<artifactId>maven-resources-plugin</artifactId>
+				<version>2.5</version>
+				<executions>
+					<execution>
+						<id>copy-scripts</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/bin</outputDirectory>
+							<resources>
+								<resource>
+									<directory>src/main/resources/scripts</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-conf</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/conf</outputDirectory>
+							<resources>
+								<resource>
+									<directory>src/main/resources/conf</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx512m -XX:MaxPermSize=300m
+						-Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>teststore*</include>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-dataflow-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-dataflow</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-runtime</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-data-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-btree</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.kenai.nbpwr</groupId>
+			<artifactId>org-apache-commons-io</artifactId>
+			<version>1.3.1-201002241208</version>
+			<type>nbm</type>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks.examples</groupId>
+			<artifactId>hyracks-integration-tests</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.sun.jersey</groupId>
+			<artifactId>jersey</artifactId>
+			<version>0.8-ea</version>
+			<type>jar</type>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>javax.servlet</groupId>
+			<artifactId>javax.servlet-api</artifactId>
+			<version>3.0.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-ipc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hyracks-cli/src/main/assembly/binary-assembly.xml b/pregelix/pregelix-core/src/main/assembly/binary-assembly.xml
old mode 100644
new mode 100755
similarity index 100%
copy from hyracks-cli/src/main/assembly/binary-assembly.xml
copy to pregelix/pregelix-core/src/main/assembly/binary-assembly.xml
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/IDriver.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/IDriver.java
new file mode 100644
index 0000000..ccdd965
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/IDriver.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.base;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+
+/**
+ * Client-side entry point contract for executing a Pregelix job on a
+ * Hyracks cluster identified by a cluster-controller address.
+ */
+public interface IDriver {
+
+    /** The physical plan variants a Pregelix job can be compiled into. */
+    public static enum Plan {
+        INNER_JOIN,
+        OUTER_JOIN,
+        OUTER_JOIN_SORT,
+        OUTER_JOIN_SINGLE_SORT
+    }
+
+    /**
+     * Runs the job with an implementation-chosen default plan and no profiling.
+     *
+     * @param job       the Pregelix job to execute
+     * @param ipAddress cluster controller IP address
+     * @param port      cluster controller port
+     * @throws HyracksException if any stage of the execution fails
+     */
+    public void runJob(PregelixJob job, String ipAddress, int port) throws HyracksException;
+
+    /**
+     * Runs the job with an explicit plan choice and optional runtime profiling.
+     *
+     * @param job        the Pregelix job to execute
+     * @param planChoice which physical plan variant to generate
+     * @param ipAddress  cluster controller IP address
+     * @param port       cluster controller port
+     * @param profiling  true to collect runtime profiling information
+     * @throws HyracksException if any stage of the execution fails
+     */
+    public void runJob(PregelixJob job, Plan planChoice, String ipAddress, int port, boolean profiling)
+            throws HyracksException;
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/IJobGen.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/IJobGen.java
new file mode 100644
index 0000000..065ed5f
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/IJobGen.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.base;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+/**
+ * Generates the Hyracks job specifications that make up one Pregelix run:
+ * storage creation, initial data loading, and one job per loop iteration.
+ */
+public interface IJobGen {
+
+    /** @return the job specification that creates the backing storage structures */
+    public JobSpecification generateCreatingJob() throws HyracksException;
+
+    /** @return the job specification that bulk-loads the input data */
+    public JobSpecification generateLoadingJob() throws HyracksException;
+
+    /**
+     * @param iteration the loop iteration to generate a job for
+     *                  (callers appear to start at 1 — confirm with implementations)
+     * @return the job specification executing that iteration
+     */
+    public JobSpecification generateJob(int iteration) throws HyracksException;
+
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/INormalizedKeyComputerFactoryProvider.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/INormalizedKeyComputerFactoryProvider.java
new file mode 100644
index 0000000..b6c995a
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/INormalizedKeyComputerFactoryProvider.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.base;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+/**
+ * Supplies {@link INormalizedKeyComputerFactory} instances for a given key
+ * class, for ascending and descending sort orders respectively.
+ */
+public interface INormalizedKeyComputerFactoryProvider {
+
+    /** @return a normalized-key computer factory for ascending order on {@code keyClass} */
+    @SuppressWarnings("rawtypes")
+    INormalizedKeyComputerFactory getAscINormalizedKeyComputerFactory(Class keyClass);
+
+    /** @return a normalized-key computer factory for descending order on {@code keyClass} */
+    @SuppressWarnings("rawtypes")
+    INormalizedKeyComputerFactory getDescINormalizedKeyComputerFactory(Class keyClass);
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/data/TypeTraits.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/data/TypeTraits.java
new file mode 100644
index 0000000..7659bf9
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/data/TypeTraits.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.data;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+
+/**
+ * Simple {@link ITypeTraits} implementation describing whether a field type
+ * has a fixed length and, if so, what that length is.
+ */
+public class TypeTraits implements ITypeTraits {
+
+    private static final long serialVersionUID = 1L;
+    // Fixed length; 0 when constructed via the boolean constructor.
+    private final int length;
+    private final boolean isFixedLength;
+
+    /**
+     * Creates traits with an explicit fixed/variable flag and length 0.
+     * NOTE(review): passing {@code true} here produces a "fixed-length" trait
+     * of length 0; this constructor is presumably intended only for the
+     * variable-length ({@code false}) case — confirm with callers.
+     */
+    public TypeTraits(boolean isFixedLength) {
+        this.isFixedLength = isFixedLength;
+        this.length = 0;
+    }
+
+    /** Creates fixed-length traits of the given length. */
+    public TypeTraits(int length) {
+        this.isFixedLength = true;
+        this.length = length;
+    }
+
+    @Override
+    public boolean isFixedLength() {
+        return isFixedLength;
+    }
+
+    @Override
+    public int getFixedLength() {
+        return length;
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
new file mode 100644
index 0000000..f07a246
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
@@ -0,0 +1,254 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.driver;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.UUID;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.core.base.IDriver;
+import edu.uci.ics.pregelix.core.jobgen.JobGen;
+import edu.uci.ics.pregelix.core.jobgen.JobGenInnerJoin;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoin;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSingleSort;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSort;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.util.Utilities;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+
+/**
+ * Default {@link IDriver} implementation. Compiles a Pregelix job into a
+ * sequence of Hyracks job specifications (create, load, per-iteration loop
+ * body, result write, cleanup), deploys the client's jars as a Hyracks
+ * application, and runs the jobs until a termination state is observed.
+ */
+@SuppressWarnings("rawtypes")
+public class Driver implements IDriver {
+    private static final Log LOG = LogFactory.getLog(Driver.class);
+    // Plan-specific job generator, selected in runJob() from the Plan choice.
+    private JobGen jobGen;
+    // Whether to request runtime profiling for every submitted job.
+    private boolean profiling;
+
+    // Unique Hyracks application name: example class simple name + random UUID.
+    private String applicationName;
+    // Connection to the Hyracks cluster controller; created lazily in runJob().
+    private IHyracksClientConnection hcc;
+
+    // Client class whose classloader supplies the jars to deploy.
+    private Class exampleClass;
+
+    /** @param exampleClass the client class used to locate the jars to ship to the cluster */
+    public Driver(Class exampleClass) {
+        this.exampleClass = exampleClass;
+    }
+
+    /** Runs the job with the OUTER_JOIN plan and profiling disabled. */
+    @Override
+    public void runJob(PregelixJob job, String ipAddress, int port) throws HyracksException {
+        runJob(job, Plan.OUTER_JOIN, ipAddress, port, false);
+    }
+
+    /**
+     * Full execution pipeline: loads Hadoop site configs, builds the chosen
+     * job generator, deploys the application, deletes any pre-existing output
+     * path, then runs create/load, iterates until termination, writes results
+     * to HDFS, cleans up, and destroys the application. On any exception the
+     * application is destroyed (best effort) before rethrowing.
+     */
+    @Override
+    public void runJob(PregelixJob job, Plan planChoice, String ipAddress, int port, boolean profiling)
+            throws HyracksException {
+        applicationName = exampleClass.getSimpleName() + UUID.randomUUID();
+        try {
+            /** add hadoop configurations */
+            URL hadoopCore = job.getClass().getClassLoader().getResource("core-site.xml");
+            job.getConfiguration().addResource(hadoopCore);
+            URL hadoopMapRed = job.getClass().getClassLoader().getResource("mapred-site.xml");
+            job.getConfiguration().addResource(hadoopMapRed);
+            URL hadoopHdfs = job.getClass().getClassLoader().getResource("hdfs-site.xml");
+            job.getConfiguration().addResource(hadoopHdfs);
+            ClusterConfig.loadClusterConfig(ipAddress, port);
+
+            LOG.info("job started");
+            long start = System.currentTimeMillis();
+            long end = start;
+            long time = 0;
+
+            this.profiling = profiling;
+
+            // Select the job generator matching the requested physical plan.
+            switch (planChoice) {
+                case INNER_JOIN:
+                    jobGen = new JobGenInnerJoin(job);
+                    break;
+                case OUTER_JOIN:
+                    jobGen = new JobGenOuterJoin(job);
+                    break;
+                case OUTER_JOIN_SORT:
+                    jobGen = new JobGenOuterJoinSort(job);
+                    break;
+                case OUTER_JOIN_SINGLE_SORT:
+                    jobGen = new JobGenOuterJoinSingleSort(job);
+                    break;
+                default:
+                    jobGen = new JobGenInnerJoin(job);
+            }
+
+            if (hcc == null)
+                hcc = new HyracksConnection(ipAddress, port);
+
+            // Collect every .jar on the client classpath and deploy them as
+            // the Hyracks application archive.
+            URLClassLoader classLoader = (URLClassLoader) exampleClass.getClassLoader();
+            List<File> jars = new ArrayList<File>();
+            URL[] urls = classLoader.getURLs();
+            for (URL url : urls)
+                if (url.toString().endsWith(".jar"))
+                    jars.add(new File(url.getPath()));
+            installApplication(jars);
+
+            start = System.currentTimeMillis();
+            // Remove any stale output so FileOutputFormat does not fail.
+            FileSystem dfs = FileSystem.get(job.getConfiguration());
+            dfs.delete(FileOutputFormat.getOutputPath(job), true);
+            runCreate(jobGen);
+            runDataLoad(jobGen);
+            end = System.currentTimeMillis();
+            time = end - start;
+            LOG.info("data loading finished " + time + "ms");
+            // Iterate until either normal or forced termination is signaled.
+            int i = 1;
+            boolean terminate = false;
+            do {
+                start = System.currentTimeMillis();
+                runLoopBodyIteration(jobGen, i);
+                end = System.currentTimeMillis();
+                time = end - start;
+                LOG.info("iteration " + i + " finished " + time + "ms");
+                terminate = IterationUtils.readTerminationState(job.getConfiguration(), jobGen.getJobId())
+                        || IterationUtils.readForceTerminationState(job.getConfiguration(), jobGen.getJobId());
+                i++;
+            } while (!terminate);
+
+            start = System.currentTimeMillis();
+            // NOTE(review): method name has a typo ("WRite"); it is private,
+            // so it can be renamed safely in a follow-up change.
+            runHDFSWRite(jobGen);
+            runCleanup(jobGen);
+            destroyApplication(applicationName);
+            end = System.currentTimeMillis();
+            time = end - start;
+            LOG.info("result writing finished " + time + "ms");
+            LOG.info("job finished");
+        } catch (Exception e) {
+            try {
+                /**
+                 * destroy application if there is any exception
+                 */
+                if (hcc != null) {
+                    destroyApplication(applicationName);
+                }
+            } catch (Exception e2) {
+                // NOTE(review): the cleanup failure (e2) masks the original
+                // exception (e) here — consider logging e before rethrowing.
+                throw new HyracksException(e2);
+            }
+            throw new HyracksException(e);
+        }
+    }
+
+    // NOTE(review): the try/catch blocks in the run* helpers below only
+    // rethrow and add no handling; they could be removed in a later cleanup.
+
+    /** Runs the storage-creation job. */
+    private void runCreate(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification treeCreateSpec = jobGen.generateCreatingJob();
+            execute(treeCreateSpec);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    /** Runs the bulk-load job that ingests the input graph. */
+    private void runDataLoad(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification bulkLoadJobSpec = jobGen.generateLoadingJob();
+            execute(bulkLoadJobSpec);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    /** Runs the loop-body job for one iteration. */
+    private void runLoopBodyIteration(JobGen jobGen, int iteration) throws Exception {
+        try {
+            JobSpecification loopBody = jobGen.generateJob(iteration);
+            execute(loopBody);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    /** Runs the job that scans the index and writes the result graph out. */
+    private void runHDFSWRite(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification scanSortPrintJobSpec = jobGen.scanIndexWriteGraph();
+            execute(scanSortPrintJobSpec);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    /** Runs all cleanup jobs produced by the generator. */
+    private void runCleanup(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification[] cleanups = jobGen.generateCleanup();
+            runJobArray(cleanups);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    /** Executes the given job specifications sequentially. */
+    private void runJobArray(JobSpecification[] jobs) throws Exception {
+        for (JobSpecification job : jobs) {
+            execute(job);
+        }
+    }
+
+    /** Submits one job to the cluster and blocks until it completes. */
+    private void execute(JobSpecification job) throws Exception {
+        job.setUseConnectorPolicyForScheduling(false);
+        JobId jobId = hcc.startJob(applicationName, job,
+                profiling ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
+        hcc.waitForCompletion(jobId);
+    }
+
+    /**
+     * Packs the given jars into a single Hyracks application archive and
+     * deploys it under {@link #applicationName}.
+     */
+    public void installApplication(List<File> jars) throws Exception {
+        // TreeSet deduplicates and orders the jar paths deterministically.
+        Set<String> allJars = new TreeSet<String>();
+        for (File jar : jars) {
+            allJars.add(jar.getAbsolutePath());
+        }
+        long start = System.currentTimeMillis();
+        File appZip = Utilities.getHyracksArchive(applicationName, allJars);
+        long end = System.currentTimeMillis();
+        LOG.info("jar packing finished " + (end - start) + "ms");
+
+        start = System.currentTimeMillis();
+        hcc.createApplication(applicationName, appZip);
+        end = System.currentTimeMillis();
+        LOG.info("jar deployment finished " + (end - start) + "ms");
+    }
+
+    /** Removes the deployed application from the cluster controller. */
+    public void destroyApplication(String appName) throws Exception {
+        hcc.destroyApplication(appName);
+    }
+
+}
+
+/**
+ * {@link FilenameFilter} that accepts files whose names end with the given
+ * extension (supplied without the leading dot).
+ */
+class FileFilter implements FilenameFilter {
+    private String ext;
+
+    /** @param ext extension without the dot, e.g. "jar" */
+    public FileFilter(String ext) {
+        this.ext = "." + ext;
+    }
+
+    public boolean accept(File dir, String name) {
+        return name.endsWith(ext);
+    }
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/config/ConfigurationFactory.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/config/ConfigurationFactory.java
new file mode 100644
index 0000000..f3089ba
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/config/ConfigurationFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.hadoop.config;
+
+import org.apache.hadoop.conf.Configuration;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.util.SerDeUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+
+/**
+ * Serializable factory that captures a Hadoop {@link Configuration} as a
+ * byte array at construction time and rebuilds an equivalent Configuration
+ * on each {@link #createConfiguration()} call.
+ */
+public class ConfigurationFactory implements IConfigurationFactory {
+    private static final long serialVersionUID = 1L;
+    // Serialized form of the configuration captured in the constructor.
+    private final byte[] data;
+
+    /**
+     * @param conf the configuration to capture
+     * @throws IllegalStateException if the configuration cannot be serialized
+     */
+    public ConfigurationFactory(Configuration conf) {
+        try {
+            data = SerDeUtils.serialize(conf);
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    /**
+     * Deserializes a fresh {@link Configuration} from the captured bytes and
+     * points its class loader at this class's loader.
+     *
+     * @throws HyracksDataException if deserialization fails
+     */
+    @Override
+    public Configuration createConfiguration() throws HyracksDataException {
+        try {
+            Configuration conf = new Configuration();
+            SerDeUtils.deserialize(conf, data);
+            conf.setClassLoader(this.getClass().getClassLoader());
+            return conf;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/data/Message.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/data/Message.java
new file mode 100644
index 0000000..a812a6b
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/data/Message.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.hadoop.data;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.pregelix.api.util.BspUtils;
+
+/**
+ * A message addressed to a receiver vertex: carries the receiver's vertex id
+ * and a message body, both Hadoop {@link Writable}s.
+ *
+ * @param <I> receiver vertex id type
+ * @param <M> message body type
+ */
+public class Message<I extends Writable, M extends Writable> implements Writable {
+    private I receiverId;
+    private M body;
+    // Used by readFields() to reflectively instantiate the id/body classes.
+    private Configuration conf;
+
+    /** No-arg constructor required for Writable deserialization. */
+    public Message() {
+    }
+
+    public Message(I receiverId, M body) {
+        this.receiverId = receiverId;
+        this.body = body;
+    }
+
+    public void setConf(Configuration conf) {
+        this.conf = conf;
+    }
+
+    public Configuration getConf() {
+        return conf;
+    }
+
+    /**
+     * Reads the receiver id and body from the input. On first use (both
+     * fields null) the concrete classes are looked up from the configuration
+     * via BspUtils — so {@link #setConf(Configuration)} must have been called
+     * first, otherwise the lookups see a null configuration.
+     */
+    @SuppressWarnings("unchecked")
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        if (this.receiverId == null && this.body == null) {
+            setClass((Class<I>) BspUtils.getVertexIndexClass(getConf()),
+                    (Class<M>) BspUtils.getMessageValueClass(getConf()));
+        }
+        receiverId.readFields(input);
+        body.readFields(input);
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        receiverId.write(output);
+        body.write(output);
+    }
+
+    public I getReceiverVertexId() {
+        return receiverId;
+    }
+
+    public M getMessageBody() {
+        return body;
+    }
+
+    /** Reflectively instantiates the receiver-id and body fields. */
+    private void setClass(Class<I> idClass, Class<M> bodyClass) {
+        receiverId = ReflectionUtils.newInstance(idClass, getConf());
+        body = ReflectionUtils.newInstance(bodyClass, getConf());
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/data/MessageList.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/data/MessageList.java
new file mode 100644
index 0000000..bff3783
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/data/MessageList.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.hadoop.data;
+
+import edu.uci.ics.pregelix.api.util.ArrayListWritable;
+
+/**
+ * A writable list of {@link Message} elements; binds the element class so
+ * the parent {@code ArrayListWritable} knows what to instantiate.
+ */
+@SuppressWarnings("rawtypes")
+public class MessageList extends ArrayListWritable<Message> {
+    private static final long serialVersionUID = 1L;
+
+    /** Registers {@link Message} as the element class of this list. */
+    @Override
+    public void setClass() {
+        setClass(Message.class);
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
new file mode 100644
index 0000000..0b1be61
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
@@ -0,0 +1,475 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.jobgen;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.mapreduce.InputSplit;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDropOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexInputFormat;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.api.util.ReflectionUtils;
+import edu.uci.ics.pregelix.core.base.IJobGen;
+import edu.uci.ics.pregelix.core.data.TypeTraits;
+import edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.jobgen.provider.NormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
+import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.core.util.DatatypeHelper;
+import edu.uci.ics.pregelix.dataflow.HDFSFileWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.VertexFileScanOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.VertexWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.bootstrap.StorageManagerInterface;
+import edu.uci.ics.pregelix.runtime.bootstrap.TreeIndexRegistryProvider;
+import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+
+public abstract class JobGen implements IJobGen {
+    private static final Logger LOGGER = Logger.getLogger(JobGen.class.getName());
+    protected static final int MB = 1048576;
+    protected static final float DEFAULT_BTREE_FILL_FACTOR = 1.00f;
+    protected static final int tableSize = 10485767;
+    protected static final String PRIMARY_INDEX = "primary";
+    protected final Configuration conf;
+    protected final PregelixJob giraphJob;
+    protected IIndexRegistryProvider<IIndex> treeRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
+    protected IStorageManagerInterface storageManagerInterface = StorageManagerInterface.INSTANCE;
+    protected String jobId = new UUID(System.currentTimeMillis(), System.nanoTime()).toString();
+    protected int frameSize = ClusterConfig.getFrameSize();
+    protected int maxFrameNumber = (int) (((long) 32 * MB) / frameSize);
+
+    protected static final String SECONDARY_INDEX_ODD = "secondary1";
+    protected static final String SECONDARY_INDEX_EVEN = "secondary2";
+
+    public JobGen(PregelixJob job) {
+        this.conf = job.getConfiguration();
+        this.giraphJob = job;
+        this.initJobConfiguration();
+        job.setJobId(jobId);
+
+        // set the frame size to be the one user specified if the user did
+        // specify.
+        int specifiedFrameSize = BspUtils.getFrameSize(job.getConfiguration());
+        if (specifiedFrameSize > 0) {
+            frameSize = specifiedFrameSize;
+            maxFrameNumber = (int) (((long) 32 * MB) / frameSize);
+        }
+        if (maxFrameNumber <= 0) {
+            maxFrameNumber = 1;
+        }
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    private void initJobConfiguration() {
+        Class vertexClass = conf.getClass(PregelixJob.VERTEX_CLASS, Vertex.class);
+        List<Type> parameterTypes = ReflectionUtils.getTypeArguments(Vertex.class, vertexClass);
+        Type vertexIndexType = parameterTypes.get(0);
+        Type vertexValueType = parameterTypes.get(1);
+        Type edgeValueType = parameterTypes.get(2);
+        Type messageValueType = parameterTypes.get(3);
+        conf.setClass(PregelixJob.VERTEX_INDEX_CLASS, (Class<?>) vertexIndexType, WritableComparable.class);
+        conf.setClass(PregelixJob.VERTEX_VALUE_CLASS, (Class<?>) vertexValueType, Writable.class);
+        conf.setClass(PregelixJob.EDGE_VALUE_CLASS, (Class<?>) edgeValueType, Writable.class);
+        conf.setClass(PregelixJob.MESSAGE_VALUE_CLASS, (Class<?>) messageValueType, Writable.class);
+
+        Class aggregatorClass = BspUtils.getGlobalAggregatorClass(conf);
+        if (!aggregatorClass.equals(GlobalAggregator.class)) {
+            List<Type> argTypes = ReflectionUtils.getTypeArguments(GlobalAggregator.class, aggregatorClass);
+            Type partialAggregateValueType = argTypes.get(4);
+            conf.setClass(PregelixJob.PARTIAL_AGGREGATE_VALUE_CLASS, (Class<?>) partialAggregateValueType,
+                    Writable.class);
+            Type finalAggregateValueType = argTypes.get(5);
+            conf.setClass(PregelixJob.FINAL_AGGREGATE_VALUE_CLASS, (Class<?>) finalAggregateValueType, Writable.class);
+        }
+
+        Class combinerClass = BspUtils.getMessageCombinerClass(conf);
+        if (!combinerClass.equals(MessageCombiner.class)) {
+            List<Type> argTypes = ReflectionUtils.getTypeArguments(MessageCombiner.class, combinerClass);
+            Type partialCombineValueType = argTypes.get(2);
+            conf.setClass(PregelixJob.PARTIAL_COMBINE_VALUE_CLASS, (Class<?>) partialCombineValueType, Writable.class);
+        }
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public JobSpecification generateCreatingJob() throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        JobSpecification spec = new JobSpecification();
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+        TreeIndexCreateOperatorDescriptor btreeCreate = new TreeIndexCreateOperatorDescriptor(spec,
+                storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits, comparatorFactories,
+                new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, btreeCreate);
+        return spec;
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public JobSpecification generateLoadingJob() throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        JobSpecification spec = new JobSpecification();
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+
+        /**
+         * the graph file scan operator; use a count constraint first, and an
+         * absolute constraint later
+         */
+        VertexInputFormat inputFormat = BspUtils.createVertexInputFormat(conf);
+        List<InputSplit> splits = new ArrayList<InputSplit>();
+        try {
+            splits = inputFormat.getSplits(giraphJob, fileSplitProvider.getFileSplits().length);
+            LOGGER.info("number of splits: " + splits.size());
+            for (InputSplit split : splits)
+                LOGGER.info(split.toString());
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        String[] readSchedule = ClusterConfig.getHdfsScheduler().getLocationConstraints(splits);
+        VertexFileScanOperatorDescriptor scanner = new VertexFileScanOperatorDescriptor(spec, recordDescriptor, splits,
+                readSchedule, confFactory);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * construct sort operator
+         */
+        int[] sortFields = new int[1];
+        sortFields[0] = 0;
+        INormalizedKeyComputerFactory nkmFactory = NormalizedKeyComputerFactoryProvider.INSTANCE
+                .getAscINormalizedKeyComputerFactory(vertexIdClass);
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, sortFields,
+                nkmFactory, comparatorFactories, recordDescriptor);
+        ClusterConfig.setLocationConstraint(spec, sorter);
+
+        /**
+         * construct tree bulk load operator
+         */
+        int[] fieldPermutation = new int[2];
+        fieldPermutation[0] = 0;
+        fieldPermutation[1] = 1;
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits, comparatorFactories,
+                fieldPermutation, DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, btreeBulkLoad);
+
+        /**
+         * connect operator descriptors
+         */
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new VertexIdPartitionComputerFactory(
+                DatatypeHelper.createSerializerDeserializer(vertexIdClass));
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 0, sorter, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
+        return spec;
+    }
+
+    @Override
+    public JobSpecification generateJob(int iteration) throws HyracksException {
+        if (iteration <= 0)
+            throw new IllegalStateException("iteration number cannot be less than 1");
+        if (iteration == 1)
+            return generateFirstIteration(iteration);
+        else
+            return generateNonFirstIteration(iteration);
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public JobSpecification scanSortPrintGraph(String nodeName, String path) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        int maxFrameLimit = (int) (((long) 512 * MB) / frameSize);
+        JobSpecification spec = new JobSpecification();
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+
+        /**
+         * the graph file scan operator; use a count constraint first, and an
+         * absolute constraint later
+         */
+        VertexInputFormat inputFormat = BspUtils.createVertexInputFormat(conf);
+        List<InputSplit> splits = new ArrayList<InputSplit>();
+        try {
+            splits = inputFormat.getSplits(giraphJob, fileSplitProvider.getFileSplits().length);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        String[] readSchedule = ClusterConfig.getHdfsScheduler().getLocationConstraints(splits);
+        VertexFileScanOperatorDescriptor scanner = new VertexFileScanOperatorDescriptor(spec, recordDescriptor, splits,
+                readSchedule, confFactory);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * construct sort operator
+         */
+        int[] sortFields = new int[1];
+        sortFields[0] = 0;
+        INormalizedKeyComputerFactory nkmFactory = NormalizedKeyComputerFactoryProvider.INSTANCE
+                .getAscINormalizedKeyComputerFactory(vertexIdClass);
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, maxFrameLimit, sortFields,
+                nkmFactory, comparatorFactories, recordDescriptor);
+        ClusterConfig.setLocationConstraint(spec, sorter);
+
+        /**
+         * construct write file operator
+         */
+        FileSplit resultFile = new FileSplit(nodeName, new FileReference(new File(path)));
+        FileSplit[] results = new FileSplit[1];
+        results[0] = resultFile;
+        IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(confFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, inputRdFactory,
+                resultFileSplitProvider, preHookFactory, null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { "nc1" });
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
+
+        /**
+         * connect operator descriptors
+         */
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new VertexIdPartitionComputerFactory(
+                DatatypeHelper.createSerializerDeserializer(vertexIdClass));
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, sorter, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, hashPartitionComputerFactory, sortFields,
+                comparatorFactories), sorter, 0, writer, 0);
+        return spec;
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public JobSpecification scanIndexPrintGraph(String nodeName, String path) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * construct empty tuple operator
+         */
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
+        DataOutput dos = tb.getDataOutput();
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+        ConstantTupleSourceOperatorDescriptor emptyTupleSource = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /**
+         * construct btree search operator
+         */
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        BTreeSearchOperatorDescriptor scanner = new BTreeSearchOperatorDescriptor(spec, recordDescriptor,
+                storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits, comparatorFactories,
+                null, null, true, true, new BTreeDataflowHelperFactory(), false, NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * construct write file operator
+         */
+        FileSplit resultFile = new FileSplit(nodeName, new FileReference(new File(path)));
+        FileSplit[] results = new FileSplit[1];
+        results[0] = resultFile;
+        IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(confFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, inputRdFactory,
+                resultFileSplitProvider, preHookFactory, null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { "nc1" });
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
+
+        /**
+         * connect operator descriptors
+         */
+        int[] sortFields = new int[1];
+        sortFields[0] = 0;
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new VertexIdPartitionComputerFactory(
+                DatatypeHelper.createSerializerDeserializer(vertexIdClass));
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, scanner, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, hashPartitionComputerFactory, sortFields,
+                comparatorFactories), scanner, 0, writer, 0);
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public JobSpecification scanIndexWriteGraph() throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * construct empty tuple operator
+         */
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
+        DataOutput dos = tb.getDataOutput();
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+        ConstantTupleSourceOperatorDescriptor emptyTupleSource = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /**
+         * construct btree search operator
+         */
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        BTreeSearchOperatorDescriptor scanner = new BTreeSearchOperatorDescriptor(spec, recordDescriptor,
+                storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits, comparatorFactories,
+                null, null, true, true, new BTreeDataflowHelperFactory(), false, NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * construct write file operator
+         */
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        HDFSFileWriteOperatorDescriptor writer = new HDFSFileWriteOperatorDescriptor(spec, confFactory, inputRdFactory);
+        ClusterConfig.setLocationConstraint(spec, writer);
+
+        /**
+         * connect operator descriptors
+         */
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, scanner, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, writer, 0);
+        return spec;
+    }
+
+    /***
+     * drop the index with the given name
+     * 
+     * @return JobSpecification
+     * @throws HyracksException
+     */
+    protected JobSpecification dropIndex(String indexName) throws HyracksException {
+        JobSpecification spec = new JobSpecification();
+
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, indexName);
+        TreeIndexDropOperatorDescriptor drop = new TreeIndexDropOperatorDescriptor(spec, storageManagerInterface,
+                treeRegistryProvider, fileSplitProvider);
+
+        ClusterConfig.setLocationConstraint(spec, drop);
+        spec.addRoot(drop);
+        return spec;
+    }
+
+    /** generate non-first iteration job */
+    protected abstract JobSpecification generateNonFirstIteration(int iteration) throws HyracksException;
+
+    /** generate first iteration job */
+    protected abstract JobSpecification generateFirstIteration(int iteration) throws HyracksException;
+
+    /** generate clean-up job */
+    public abstract JobSpecification[] generateCleanup() throws HyracksException;
+
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
new file mode 100644
index 0000000..727e7fe
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
@@ -0,0 +1,522 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.jobgen;
+
+import org.apache.hadoop.io.VLongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.core.data.TypeTraits;
+import edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
+import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.dataflow.ConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.pregelix.dataflow.EmptySinkOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.EmptyTupleSourceOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.FinalAggregateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingReadOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.TerminationStateWriterOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.BTreeSearchFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.IndexNestedLoopJoinFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.IndexNestedLoopJoinOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.RuntimeHookOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.TreeIndexBulkReLoadOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.function.ComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.function.StartComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.MergePartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PostSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PreSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+
+public class JobGenInnerJoin extends JobGen {
+
+    public JobGenInnerJoin(PregelixJob job) {
+        super(job);
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    protected JobSpecification generateFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * construct empty tuple operator
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /** construct runtime hook */
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct drop index operator
+         */
+        IFileSplitProvider secondaryFileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, SECONDARY_INDEX_ODD);
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+
+        /**
+         * construct btree search and function call update operator
+         */
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
+                recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 6,
+                new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
+                rdPartialAggregate, rdInsert, rdDelete, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * termination state write operator
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+
+        /**
+         * final aggregate write operator
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+
+        /**
+         * construct bulk-load index operator
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        IBinaryComparatorFactory[] indexCmpFactories = new IBinaryComparatorFactory[1];
+        indexCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration + 1,
+                WritableComparator.get(vertexIdClass).getClass());
+        TreeIndexBulkReLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkReLoadOperatorDescriptor(spec,
+                storageManagerInterface, treeRegistryProvider, secondaryFileSplitProvider, typeTraits,
+                indexCmpFactories, fieldPermutation, DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory());
+        ClusterConfig.setLocationConstraint(spec, btreeBulkLoad);
+
+        /**
+         * construct local sort operator
+         */
+        int[] keyFields = new int[] { 0 };
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localSort);
+
+        /**
+         * construct local pre-clustered group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
+        PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localGby);
+
+        /**
+         * construct global group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        /**
+         * do pre- & post- super step
+         */
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink);
+
+        /**
+         * add the insert operator to insert vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+
+        /** connect all operators **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, localSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+                finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 3, insertOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 4, deleteOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 5, btreeBulkLoad, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
+                localGby, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink, 0);
+
+        spec.addRoot(emptySink);
+        spec.addRoot(btreeBulkLoad);
+        spec.addRoot(terminateWriter);
+        spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
+
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    protected JobSpecification generateNonFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * source aggregate
+         */
+        int[] keyFields = new int[] { 0 };
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        /**
+         * construct empty tuple operator
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /**
+         * construct pre-superstep
+         */
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct the materializing write operator
+         */
+        MaterializingReadOperatorDescriptor materializeRead = new MaterializingReadOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materializeRead);
+
+        /**
+         * construct the index-set-union operator
+         */
+        String readFile = iteration % 2 == 0 ? SECONDARY_INDEX_ODD : SECONDARY_INDEX_EVEN;
+        IFileSplitProvider secondaryFileSplitProviderRead = ClusterConfig.getFileSplitProvider(jobId, readFile);
+
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        IndexNestedLoopJoinOperatorDescriptor setUnion = new IndexNestedLoopJoinOperatorDescriptor(spec, rdFinal,
+                storageManagerInterface, treeRegistryProvider, secondaryFileSplitProviderRead, interiorFrameFactory,
+                leafFrameFactory, typeTraits, comparatorFactories, true, keyFields, keyFields, true, true,
+                new BTreeDataflowHelperFactory(), true);
+        ClusterConfig.setLocationConstraint(spec, setUnion);
+
+        /**
+         * construct index-join-function-update operator
+         */
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), MsgList.class.getName(), vertexIdClass.getName(), vertexClass.getName());
+
+        IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
+                spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields, keyFields, true, true,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 6, new ComputeUpdateFunctionFactory(confFactory),
+                preHookFactory, null, rdUnnestedMessage, rdDummy, rdPartialAggregate, rdInsert, rdDelete, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, join);
+
+        /**
+         * construct bulk-load index operator
+         */
+        int fieldPermutation[] = new int[] { 0, 1 };
+        IBinaryComparatorFactory[] indexCmpFactories = new IBinaryComparatorFactory[1];
+        indexCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration + 1,
+                WritableComparator.get(vertexIdClass).getClass());
+        String writeFile = iteration % 2 == 0 ? SECONDARY_INDEX_EVEN : SECONDARY_INDEX_ODD;
+        IFileSplitProvider secondaryFileSplitProviderWrite = ClusterConfig.getFileSplitProvider(jobId, writeFile);
+        TreeIndexBulkReLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkReLoadOperatorDescriptor(spec,
+                storageManagerInterface, treeRegistryProvider, secondaryFileSplitProviderWrite, typeTraits,
+                indexCmpFactories, fieldPermutation, DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory());
+        ClusterConfig.setLocationConstraint(spec, btreeBulkLoad);
+
+        /**
+         * construct local sort operator
+         */
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localSort);
+
+        /**
+         * construct local pre-clustered group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
+        PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localGby);
+
+        /**
+         * construct global group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        /** construct runtime hook */
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink);
+
+        /**
+         * termination state write operator
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+
+        /**
+         * final aggregate write operator
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+
+        /**
+         * add the insert operator to insert vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+        /** connect all operators **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, setUnion, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), setUnion, 0, join, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, localSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+                finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), join, 5, btreeBulkLoad, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
+                localGby, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink, 0);
+
+        spec.addRoot(emptySink);
+        spec.addRoot(btreeBulkLoad);
+        spec.addRoot(terminateWriter);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
+
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    @Override
+    public JobSpecification[] generateCleanup() throws HyracksException {
+        JobSpecification[] cleanups = new JobSpecification[3];
+        cleanups[0] = this.dropIndex(PRIMARY_INDEX);
+        cleanups[1] = this.dropIndex(SECONDARY_INDEX_ODD);
+        cleanups[2] = this.dropIndex(SECONDARY_INDEX_EVEN);
+        return cleanups;
+    }
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
new file mode 100644
index 0000000..9bad169
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
@@ -0,0 +1,480 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.jobgen;
+
+import org.apache.hadoop.io.VLongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.core.data.TypeTraits;
+import edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
+import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.dataflow.ConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.pregelix.dataflow.EmptySinkOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.EmptyTupleSourceOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.FinalAggregateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingReadOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.TerminationStateWriterOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.BTreeSearchFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.IndexNestedLoopJoinFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.RuntimeHookOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.function.ComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.function.StartComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.MergePartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.MsgListNullWriterFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PostSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PreSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdNullWriterFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+
+/**
+ * Pregelix job generator that realizes each superstep as a B-tree outer join:
+ * the materialized message lists are joined with the vertex index using an
+ * index nested-loop join with null writers, so vertices without incoming
+ * messages still reach the compute function.
+ * <p>
+ * NOTE(review): the function-update operators are created with output arity 5;
+ * from the connector wiring below, port 0 carries unnested messages, port 1
+ * the termination state, port 2 the partial global aggregate, and ports 3/4
+ * vertex insertions/deletions — confirm against the descriptor implementations.
+ */
+public class JobGenOuterJoin extends JobGen {
+
+    /** @param job the Pregelix job whose configuration drives plan generation */
+    public JobGenOuterJoin(PregelixJob job) {
+        super(job);
+    }
+
+    /**
+     * Builds the dataflow plan for the first superstep: a full scan of the
+     * primary vertex index feeds the start-compute update function; emitted
+     * messages are locally sorted, pre-clustered-grouped locally and globally,
+     * and materialized for the next iteration.
+     *
+     * @param iteration iteration number; selects scan direction and comparator
+     *                  polarity via {@code JobGenUtil}
+     * @return the complete job specification for the first iteration
+     * @throws HyracksException if cluster or file-split configuration fails
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    protected JobSpecification generateFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * construct empty tuple operator (single source tuple to kick off the plan)
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /** construct runtime hook that runs the pre-superstep callback on each partition */
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct btree search function update operator (full scan + start-compute)
+         */
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        // NOTE(review): duplicates confFactory created above — could be reused.
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        // null low/high keys with inclusive bounds => full forward/backward scan;
+        // output arity 5 matches the five record descriptors passed at the end.
+        BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
+                recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 5,
+                new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
+                rdPartialAggregate, rdInsert, rdDelete);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * construct local sort operator (orders messages by vertex id for grouping)
+         */
+        int[] keyFields = new int[] { 0 };
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localSort);
+
+        /**
+         * construct local pre-clustered group-by operator (partial combine)
+         */
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
+        PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localGby);
+
+        /**
+         * construct global group-by operator (final combine into MsgList per vertex)
+         */
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator (persists grouped messages
+         * for the next superstep's materializing read)
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink2 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink2);
+
+        /**
+         * termination state write operator (single partition collects the vote)
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+
+        /**
+         * final aggregate write operator (single partition)
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+
+        /**
+         * add the insert operator to insert vertexes
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertexes
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+        /** connect all operators; scanner ports: 0=messages, 1=termination, 2=aggregate, 3=insert, 4=delete **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, localSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+                finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 3, insertOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 4, deleteOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        /**
+         * connect the group-by operator
+         */
+        spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
+                localGby, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink2, 0);
+
+        spec.addRoot(terminateWriter);
+        spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink2);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
+
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    /**
+     * Builds the dataflow plan for every superstep after the first: the
+     * previously materialized message lists are read back and outer-joined
+     * (with null writers) against the vertex index, so vertices with no
+     * incoming messages are still presented to the compute function.
+     *
+     * @param iteration iteration number; selects scan direction and comparator
+     *                  polarity via {@code JobGenUtil}
+     * @return the complete job specification for this iteration
+     * @throws HyracksException if cluster or file-split configuration fails
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    protected JobSpecification generateNonFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * source aggregate
+         */
+        int[] keyFields = new int[] { 0 };
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        /**
+         * construct empty tuple operator
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /**
+         * construct pre-superstep hook
+         */
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct the materializing read operator (reads the message lists
+         * written by the previous superstep's materializing write)
+         */
+        MaterializingReadOperatorDescriptor materializeRead = new MaterializingReadOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materializeRead);
+
+        /**
+         * construct index join function update operator
+         */
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        // null writers make the join an OUTER join: unmatched sides are padded with nulls
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[2];
+        nullWriterFactories[0] = VertexIdNullWriterFactory.INSTANCE;
+        nullWriterFactories[1] = MsgListNullWriterFactory.INSTANCE;
+
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), MsgList.class.getName(), vertexIdClass.getName(), vertexClass.getName());
+
+        IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
+                spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, interiorFrameFactory,
+                leafFrameFactory, typeTraits, comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields,
+                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 5,
+                new ComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
+                rdPartialAggregate, rdInsert, rdDelete);
+        ClusterConfig.setLocationConstraint(spec, join);
+
+        /**
+         * construct local sort operator
+         */
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localSort);
+
+        /**
+         * construct local pre-clustered group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
+        PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localGby);
+
+        /**
+         * construct global group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        /** construct runtime hook */
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink);
+
+        /**
+         * termination state write operator
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+
+        /**
+         * final aggregate write operator
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+
+        /**
+         * add the insert operator to insert vertexes
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertexes
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+
+        /** connect all operators; join ports: 0=messages, 1=termination, 2=aggregate, 3=insert, 4=delete **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, join, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, localSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+                finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
+                localGby, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink, 0);
+
+        spec.addRoot(terminateWriter);
+        spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink);
+
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    /**
+     * Builds the cleanup jobs: this plan only materializes the primary vertex
+     * index, so a single drop-index job suffices.
+     */
+    @Override
+    public JobSpecification[] generateCleanup() throws HyracksException {
+        JobSpecification[] cleanups = new JobSpecification[1];
+        cleanups[0] = this.dropIndex(PRIMARY_INDEX);
+        return cleanups;
+    }
+
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
new file mode 100644
index 0000000..ffdef10
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
@@ -0,0 +1,456 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.jobgen;
+
+import org.apache.hadoop.io.VLongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.core.data.TypeTraits;
+import edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
+import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.dataflow.EmptySinkOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.EmptyTupleSourceOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.FinalAggregateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingReadOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.NonCombinerConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.pregelix.dataflow.TerminationStateWriterOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.BTreeSearchFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.IndexNestedLoopJoinFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.RuntimeHookOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.function.ComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.function.StartComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.MergePartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.MsgListNullWriterFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PostSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PreSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdNullWriterFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+
+/**
+ * Job generator for the outer-join based Pregel execution plan that routes
+ * all produced messages through a single global sort + pre-clustered group-by
+ * per superstep (no local combiner/group-by stage is built here -- the name
+ * "SingleSort" presumably refers to this; NOTE(review): confirm against the
+ * sibling JobGen* variants).
+ */
+public class JobGenOuterJoinSingleSort extends JobGen {
+
+    /** Delegates all job-level configuration to the {@link JobGen} base class. */
+    public JobGenOuterJoinSingleSort(PregelixJob job) {
+        super(job);
+    }
+
+    /**
+     * Builds the Hyracks job for the first superstep: a B-tree scan over the
+     * primary vertex index feeds the start-compute UDF, whose five outputs
+     * (0: unnested messages, 1: termination state, 2: partial aggregate,
+     * 3: vertex inserts, 4: vertex deletes) are wired to the global sort /
+     * group-by / materialize chain and to the side-effect operators below.
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    protected JobSpecification generateFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * construct empty tuple operator
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /** construct runtime hook */
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct btree search operator
+         */
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+
+        // variable-length key and value fields (TypeTraits(false) = not fixed-size)
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+
+        /**
+         * construct compute operator
+         */
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        // scan + start-compute UDF; the "5" is the number of UDF output streams
+        BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
+                recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 5,
+                new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
+                rdPartialAggregate);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * construct global sort operator
+         */
+        int[] keyFields = new int[] { 0 };
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, globalSort);
+
+        /**
+         * construct global group-by operator
+         */
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, false);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink2 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink2);
+
+        /**
+         * termination state write operator
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+
+        /**
+         * final aggregate write operator
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+
+        /**
+         * add the insert operator to insert vertexes
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertexes
+         */
+        // NOTE(review): reuses fieldPermutation {0, 1} although rdDelete carries a
+        // single field -- verify the delete operator only reads the key field
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+        /** connect all operators **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 0, globalSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+                finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 3, insertOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 4, deleteOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink2, 0);
+
+        spec.addRoot(terminateWriter);
+        spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink2);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
+
+        spec.setConnectorPolicyAssignmentPolicy(new NonCombinerConnectorPolicyAssignmentPolicy());
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    /**
+     * Builds the Hyracks job for every superstep after the first: reads the
+     * materialized (vertex id, MsgList) pairs from the previous superstep,
+     * outer-joins them against the primary vertex index while running the
+     * compute UDF, then sorts/groups/materializes the new messages. The UDF
+     * output wiring (streams 0-4) mirrors {@link #generateFirstIteration(int)}.
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    protected JobSpecification generateNonFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * source aggregate
+         */
+        int[] keyFields = new int[] { 0 };
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        /**
+         * construct empty tuple operator
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /**
+         * construct pre-superstep hook
+         */
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct the materializing write operator
+         */
+        MaterializingReadOperatorDescriptor materializeRead = new MaterializingReadOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materializeRead);
+
+        /**
+         * construct index join function update operator
+         */
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        // null writers pad the outer-join output when one side has no match
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[2];
+        nullWriterFactories[0] = VertexIdNullWriterFactory.INSTANCE;
+        nullWriterFactories[1] = MsgListNullWriterFactory.INSTANCE;
+
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), MsgList.class.getName(), vertexIdClass.getName(), vertexClass.getName());
+
+        IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
+                spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, interiorFrameFactory,
+                leafFrameFactory, typeTraits, comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields,
+                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 5,
+                new ComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
+                rdPartialAggregate);
+        ClusterConfig.setLocationConstraint(spec, join);
+
+        /**
+         * construct global sort operator
+         */
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, globalSort);
+
+        /**
+         * construct global group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, false);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        /** construct runtime hook */
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink);
+
+        /**
+         * termination state write operator
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+
+        /**
+         * final aggregate write operator
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+        
+
+        /**
+         * add the insert operator to insert vertexes
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertexes
+         */
+        // NOTE(review): reuses fieldPermutation {0, 1} although rdDelete carries a
+        // single field -- verify the delete operator only reads the key field
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+        
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+
+        /** connect all operators **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, join, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 0, globalSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+                finalAggregator, 0);
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 3, insertOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 4, deleteOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+        
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink, 0);
+
+        // NOTE(review): emptySink3/emptySink4 are not added as roots here,
+        // unlike generateFirstIteration -- confirm whether that is intentional
+        spec.addRoot(terminateWriter);
+        spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink);
+
+        spec.setConnectorPolicyAssignmentPolicy(new NonCombinerConnectorPolicyAssignmentPolicy());
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    /**
+     * Builds the cleanup job(s) to run after the Pregelix job completes.
+     *
+     * @return a single-element array containing one job specification that
+     *         drops the primary index holding the vertex data
+     * @throws HyracksException if the drop-index job cannot be constructed
+     */
+    @Override
+    public JobSpecification[] generateCleanup() throws HyracksException {
+        JobSpecification[] cleanups = new JobSpecification[1];
+        cleanups[0] = this.dropIndex(PRIMARY_INDEX);
+        return cleanups;
+    }
+
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
new file mode 100644
index 0000000..cc12523
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
@@ -0,0 +1,487 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.jobgen;
+
+import org.apache.hadoop.io.VLongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.core.data.TypeTraits;
+import edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
+import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.dataflow.EmptySinkOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.EmptyTupleSourceOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.FinalAggregateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingReadOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.MaterializingWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.TerminationStateWriterOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.BTreeSearchFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.IndexNestedLoopJoinFunctionUpdateOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.RuntimeHookOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.function.ComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.function.StartComputeUpdateFunctionFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.MergePartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.MsgListNullWriterFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PostSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.PreSuperStepRuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdNullWriterFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+
+public class JobGenOuterJoinSort extends JobGen {
+
+    /**
+     * Create a job generator for the given Pregelix job; all configuration is
+     * taken from the job's Hadoop configuration via the superclass.
+     *
+     * @param job the Pregelix job to generate Hyracks job specifications for
+     */
+    public JobGenOuterJoinSort(PregelixJob job) {
+        super(job);
+    }
+
+    /**
+     * Build the Hyracks job for the first superstep: scan all vertexes from the
+     * primary B-tree index and apply the start-compute update function, then sort
+     * and group the produced messages (locally, then globally after hash
+     * repartitioning on vertex id), materialize the grouped message lists for the
+     * next iteration, and apply vertex insertions/deletions back to the primary
+     * index. Termination state and the global aggregate are each written by a
+     * single-partition operator.
+     *
+     * @param iteration the superstep number (forwarded to JobGenUtil for sort
+     *                  order / scan direction selection)
+     * @return the complete job specification for the first iteration
+     * @throws HyracksException if the cluster configuration cannot be applied
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    protected JobSpecification generateFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * construct empty tuple operator (drives the pipeline with one empty tuple)
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /** construct runtime hook that runs before the superstep starts */
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct btree search function update operator
+         */
+        RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), vertexClass.getName());
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        // scanner has 5 output ports, wired below:
+        // 0=messages, 1=termination state, 2=partial aggregate, 3=insertions, 4=deletions
+        BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
+                recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 5,
+                new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
+                rdPartialAggregate);
+        ClusterConfig.setLocationConstraint(spec, scanner);
+
+        /**
+         * construct local sort operator (sorts messages on vertex id, field 0)
+         */
+        int[] keyFields = new int[] { 0 };
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localSort);
+
+        /**
+         * construct local pre-clustered group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
+        PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localGby);
+
+        /**
+         * construct global sort operator (re-sorts after hash repartitioning)
+         */
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, globalSort);
+
+        /**
+         * construct global group-by operator (produces final per-vertex MsgList)
+         */
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator (saves message lists for the next superstep)
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink2 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink2);
+
+        /**
+         * termination state write operator (single partition)
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+
+        /**
+         * final aggregate write operator (single partition)
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+
+        /**
+         * add the insert operator to insert vertexes
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertexes
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+        /** connect all operators **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, localSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+                finalAggregator, 0);
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 3, insertOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 4, deleteOp,
+                0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), localGby, 0, globalSort, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink2, 0);
+
+        spec.addRoot(terminateWriter);
+        spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink2);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
+
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    /**
+     * Build the Hyracks job for every superstep after the first: replay the
+     * message lists materialized by the previous superstep, outer-join them with
+     * the vertexes in the primary B-tree index (null writers supply the missing
+     * side), apply the compute update function, then sort/group the new messages
+     * locally and globally, materialize them for the next superstep, and apply
+     * vertex insertions/deletions back to the primary index. Termination state
+     * and the global aggregate are each written by a single-partition operator.
+     *
+     * @param iteration the superstep number (forwarded to JobGenUtil for sort
+     *                  order / scan direction selection)
+     * @return the complete job specification for a non-first iteration
+     * @throws HyracksException if the cluster configuration cannot be applied
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    protected JobSpecification generateNonFirstIteration(int iteration) throws HyracksException {
+        Class<? extends WritableComparable<?>> vertexIdClass = BspUtils.getVertexIndexClass(conf);
+        Class<? extends Writable> vertexClass = BspUtils.getVertexClass(conf);
+        Class<? extends Writable> messageValueClass = BspUtils.getMessageValueClass(conf);
+        Class<? extends Writable> partialAggregateValueClass = BspUtils.getPartialAggregateValueClass(conf);
+        JobSpecification spec = new JobSpecification();
+
+        /**
+         * source aggregate
+         */
+        int[] keyFields = new int[] { 0 };
+        RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
+                vertexIdClass.getName(), messageValueClass.getName());
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
+                .getClass());
+        RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
+        /**
+         * construct empty tuple operator (drives the pipeline with one empty tuple)
+         */
+        EmptyTupleSourceOperatorDescriptor emptyTupleSource = new EmptyTupleSourceOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptyTupleSource);
+
+        /**
+         * construct pre-superstep hook
+         */
+        IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        RuntimeHookOperatorDescriptor preSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PreSuperStepRuntimeHookFactory(jobId, confFactory));
+        ClusterConfig.setLocationConstraint(spec, preSuperStep);
+
+        /**
+         * construct the materializing read operator (replays the message lists
+         * materialized by the previous superstep; same rdFinal layout)
+         */
+        MaterializingReadOperatorDescriptor materializeRead = new MaterializingReadOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materializeRead);
+
+        /**
+         * construct index join function update operator
+         */
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(jobId, PRIMARY_INDEX);
+        ITypeTraits[] typeTraits = new ITypeTraits[2];
+        typeTraits[0] = new TypeTraits(false);
+        typeTraits[1] = new TypeTraits(false);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[2];
+        nullWriterFactories[0] = VertexIdNullWriterFactory.INSTANCE;
+        nullWriterFactories[1] = MsgListNullWriterFactory.INSTANCE;
+
+        RecordDescriptor rdDummy = DataflowUtils.getRecordDescriptorFromWritableClasses(VLongWritable.class.getName());
+        RecordDescriptor rdPartialAggregate = DataflowUtils
+                .getRecordDescriptorFromWritableClasses(partialAggregateValueClass.getName());
+        IConfigurationFactory configurationFactory = new ConfigurationFactory(conf);
+        IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(configurationFactory);
+        IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
+                vertexIdClass.getName(), MsgList.class.getName(), vertexIdClass.getName(), vertexClass.getName());
+
+        // join has 5 output ports, wired below:
+        // 0=messages, 1=termination state, 2=partial aggregate, 3=insertions, 4=deletions
+        IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
+                spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, interiorFrameFactory,
+                leafFrameFactory, typeTraits, comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields,
+                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 5,
+                new ComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
+                rdPartialAggregate);
+        ClusterConfig.setLocationConstraint(spec, join);
+
+        /**
+         * construct local sort operator (sorts messages on vertex id, field 0)
+         */
+        INormalizedKeyComputerFactory nkmFactory = JobGenUtil
+                .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+        IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
+        sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
+                .getClass());
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localSort);
+
+        /**
+         * construct local pre-clustered group-by operator
+         */
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
+        PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, localGby);
+
+        /**
+         * construct global sort operator (re-sorts after hash repartitioning)
+         */
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
+                nkmFactory, sortCmpFactories, rdUnnestedMessage);
+        ClusterConfig.setLocationConstraint(spec, globalSort);
+
+        /**
+         * construct global group-by operator (produces final per-vertex MsgList)
+         */
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
+        PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                sortCmpFactories, aggregatorFactoryFinal, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, globalGby);
+
+        /**
+         * construct the materializing write operator (saves message lists for the next superstep)
+         */
+        MaterializingWriteOperatorDescriptor materialize = new MaterializingWriteOperatorDescriptor(spec, rdFinal);
+        ClusterConfig.setLocationConstraint(spec, materialize);
+
+        /** construct runtime hook that runs after the superstep finishes */
+        RuntimeHookOperatorDescriptor postSuperStep = new RuntimeHookOperatorDescriptor(spec,
+                new PostSuperStepRuntimeHookFactory(jobId));
+        ClusterConfig.setLocationConstraint(spec, postSuperStep);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink);
+
+        /**
+         * termination state write operator (single partition)
+         */
+        TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
+                configurationFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
+
+        /**
+         * final aggregate write operator (single partition)
+         */
+        IRecordDescriptorFactory aggRdFactory = DataflowUtils
+                .getWritableRecordDescriptorFactoryFromWritableClasses(partialAggregateValueClass.getName());
+        FinalAggregateOperatorDescriptor finalAggregator = new FinalAggregateOperatorDescriptor(spec,
+                configurationFactory, aggRdFactory, jobId);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
+
+        /**
+         * add the insert operator to insert vertexes
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertexes
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
+                rdUnnestedMessage.getFields()[0]);
+
+        /** connect all operators **/
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, join, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, localSort, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+                terminateWriter, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+                finalAggregator, 0);
+        /**
+         * connect the insert/delete operator
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), localGby, 0, globalSort, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), postSuperStep, 0, emptySink, 0);
+
+        spec.addRoot(terminateWriter);
+        spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
+
+        spec.setFrameSize(frameSize);
+        return spec;
+    }
+
+    /**
+     * Generate the cleanup job(s) for this plan: a single job specification
+     * that drops the primary B-tree index holding the vertex data.
+     *
+     * @return a one-element array containing the drop-index job
+     * @throws HyracksException if the drop-index job cannot be built
+     */
+    @Override
+    public JobSpecification[] generateCleanup() throws HyracksException {
+        JobSpecification[] cleanups = new JobSpecification[1];
+        cleanups[0] = this.dropIndex(PRIMARY_INDEX);
+        return cleanups;
+    }
+
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenUtil.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenUtil.java
new file mode 100644
index 0000000..63b7c6d
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenUtil.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.jobgen;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.pregelix.core.jobgen.provider.NormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class JobGenUtil {
+
+    /**
+     * Get the normalized-key computer factory used when sorting messages.
+     * NOTE(review): earlier docs claimed the order alternates per iteration
+     * (odd: asc, even: desc), but the implementation always returns the
+     * ascending factory and ignores {@code iteration} — confirm intent.
+     * 
+     * @param iteration the superstep number (currently unused)
+     * @param keyClass the vertex id (key) class
+     * @return an ascending normalized-key computer factory for keyClass
+     */
+    public static INormalizedKeyComputerFactory getINormalizedKeyComputerFactory(int iteration, Class keyClass) {
+        return NormalizedKeyComputerFactoryProvider.INSTANCE.getAscINormalizedKeyComputerFactory(keyClass);
+    }
+
+    /**
+     * Get the binary comparator factory used when sorting messages.
+     * NOTE(review): earlier docs claimed the order alternates per iteration,
+     * but the implementation always returns the (ascending) Writable-comparing
+     * factory and ignores {@code iteration} — confirm intent.
+     * 
+     * @param iteration the superstep number (currently unused)
+     * @param keyClass the WritableComparator class for the vertex id
+     * @return a comparator factory wrapping keyClass
+     */
+    public static IBinaryComparatorFactory getIBinaryComparatorFactory(int iteration, Class keyClass) {
+        return new WritableComparingBinaryComparatorFactory(keyClass);
+    }
+
+    /**
+     * Get the B-tree scan direction for an iteration.
+     * NOTE(review): earlier docs claimed backward scans on odd iterations, but
+     * the implementation always returns {@code true} (forward scan) and
+     * ignores {@code iteration} — confirm intent.
+     * 
+     * @param iteration the superstep number (currently unused)
+     * @return always {@code true}: forward scan
+     */
+    public static boolean getForwardScan(int iteration) {
+        return true;
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/clusterconfig/ClusterConfig.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/clusterconfig/ClusterConfig.java
new file mode 100644
index 0000000..d26e637
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/clusterconfig/ClusterConfig.java
@@ -0,0 +1,229 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.jobgen.clusterconfig;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+
+import org.apache.hadoop.mapreduce.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.hdfs2.scheduler.Scheduler;
+
+/**
+ * Static registry of cluster-level settings used during Pregelix job
+ * generation: node controller (NC) names, per-NC storage directories,
+ * cluster properties (e.g. frame size), and the HDFS split scheduler.
+ * <p>
+ * NOTE(review): all state lives in static fields populated by
+ * {@link #loadClusterConfig(String, int)}; most other methods will throw a
+ * NullPointerException if called before that initialization. The class is
+ * not synchronized -- it appears to assume one-time configuration at
+ * startup; confirm no concurrent (re)initialization happens.
+ */
+public class ClusterConfig {
+
+    /** Node controller names discovered from the cluster controller. */
+    private static String[] NCs;
+    /** Path of the properties file listing storage directories ("store"). */
+    private static String storePropertiesPath = "conf/stores.properties";
+    /** Path of the properties file with cluster-wide settings. */
+    private static String clusterPropertiesPath = "conf/cluster.properties";
+    /** Loaded contents of the cluster properties file. */
+    private static Properties clusterProperties = new Properties();
+    /** Maps a host IP address to the list of NCs running on that host. */
+    private static Map<String, List<String>> ipToNcMapping;
+    /** Storage directories (comma-separated "store" property). */
+    private static String[] stores;
+    /** Scheduler used to assign HDFS splits to NCs. */
+    private static Scheduler hdfsScheduler;
+
+    /**
+     * let tests set config path to be whatever
+     * 
+     * @param storePropertiesPath
+     *            stores properties file path
+     */
+    public static void setStorePath(String storePropertiesPath) throws HyracksException {
+        ClusterConfig.storePropertiesPath = storePropertiesPath;
+    }
+
+    /**
+     * let tests override the cluster properties file path
+     * 
+     * @param clusterPropertiesPath
+     *            cluster properties file path
+     */
+    public static void setClusterPropertiesPath(String clusterPropertiesPath) throws HyracksException {
+        ClusterConfig.clusterPropertiesPath = clusterPropertiesPath;
+    }
+
+    /**
+     * get NC names running on one IP address
+     * 
+     * @param ipAddress
+     *            the host IP address
+     * @return the NC names on that host, or null if the address is unknown
+     * @throws HyracksException
+     */
+    public static List<String> getNCNames(String ipAddress) throws HyracksException {
+        return ipToNcMapping.get(ipAddress);
+    }
+
+    /**
+     * Build a file split provider with one split per (NC, store directory)
+     * pair; each split points at
+     * {@code <store>/<nc>-data/<jobId>/<indexName>}.
+     * 
+     * @param jobId
+     *            job identifier, used as a path component
+     * @param indexName
+     *            index name, used as the final path component
+     * @return a constant provider over all generated splits
+     * @throws HyracksException
+     */
+    public static IFileSplitProvider getFileSplitProvider(String jobId, String indexName) throws HyracksException {
+        FileSplit[] fileSplits = new FileSplit[stores.length * NCs.length];
+        int i = 0;
+        for (String nc : NCs) {
+            for (String st : stores) {
+                FileSplit split = new FileSplit(nc, st + File.separator + nc + "-data" + File.separator + jobId
+                        + File.separator + indexName);
+                fileSplits[i++] = split;
+            }
+        }
+        return new ConstantFileSplitProvider(fileSplits);
+    }
+
+    /** Loads the comma-separated "store" property into {@link #stores}. */
+    private static void loadStores() throws HyracksException {
+        Properties properties = new Properties();
+        try {
+            properties.load(new FileInputStream(storePropertiesPath));
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+        String store = properties.getProperty("store");
+        stores = store.split(",");
+    }
+
+    /** Loads the cluster properties file into {@link #clusterProperties}. */
+    private static void loadClusterProperties() throws HyracksException {
+        try {
+            clusterProperties.load(new FileInputStream(clusterPropertiesPath));
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /** @return the frame size parsed from the FRAME_SIZE cluster property */
+    public static int getFrameSize() {
+        return Integer.parseInt(clusterProperties.getProperty("FRAME_SIZE"));
+    }
+
+    /**
+     * Set a location constraint pinning each partition of the operator to an
+     * NC co-located with the corresponding HDFS input split, falling back to
+     * a random NC when the split's host is not part of this cluster.
+     * 
+     * @param spec
+     *            the job specification being built
+     * @param operator
+     *            the operator to constrain (one partition per split)
+     * @param splits
+     *            the HDFS input splits driving partition placement
+     * @throws HyracksException
+     */
+    public static void setLocationConstraint(JobSpecification spec, IOperatorDescriptor operator,
+            List<InputSplit> splits) throws HyracksException {
+        int count = splits.size();
+        String[] locations = new String[splits.size()];
+        Random random = new Random(System.currentTimeMillis());
+        for (int i = 0; i < splits.size(); i++) {
+            try {
+                String[] loc = splits.get(i).getLocations();
+                // shuffle replica hosts so repeated calls spread load
+                Collections.shuffle(Arrays.asList(loc), random);
+                if (loc.length > 0) {
+                    InetAddress[] allIps = InetAddress.getAllByName(loc[0]);
+                    // NOTE(review): when the host resolves to several
+                    // addresses this loop keeps overwriting locations[i];
+                    // the last resolved address wins -- confirm intended.
+                    for (InetAddress ip : allIps) {
+                        if (ipToNcMapping.get(ip.getHostAddress()) != null) {
+                            List<String> ncs = ipToNcMapping.get(ip.getHostAddress());
+                            int pos = random.nextInt(ncs.size());
+                            locations[i] = ncs.get(pos);
+                        } else {
+                            // address not in the cluster: pick any NC
+                            int pos = random.nextInt(NCs.length);
+                            locations[i] = NCs[pos];
+                        }
+                    }
+                } else {
+                    // split reports no preferred locations: pick any NC
+                    int pos = random.nextInt(NCs.length);
+                    locations[i] = NCs[pos];
+                }
+            } catch (IOException e) {
+                throw new HyracksException(e);
+            } catch (InterruptedException e) {
+                throw new HyracksException(e);
+            }
+        }
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, operator, locations);
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, operator, count);
+    }
+
+    /**
+     * Set a location constraint assigning the operator one partition per
+     * (NC, store directory) pair, i.e. NCs.length * stores.length in total.
+     * 
+     * @param spec
+     *            the job specification being built
+     * @param operator
+     *            the operator to constrain
+     * @throws HyracksException
+     */
+    public static void setLocationConstraint(JobSpecification spec, IOperatorDescriptor operator)
+            throws HyracksException {
+        int count = 0;
+        String[] locations = new String[NCs.length * stores.length];
+        for (String nc : NCs) {
+            for (int i = 0; i < stores.length; i++) {
+                locations[count] = nc;
+                count++;
+            }
+        }
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, operator, locations);
+    }
+
+    /**
+     * Constrain only the operator's partition count (NCs * stores) without
+     * pinning which nodes run the partitions.
+     * 
+     * @param spec
+     *            the job specification being built
+     * @param operator
+     *            the operator to constrain
+     * @throws HyracksException
+     */
+    public static void setCountConstraint(JobSpecification spec, IOperatorDescriptor operator) throws HyracksException {
+        int count = NCs.length * stores.length;
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, operator, count);
+    }
+
+    /**
+     * Discover the cluster topology from the cluster controller at
+     * ipAddress:port, then load the store and cluster property files.
+     * Populates {@link #NCs}, {@link #ipToNcMapping} and
+     * {@link #hdfsScheduler}. Must run before any other method of this class.
+     */
+    public static void loadClusterConfig(String ipAddress, int port) throws HyracksException {
+        try {
+            IHyracksClientConnection hcc = new HyracksConnection(ipAddress, port);
+            Map<String, NodeControllerInfo> ncNameToNcInfos = hcc.getNodeControllerInfos();
+            NCs = new String[ncNameToNcInfos.size()];
+            ipToNcMapping = new HashMap<String, List<String>>();
+            int i = 0;
+            for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
+                String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().getIpAddress())
+                        .getHostAddress();
+                List<String> matchedNCs = ipToNcMapping.get(ipAddr);
+                if (matchedNCs == null) {
+                    matchedNCs = new ArrayList<String>();
+                    ipToNcMapping.put(ipAddr, matchedNCs);
+                }
+                matchedNCs.add(entry.getKey());
+                NCs[i] = entry.getKey();
+                i++;
+            }
+
+            hdfsScheduler = new Scheduler(ipAddress, port);
+        } catch (Exception e) {
+            // connection/lookup failure: cluster unusable, surface unchecked
+            throw new IllegalStateException(e);
+        }
+
+        loadClusterProperties();
+        loadStores();
+    }
+
+    /** @return the HDFS scheduler created by {@link #loadClusterConfig} */
+    public static Scheduler getHdfsScheduler() {
+        return hdfsScheduler;
+    }
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/provider/NormalizedKeyComputerFactoryProvider.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/provider/NormalizedKeyComputerFactoryProvider.java
new file mode 100644
index 0000000..0735593
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/provider/NormalizedKeyComputerFactoryProvider.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.jobgen.provider;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.pregelix.core.base.INormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.pregelix.runtime.touchpoint.VLongAscNormalizedKeyComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VLongDescNormalizedKeyComputerFactory;
+
+public class NormalizedKeyComputerFactoryProvider implements INormalizedKeyComputerFactoryProvider {
+
+    public static INormalizedKeyComputerFactoryProvider INSTANCE = new NormalizedKeyComputerFactoryProvider();
+
+    private NormalizedKeyComputerFactoryProvider() {
+
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Override
+    public INormalizedKeyComputerFactory getAscINormalizedKeyComputerFactory(Class keyClass) {
+        if (keyClass.getName().indexOf("VLongWritable") > 0)
+            return new VLongAscNormalizedKeyComputerFactory();
+        else
+            return null;
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Override
+    public INormalizedKeyComputerFactory getDescINormalizedKeyComputerFactory(Class keyClass) {
+        if (keyClass.getName().indexOf("VLongWritable") > 0)
+            return new VLongDescNormalizedKeyComputerFactory();
+        else
+            return null;
+    }
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableComparingBinaryComparatorFactory.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableComparingBinaryComparatorFactory.java
new file mode 100644
index 0000000..3b2539c
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableComparingBinaryComparatorFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.runtime.touchpoint;
+
+import org.apache.hadoop.io.RawComparator;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.util.ReflectionUtils;
+
+public class WritableComparingBinaryComparatorFactory<T> implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    private Class<? extends RawComparator<T>> cmpClass;
+
+    public WritableComparingBinaryComparatorFactory(Class<? extends RawComparator<T>> cmpClass) {
+        this.cmpClass = cmpClass;
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        final RawComparator<T> instance = ReflectionUtils.createInstance(cmpClass);
+        return new IBinaryComparator() {
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                return instance.compare(b1, s1, l1, b2, s2, l2);
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableRecordDescriptorFactory.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableRecordDescriptorFactory.java
new file mode 100644
index 0000000..d1d927d
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableRecordDescriptorFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.runtime.touchpoint;
+
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+
+public class WritableRecordDescriptorFactory implements IRecordDescriptorFactory {
+    private static final long serialVersionUID = 1L;
+    private String[] fieldClasses;
+
+    public WritableRecordDescriptorFactory(String... fieldClasses) {
+        this.fieldClasses = fieldClasses;
+    }
+
+    @Override
+    public RecordDescriptor createRecordDescriptor() throws HyracksDataException {
+        try {
+            return DataflowUtils.getRecordDescriptorFromWritableClasses(fieldClasses);
+        } catch (HyracksException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/BufferSerDeUtils.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/BufferSerDeUtils.java
new file mode 100644
index 0000000..3f15197
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/BufferSerDeUtils.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.util;
+
/**
 * Static helpers for reading and writing Java primitives in big-endian byte
 * order directly against a byte array.
 * <p>
 * All methods are bounds-unchecked beyond the array's own checks; callers
 * must ensure {@code offset} leaves room for the full value width.
 */
public class BufferSerDeUtils {

    private BufferSerDeUtils() {
        // utility class: no instances
    }

    /** Reads a big-endian IEEE-754 double (8 bytes) starting at offset. */
    public static double getDouble(byte[] bytes, int offset) {
        return Double.longBitsToDouble(getLong(bytes, offset));
    }

    /** Reads a big-endian IEEE-754 float (4 bytes) starting at offset. */
    public static float getFloat(byte[] bytes, int offset) {
        return Float.intBitsToFloat(getInt(bytes, offset));
    }

    /** Reads a boolean: any non-zero byte at offset is {@code true}. */
    public static boolean getBoolean(byte[] bytes, int offset) {
        return bytes[offset] != 0;
    }

    /** Reads a big-endian 32-bit int (4 bytes) starting at offset. */
    public static int getInt(byte[] bytes, int offset) {
        // mask each byte to 0..255 before shifting to avoid sign extension
        return ((bytes[offset] & 0xff) << 24) | ((bytes[offset + 1] & 0xff) << 16)
                | ((bytes[offset + 2] & 0xff) << 8) | (bytes[offset + 3] & 0xff);
    }

    /** Reads a big-endian 64-bit long (8 bytes) starting at offset. */
    public static long getLong(byte[] bytes, int offset) {
        return (((long) (bytes[offset] & 0xff)) << 56) | (((long) (bytes[offset + 1] & 0xff)) << 48)
                | (((long) (bytes[offset + 2] & 0xff)) << 40) | (((long) (bytes[offset + 3] & 0xff)) << 32)
                | (((long) (bytes[offset + 4] & 0xff)) << 24) | (((long) (bytes[offset + 5] & 0xff)) << 16)
                | (((long) (bytes[offset + 6] & 0xff)) << 8) | ((long) (bytes[offset + 7] & 0xff));
    }

    /** Writes {@code value} as a single byte: 1 for true, 0 for false. */
    public static void writeBoolean(boolean value, byte[] bytes, int offset) {
        bytes[offset] = value ? (byte) 1 : (byte) 0;
    }

    /** Writes a 32-bit int in big-endian order (4 bytes) at offset. */
    public static void writeInt(int value, byte[] bytes, int offset) {
        bytes[offset++] = (byte) (value >> 24);
        bytes[offset++] = (byte) (value >> 16);
        bytes[offset++] = (byte) (value >> 8);
        bytes[offset++] = (byte) (value);
    }

    /** Writes a 64-bit long in big-endian order (8 bytes) at offset. */
    public static void writeLong(long value, byte[] bytes, int offset) {
        bytes[offset++] = (byte) (value >> 56);
        bytes[offset++] = (byte) (value >> 48);
        bytes[offset++] = (byte) (value >> 40);
        bytes[offset++] = (byte) (value >> 32);
        bytes[offset++] = (byte) (value >> 24);
        bytes[offset++] = (byte) (value >> 16);
        bytes[offset++] = (byte) (value >> 8);
        bytes[offset++] = (byte) (value);
    }

    /** Writes a double as its big-endian IEEE-754 long bit pattern. */
    public static void writeDouble(double value, byte[] bytes, int offset) {
        writeLong(Double.doubleToLongBits(value), bytes, offset);
    }

    /** Writes a float as its big-endian IEEE-754 int bit pattern. */
    public static void writeFloat(float value, byte[] bytes, int offset) {
        writeInt(Float.floatToIntBits(value), bytes, offset);
    }

}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataBalancer.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataBalancer.java
new file mode 100644
index 0000000..9877e97
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataBalancer.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.util;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.BZip2Codec;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+@SuppressWarnings("deprecation")
+public class DataBalancer {
+
+    /** Identity mapper: re-emits each text line keyed by its input key. */
+    public static class MapRecordOnly extends MapReduceBase implements Mapper<LongWritable, Text, LongWritable, Text> {
+
+        public void map(LongWritable id, Text inputValue, OutputCollector<LongWritable, Text> output, Reporter reporter)
+                throws IOException {
+            output.collect(id, inputValue);
+        }
+    }
+
+    /** Identity reducer: drops the key and writes each value through. */
+    public static class ReduceRecordOnly extends MapReduceBase implements
+            Reducer<LongWritable, Text, NullWritable, Text> {
+
+        // reused output key; NullWritable so only the value text is written
+        NullWritable key = NullWritable.get();
+
+        public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext())
+                output.collect(key, inputValue.next());
+        }
+    }
+
+    /**
+     * Rebalances a text dataset by shuffling it through an identity
+     * map/reduce job with a caller-chosen number of reducers.
+     * <p>
+     * Usage: {@code DataBalancer <inputPath> <outputPath> <numReducers> [bzip...|gz...]}
+     * -- the optional fourth argument selects output compression.
+     */
+    public static void main(String[] args) throws IOException {
+        JobConf job = new JobConf(DataBalancer.class);
+
+        job.setJobName(DataBalancer.class.getSimpleName());
+        job.setMapperClass(MapRecordOnly.class);
+        job.setReducerClass(ReduceRecordOnly.class);
+        job.setMapOutputKeyClass(LongWritable.class);
+        job.setMapOutputValueClass(Text.class);
+
+        job.setInputFormat(TextInputFormat.class);
+        FileInputFormat.setInputPaths(job, args[0]);
+        FileOutputFormat.setOutputPath(job, new Path(args[1]));
+        job.setNumReduceTasks(Integer.parseInt(args[2]));
+
+        if (args.length > 3) {
+            if (args[3].startsWith("bzip"))
+                FileOutputFormat.setOutputCompressorClass(job, BZip2Codec.class);
+            if (args[3].startsWith("gz"))
+                FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
+        }
+        JobClient.runJob(job);
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataGenerator.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataGenerator.java
new file mode 100644
index 0000000..466bef3
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataGenerator.java
@@ -0,0 +1,227 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.util;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.VLongWritable;
+import org.apache.hadoop.io.compress.BZip2Codec;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+/**
+ * Generate scaled-up graph data from a base dataset via two chained
+ * map/reduce jobs: the first finds the maximum vertex id in the input, the
+ * second replicates every adjacency line {@code x} times with vertex ids
+ * shifted by multiples of (maxId + 1) so the copies do not collide.
+ * <p>
+ * Input format: text lines of space-separated vertex ids, the first id being
+ * the source vertex.
+ */
+@SuppressWarnings("deprecation")
+public class DataGenerator {
+
+    /** Emits the maximum vertex id seen on each input line. */
+    public static class MapMaxId extends MapReduceBase implements
+            Mapper<LongWritable, Text, NullWritable, VLongWritable> {
+        private NullWritable key = NullWritable.get();
+        private VLongWritable value = new VLongWritable();
+
+        @Override
+        public void map(LongWritable id, Text inputValue, OutputCollector<NullWritable, VLongWritable> output,
+                Reporter reporter) throws IOException {
+            String[] vertices = inputValue.toString().split(" ");
+            long max = Long.parseLong(vertices[0]);
+            for (int i = 1; i < vertices.length; i++) {
+                long vid = Long.parseLong(vertices[i]);
+                if (vid > max)
+                    max = vid;
+            }
+            value.set(max);
+            output.collect(key, value);
+        }
+    }
+
+    /**
+     * Accumulates the global max id across all groups and writes it out once
+     * in {@link #close()}.
+     * <p>
+     * NOTE(review): the collector is captured during reduce() and used in
+     * close(); if a reduce task receives no input groups, {@code output}
+     * stays null and close() throws NPE -- confirm the single-reducer job
+     * always has input.
+     */
+    public static class ReduceMaxId extends MapReduceBase implements
+            Reducer<NullWritable, VLongWritable, NullWritable, Text> {
+
+        private NullWritable key = NullWritable.get();
+        // running maximum across every reduce() call in this task
+        private long max = Long.MIN_VALUE;
+        // collector captured from reduce() so close() can emit the result
+        private OutputCollector<NullWritable, Text> output;
+
+        @Override
+        public void reduce(NullWritable inputKey, Iterator<VLongWritable> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext()) {
+                long vid = inputValue.next().get();
+                if (vid > max)
+                    max = vid;
+            }
+            if (this.output == null)
+                this.output = output;
+
+        }
+
+        @Override
+        public void close() throws IOException {
+            output.collect(key, new Text(new VLongWritable(max).toString()));
+        }
+    }
+
+    /**
+     * Combiner counterpart of {@link ReduceMaxId}: emits one partial max per
+     * map task, also deferring the emit to close().
+     */
+    public static class CombineMaxId extends MapReduceBase implements
+            Reducer<NullWritable, VLongWritable, NullWritable, VLongWritable> {
+
+        private NullWritable key = NullWritable.get();
+        private long max = Long.MIN_VALUE;
+        private OutputCollector<NullWritable, VLongWritable> output;
+
+        @Override
+        public void reduce(NullWritable inputKey, Iterator<VLongWritable> inputValue,
+                OutputCollector<NullWritable, VLongWritable> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext()) {
+                long vid = inputValue.next().get();
+                if (vid > max)
+                    max = vid;
+            }
+            if (this.output == null)
+                this.output = output;
+        }
+
+        public void close() throws IOException {
+            output.collect(key, new VLongWritable(max));
+        }
+    }
+
+    /**
+     * Replicates each input line x times, offsetting all vertex ids in copy
+     * k by k * (maxId + 1) so the generated subgraphs are disjoint.
+     */
+    public static class MapRecordGen extends MapReduceBase implements Mapper<LongWritable, Text, LongWritable, Text> {
+
+        // maxId + 1 read from the first job's output; the id-shift stride
+        private long maxId = 0;
+        private Text text = new Text();
+        // replication factor ("hyracks.x" job property, default 2)
+        private int x = 2;
+
+        @Override
+        public void configure(JobConf conf) {
+            try {
+                x = conf.getInt("hyracks.x", 2);
+                String fileName = conf.get("hyracks.maxid.file");
+                FileSystem dfs = FileSystem.get(conf);
+                // drop job bookkeeping files so only part files remain
+                dfs.delete(new Path(fileName + "/_SUCCESS"), true);
+                dfs.delete(new Path(fileName + "/_logs"), true);
+                FileStatus[] files = dfs.listStatus(new Path(fileName));
+
+                // NOTE(review): each non-directory file overwrites maxId, so
+                // the last part file wins -- relies on the first job running
+                // with a single reducer.
+                for (int i = 0; i < files.length; i++) {
+                    if (!files[i].isDir()) {
+                        DataInputStream input = dfs.open(files[i].getPath());
+                        String id = input.readLine();
+                        maxId = Long.parseLong(id) + 1;
+                        input.close();
+                    }
+                }
+            } catch (IOException e) {
+                throw new IllegalStateException(e);
+            }
+        }
+
+        @Override
+        public void map(LongWritable id, Text inputValue, OutputCollector<LongWritable, Text> output, Reporter reporter)
+                throws IOException {
+            String[] vertices = inputValue.toString().split(" ");
+
+            /**
+             * generate data x times
+             */
+            for (int k = 0; k < x; k++) {
+                long max = maxId * k;
+                StringBuilder sb = new StringBuilder();
+                for (int i = 0; i < vertices.length - 1; i++) {
+                    long vid = Long.parseLong(vertices[i]) + max;
+                    sb.append(vid);
+                    sb.append(" ");
+                }
+                long vid = Long.parseLong(vertices[vertices.length - 1]) + max;
+                sb.append(vid);
+                text.set(sb.toString().getBytes());
+                output.collect(id, text);
+            }
+        }
+    }
+
+    /** Identity reducer: drops the key and writes each generated line. */
+    public static class ReduceRecordGen extends MapReduceBase implements
+            Reducer<LongWritable, Text, NullWritable, Text> {
+
+        private NullWritable key = NullWritable.get();
+
+        public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext())
+                output.collect(key, inputValue.next());
+        }
+    }
+
+    /**
+     * Usage: {@code DataGenerator <inputPath> <outputPath> <x> <numReducers> [bzip...|gz...]}
+     * -- runs the max-id job into /maxtemp, then the generation job with
+     * replication factor x and the given reducer count; the optional fifth
+     * argument selects output compression.
+     */
+    public static void main(String[] args) throws IOException {
+
+        JobConf job = new JobConf(DataGenerator.class);
+        FileSystem dfs = FileSystem.get(job);
+        String maxFile = "/maxtemp";
+        dfs.delete(new Path(maxFile), true);
+
+        // job 1: find the global maximum vertex id (single reducer)
+        job.setJobName(DataGenerator.class.getSimpleName() + "max ID");
+        job.setMapperClass(MapMaxId.class);
+        job.setCombinerClass(CombineMaxId.class);
+        job.setReducerClass(ReduceMaxId.class);
+        job.setMapOutputKeyClass(NullWritable.class);
+        job.setMapOutputValueClass(VLongWritable.class);
+
+        job.setInputFormat(TextInputFormat.class);
+        FileInputFormat.setInputPaths(job, args[0]);
+        FileOutputFormat.setOutputPath(job, new Path(maxFile));
+        job.setNumReduceTasks(1);
+        JobClient.runJob(job);
+
+        // job 2: replicate the input x times with shifted vertex ids
+        job = new JobConf(DataGenerator.class);
+        job.set("hyracks.maxid.file", maxFile);
+        job.setInt("hyracks.x", Integer.parseInt(args[2]));
+        dfs.delete(new Path(args[1]), true);
+
+        job.setJobName(DataGenerator.class.getSimpleName());
+        job.setMapperClass(MapRecordGen.class);
+        job.setReducerClass(ReduceRecordGen.class);
+        job.setMapOutputKeyClass(LongWritable.class);
+        job.setMapOutputValueClass(Text.class);
+
+        job.setInputFormat(TextInputFormat.class);
+        FileInputFormat.setInputPaths(job, args[0]);
+        FileOutputFormat.setOutputPath(job, new Path(args[1]));
+        job.setNumReduceTasks(Integer.parseInt(args[3]));
+
+        if (args.length > 4) {
+            if (args[4].startsWith("bzip"))
+                FileOutputFormat.setOutputCompressorClass(job, BZip2Codec.class);
+            if (args[4].startsWith("gz"))
+                FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
+        }
+        JobClient.runJob(job);
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataflowUtils.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataflowUtils.java
new file mode 100644
index 0000000..bcf3ffc
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataflowUtils.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.util;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
+import edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory;
+import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.runtime.simpleagg.AccumulatingAggregatorFactory;
+import edu.uci.ics.pregelix.runtime.simpleagg.AggregationFunctionFactory;
+
+public class DataflowUtils {
+
+    public enum AggregationMode {
+        PARTIAL,
+        FINAL
+    }
+
+    @SuppressWarnings("unchecked")
+    public static RecordDescriptor getRecordDescriptorFromKeyValueClasses(String className1, String className2)
+            throws HyracksException {
+        RecordDescriptor recordDescriptor = null;
+        try {
+            recordDescriptor = DatatypeHelper.createKeyValueRecordDescriptor(
+                    (Class<? extends Writable>) Class.forName(className1),
+                    (Class<? extends Writable>) Class.forName(className2));
+        } catch (ClassNotFoundException cnfe) {
+            throw new HyracksException(cnfe);
+        }
+        return recordDescriptor;
+    }
+
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    public static RecordDescriptor getRecordDescriptorFromWritableClasses(String... classNames) throws HyracksException {
+        RecordDescriptor recordDescriptor = null;
+        ISerializerDeserializer[] serdes = new ISerializerDeserializer[classNames.length];
+        try {
+            int i = 0;
+            for (String className : classNames)
+                serdes[i++] = DatatypeHelper.createSerializerDeserializer((Class<? extends Writable>) Class
+                        .forName(className));
+        } catch (ClassNotFoundException cnfe) {
+            throw new HyracksException(cnfe);
+        }
+        recordDescriptor = new RecordDescriptor(serdes);
+        return recordDescriptor;
+    }
+
+    public static IRecordDescriptorFactory getWritableRecordDescriptorFactoryFromWritableClasses(String... classNames)
+            throws HyracksException {
+        IRecordDescriptorFactory rdFactory = new WritableRecordDescriptorFactory(classNames);
+        return rdFactory;
+    }
+
+    public static IAggregatorDescriptorFactory getAccumulatingAggregatorFactory(Configuration conf, boolean isFinal,
+            boolean partialAggAsInput) {
+        IAggregateFunctionFactory aggFuncFactory = new AggregationFunctionFactory(new ConfigurationFactory(conf),
+                isFinal, partialAggAsInput);
+        IAggregatorDescriptorFactory aggregatorFactory = new AccumulatingAggregatorFactory(
+                new IAggregateFunctionFactory[] { aggFuncFactory });
+        return aggregatorFactory;
+    }
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DatatypeHelper.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DatatypeHelper.java
new file mode 100644
index 0000000..ee319c6
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DatatypeHelper.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.util;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+@SuppressWarnings("deprecation")
+public class DatatypeHelper {
+    private static final class WritableSerializerDeserializer<T extends Writable> implements ISerializerDeserializer<T> {
+        private static final long serialVersionUID = 1L;
+
+        private Class<T> clazz;
+        private T object;
+
+        private WritableSerializerDeserializer(Class<T> clazz) {
+            this.clazz = clazz;
+        }
+
+        @SuppressWarnings("unchecked")
+        private T createInstance() throws HyracksDataException {
+            // TODO remove "if", create a new WritableInstanceOperations class
+            // that deals with Writables that don't have public constructors
+            if (NullWritable.class.equals(clazz)) {
+                return (T) NullWritable.get();
+            }
+            try {
+                return clazz.newInstance();
+            } catch (InstantiationException e) {
+                throw new HyracksDataException(e);
+            } catch (IllegalAccessException e) {
+                throw new HyracksDataException(e);
+            }
+        }
+
+        @Override
+        public T deserialize(DataInput in) throws HyracksDataException {
+            if (object == null) {
+                object = createInstance();
+            }
+            try {
+                object.readFields(in);
+            } catch (IOException e) {
+                e.printStackTrace();
+                throw new HyracksDataException(e);
+            }
+            return object;
+        }
+
+        @Override
+        public void serialize(T instance, DataOutput out) throws HyracksDataException {
+            try {
+                instance.write(out);
+            } catch (IOException e) {
+                throw new HyracksDataException(e);
+            }
+        }
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public static ISerializerDeserializer<? extends Writable> createSerializerDeserializer(
+            Class<? extends Writable> fClass) {
+        return new WritableSerializerDeserializer(fClass);
+    }
+
+    public static RecordDescriptor createKeyValueRecordDescriptor(Class<? extends Writable> keyClass,
+            Class<? extends Writable> valueClass) {
+        @SuppressWarnings("rawtypes")
+        ISerializerDeserializer[] fields = new ISerializerDeserializer[2];
+        fields[0] = createSerializerDeserializer(keyClass);
+        fields[1] = createSerializerDeserializer(valueClass);
+        return new RecordDescriptor(fields);
+    }
+
+    public static RecordDescriptor createOneFieldRecordDescriptor(Class<? extends Writable> fieldClass) {
+        @SuppressWarnings("rawtypes")
+        ISerializerDeserializer[] fields = new ISerializerDeserializer[1];
+        fields[0] = createSerializerDeserializer(fieldClass);
+        return new RecordDescriptor(fields);
+    }
+
+    public static JobConf map2JobConf(Map<String, String> jobConfMap) {
+        JobConf jobConf;
+        synchronized (Configuration.class) {
+            jobConf = new JobConf();
+            for (Entry<String, String> entry : jobConfMap.entrySet()) {
+                jobConf.set(entry.getKey(), entry.getValue());
+            }
+        }
+        return jobConf;
+    }
+
+    public static Map<String, String> jobConf2Map(JobConf jobConf) {
+        Map<String, String> jobConfMap = new HashMap<String, String>();
+        for (Entry<String, String> entry : jobConf) {
+            jobConfMap.put(entry.getKey(), entry.getValue());
+        }
+        return jobConfMap;
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
new file mode 100644
index 0000000..2a2e2bf
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.util;
+
+import java.util.EnumSet;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+
+public class PregelixHyracksIntegrationUtil {
+
+    public static final String NC1_ID = "nc1";
+    public static final String NC2_ID = "nc2";
+
+    public static final int DEFAULT_HYRACKS_CC_PORT = 1099;
+    public static final int TEST_HYRACKS_CC_PORT = 1099;
+    public static final int TEST_HYRACKS_CC_CLIENT_PORT = 2099;
+    public static final String CC_HOST = "localhost";
+
+    public static final int FRAME_SIZE = 65536;
+
+    private static ClusterControllerService cc;
+    private static NodeControllerService nc1;
+    private static NodeControllerService nc2;
+    private static IHyracksClientConnection hcc;
+
+    public static void init() throws Exception {
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clientNetIpAddress = CC_HOST;
+        ccConfig.clusterNetIpAddress = CC_HOST;
+        ccConfig.clusterNetPort = TEST_HYRACKS_CC_PORT;
+        ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_PORT;
+        ccConfig.defaultMaxJobAttempts = 0;
+        ccConfig.jobHistorySize = 0;
+        ccConfig.profileDumpPeriod = -1;
+
+        // cluster controller
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        // two node controllers
+        NCConfig ncConfig1 = new NCConfig();
+        ncConfig1.ccHost = "localhost";
+        ncConfig1.clusterNetIPAddress = "localhost";
+        ncConfig1.ccPort = TEST_HYRACKS_CC_PORT;
+        ncConfig1.dataIPAddress = "127.0.0.1";
+        ncConfig1.nodeId = NC1_ID;
+        nc1 = new NodeControllerService(ncConfig1);
+        nc1.start();
+
+        NCConfig ncConfig2 = new NCConfig();
+        ncConfig2.ccHost = "localhost";
+        ncConfig2.clusterNetIPAddress = "localhost";
+        ncConfig2.ccPort = TEST_HYRACKS_CC_PORT;
+        ncConfig2.dataIPAddress = "127.0.0.1";
+        ncConfig2.nodeId = NC2_ID;
+        nc2 = new NodeControllerService(ncConfig2);
+        nc2.start();
+
+        // hyracks connection
+        hcc = new HyracksConnection(CC_HOST, TEST_HYRACKS_CC_CLIENT_PORT);
+        ClusterConfig.loadClusterConfig(CC_HOST, TEST_HYRACKS_CC_CLIENT_PORT);
+    }
+
+    public static void destroyApp(String hyracksAppName) throws Exception {
+        hcc.destroyApplication(hyracksAppName);
+    }
+
+    public static void createApp(String hyracksAppName) throws Exception {
+        hcc.createApplication(hyracksAppName, null);
+    }
+
+    public static void deinit() throws Exception {
+        nc2.stop();
+        nc1.stop();
+        cc.stop();
+    }
+
+    public static void runJob(JobSpecification spec, String appName) throws Exception {
+        spec.setFrameSize(FRAME_SIZE);
+        JobId jobId = hcc.startJob(appName, spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        hcc.waitForCompletion(jobId);
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/Utilities.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/Utilities.java
new file mode 100644
index 0000000..3566bf9
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/Utilities.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.core.util;
+
+import java.io.BufferedReader;
+import java.io.DataInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.HashSet;
+import java.util.Properties;
+import java.util.Set;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+public class Utilities {
+
+    public static Properties getProperties(String filePath, char delimiter) {
+        Properties properties = new Properties();
+        try {
+            FileInputStream fins = new FileInputStream(new File(filePath));
+            DataInputStream dins = new DataInputStream(fins);
+            BufferedReader br = new BufferedReader(new InputStreamReader(dins));
+            String strLine;
+            while ((strLine = br.readLine()) != null) {
+                int split = strLine.indexOf(delimiter);
+                if (split >= 0) {
+                    properties.put((strLine.substring(0, split)).trim(), strLine.substring(split + 1, strLine.length())
+                            .trim());
+                }
+            }
+            br.close();
+        } catch (IOException ioe) {
+            ioe.printStackTrace();
+        }
+        return properties;
+    }
+
+    public static File getHyracksArchive(String applicationName, Set<String> libJars) {
+        String target = applicationName + ".zip";
+        // Create a buffer for reading the files
+        byte[] buf = new byte[32768];
+        Set<String> fileNames = new HashSet<String>();
+        try {
+            ZipOutputStream out = new ZipOutputStream(new FileOutputStream(target));
+            for (String libJar : libJars) {
+                String fileName = libJar.substring(libJar.lastIndexOf("/") + 1);
+                if (fileNames.contains(fileName)) {
+                    continue;
+                }
+                FileInputStream in = new FileInputStream(libJar);
+                out.putNextEntry(new ZipEntry(fileName));
+                int len;
+                while ((len = in.read(buf)) > 0) {
+                    out.write(buf, 0, len);
+                }
+                out.closeEntry();
+                in.close();
+                fileNames.add(fileName);
+            }
+            out.close();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        File har = new File(target);
+        har.deleteOnExit();
+        return har;
+    }
+}
diff --git a/pregelix/pregelix-core/src/main/resources/conf/cluster.properties b/pregelix/pregelix-core/src/main/resources/conf/cluster.properties
new file mode 100644
index 0000000..2d2401a
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/conf/cluster.properties
@@ -0,0 +1,37 @@
+#The CC port for Hyracks clients
+CC_CLIENTPORT=3099
+
+#The CC port for Hyracks cluster management
+CC_CLUSTERPORT=1099
+
+#The directory of hyracks binaries
+HYRACKS_HOME=../../../../hyracks
+
+#The tmp directory for cc to install jars
+CCTMP_DIR=/tmp/t1
+
+#The tmp directory for nc to install jars
+NCTMP_DIR=/tmp/t2
+
+#The directory to put cc logs
+CCLOGS_DIR=$CCTMP_DIR/logs
+
+#The directory to put nc logs
+NCLOGS_DIR=$NCTMP_DIR/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS="/tmp/t3,/tmp/t4"
+
+#The JAVA_HOME
+JAVA_HOME=$JAVA_HOME
+
+#The frame size of the internal dataflow engine
+FRAME_SIZE=65536
+
+#CC JAVA_OPTS
+CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/pregelix/pregelix-core/src/main/resources/conf/debugnc.properties b/pregelix/pregelix-core/src/main/resources/conf/debugnc.properties
new file mode 100755
index 0000000..27afa26
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/conf/debugnc.properties
@@ -0,0 +1,12 @@
+#The tmp directory for nc to install jars
+NCTMP_DIR2=/tmp/t-1
+
+#The directory to put nc logs
+NCLOGS_DIR2=$NCTMP_DIR2/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS2="/tmp/t-2,/tmp/t-3"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS2="-Xdebug -Xrunjdwp:transport=dt_socket,address=7003,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/pregelix/pregelix-core/src/main/resources/conf/master b/pregelix/pregelix-core/src/main/resources/conf/master
new file mode 100644
index 0000000..2fbb50c
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/conf/master
@@ -0,0 +1 @@
+localhost
diff --git a/pregelix/pregelix-core/src/main/resources/conf/slaves b/pregelix/pregelix-core/src/main/resources/conf/slaves
new file mode 100644
index 0000000..2fbb50c
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/conf/slaves
@@ -0,0 +1 @@
+localhost
diff --git a/pregelix/pregelix-core/src/main/resources/conf/stores.properties b/pregelix/pregelix-core/src/main/resources/conf/stores.properties
new file mode 100644
index 0000000..d1a4e10
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/conf/stores.properties
@@ -0,0 +1,2 @@
+#Comma separated directories for storing the partitioned graph on each machine
+store=/tmp/teststore1,/tmp/teststore2
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/resources/hyracks-deployment.properties b/pregelix/pregelix-core/src/main/resources/hyracks-deployment.properties
new file mode 100644
index 0000000..d5d7cd0
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/hyracks-deployment.properties
@@ -0,0 +1 @@
+nc.bootstrap.class=edu.uci.ics.pregelix.runtime.bootstrap.NCBootstrapImpl
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/getip.sh b/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
new file mode 100755
index 0000000..e0cdf73
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
@@ -0,0 +1,21 @@
+#get the OS
+OS_NAME=`uname -a|awk '{print $1}'`
+LINUX_OS='Linux'
+
+if [ $OS_NAME = $LINUX_OS ];
+then
+        #Get IP Address
+        IPADDR=`/sbin/ifconfig eth0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+	if [ "$IPADDR" = "" ]
+        then
+		IPADDR=`/sbin/ifconfig lo | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi 
+else
+        IPADDR=`/sbin/ifconfig en1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+	if [ "$IPADDR" = "" ]
+        then
+                IPADDR=`/sbin/ifconfig lo0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi
+
+fi
+echo $IPADDR
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/pregelix b/pregelix/pregelix-core/src/main/resources/scripts/pregelix
new file mode 100644
index 0000000..6997078
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/pregelix
@@ -0,0 +1,113 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+#  Copyright 2001-2006 The Apache Software Foundation.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+# ----------------------------------------------------------------------------
+#
+#   Copyright (c) 2001-2006 The Apache Software Foundation.  All rights
+#   reserved.
+
+
+# resolve links - $0 may be a softlink
+PRG="$0"
+
+while [ -h "$PRG" ]; do
+  ls=`ls -ld "$PRG"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    PRG="$link"
+  else
+    PRG=`dirname "$PRG"`/"$link"
+  fi
+done
+
+PRGDIR=`dirname "$PRG"`
+BASEDIR=`cd "$PRGDIR/.." >/dev/null; pwd`
+
+
+
+# OS specific support.  $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+case "`uname`" in
+  CYGWIN*) cygwin=true ;;
+  Darwin*) darwin=true
+           if [ -z "$JAVA_VERSION" ] ; then
+             JAVA_VERSION="CurrentJDK"
+           else
+             echo "Using Java version: $JAVA_VERSION"
+           fi
+           if [ -z "$JAVA_HOME" ] ; then
+             JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home
+           fi
+           ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+  if [ -r /etc/gentoo-release ] ; then
+    JAVA_HOME=`java-config --jre-home`
+  fi
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+  [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# If a specific java binary isn't specified search for the standard 'java' binary
+if [ -z "$JAVACMD" ] ; then
+  if [ -n "$JAVA_HOME"  ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+      # IBM's JDK on AIX uses strange locations for the executables
+      JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+      JAVACMD="$JAVA_HOME/bin/java"
+    fi
+  else
+    JAVACMD=`which java`
+  fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+  echo "Error: JAVA_HOME is not defined correctly." 1>&2
+  echo "  We cannot execute $JAVACMD" 1>&2
+  exit 1
+fi
+
+if [ -z "$REPO" ]
+then
+  REPO="$BASEDIR"/lib
+fi
+
+CLASSPATH=$CLASSPATH_PREFIX:"$HADOOP_HOME"/conf:"$BASEDIR"/etc:$1
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+  [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+  [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+  [ -n "$HOME" ] && HOME=`cygpath --path --windows "$HOME"`
+  [ -n "$BASEDIR" ] && BASEDIR=`cygpath --path --windows "$BASEDIR"`
+  [ -n "$REPO" ] && REPO=`cygpath --path --windows "$REPO"`
+fi
+
+exec "$JAVACMD" $JAVA_OPTS  \
+  -classpath "$CLASSPATH" \
+  -Dapp.name="pregelix" \
+  -Dapp.pid="$$" \
+  -Dapp.repo="$REPO" \
+  -Dapp.home="$BASEDIR" \
+  -Dbasedir="$BASEDIR" \
+  org.apache.hadoop.util.RunJar \
+  "$@"
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/pregelix.bat b/pregelix/pregelix-core/src/main/resources/scripts/pregelix.bat
new file mode 100644
index 0000000..536e3c8
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/pregelix.bat
@@ -0,0 +1,110 @@
+@REM ----------------------------------------------------------------------------

+@REM  Copyright 2001-2006 The Apache Software Foundation.

+@REM

+@REM  Licensed under the Apache License, Version 2.0 (the "License");

+@REM  you may not use this file except in compliance with the License.

+@REM  You may obtain a copy of the License at

+@REM

+@REM       http://www.apache.org/licenses/LICENSE-2.0

+@REM

+@REM  Unless required by applicable law or agreed to in writing, software

+@REM  distributed under the License is distributed on an "AS IS" BASIS,

+@REM  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+@REM  See the License for the specific language governing permissions and

+@REM  limitations under the License.

+@REM ----------------------------------------------------------------------------

+@REM

+@REM   Copyright (c) 2001-2006 The Apache Software Foundation.  All rights

+@REM   reserved.

+

+@echo off

+

+set ERROR_CODE=0

+

+:init

+@REM Decide how to startup depending on the version of windows

+

+@REM -- Win98ME

+if NOT "%OS%"=="Windows_NT" goto Win9xArg

+

+@REM set local scope for the variables with windows NT shell

+if "%OS%"=="Windows_NT" @setlocal

+

+@REM -- 4NT shell

+if "%eval[2+2]" == "4" goto 4NTArgs

+

+@REM -- Regular WinNT shell

+set CMD_LINE_ARGS=%*

+goto WinNTGetScriptDir

+

+@REM The 4NT Shell from jp software

+:4NTArgs

+set CMD_LINE_ARGS=%$

+goto WinNTGetScriptDir

+

+:Win9xArg

+@REM Slurp the command line arguments.  This loop allows for an unlimited number

+@REM of arguments (up to the command line limit, anyway).

+set CMD_LINE_ARGS=

+:Win9xApp

+if %1a==a goto Win9xGetScriptDir

+set CMD_LINE_ARGS=%CMD_LINE_ARGS% %1

+shift

+goto Win9xApp

+

+:Win9xGetScriptDir

+set SAVEDIR=%CD%

+%0\

+cd %0\..\.. 

+set BASEDIR=%CD%

+cd %SAVEDIR%

+set SAVE_DIR=

+goto repoSetup

+

+:WinNTGetScriptDir

+set BASEDIR=%~dp0\..

+

+:repoSetup

+

+

+if "%JAVACMD%"=="" set JAVACMD=java

+

+if "%REPO%"=="" set REPO=%BASEDIR%\lib

+

+copy "%BASEDIR%\..\a-hadoop-patch.jar" "%REPO%"

+

+set CLASSPATH="%BASEDIR%"\etc;"%REPO%"\a-hadoop-patch.jar;"%REPO%"\pregelix-api-0.0.1-SNAPSHOT.jar;"%REPO%"\hyracks-dataflow-common-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-api-0.2.2-SNAPSHOT.jar;"%REPO%"\json-20090211.jar;"%REPO%"\httpclient-4.1-alpha2.jar;"%REPO%"\httpcore-4.1-beta1.jar;"%REPO%"\commons-logging-1.1.1.jar;"%REPO%"\commons-codec-1.3.jar;"%REPO%"\args4j-2.0.12.jar;"%REPO%"\hyracks-ipc-0.2.2-SNAPSHOT.jar;"%REPO%"\commons-lang3-3.1.jar;"%REPO%"\hyracks-data-std-0.2.2-SNAPSHOT.jar;"%REPO%"\hadoop-core-0.20.2.jar;"%REPO%"\commons-cli-1.2.jar;"%REPO%"\xmlenc-0.52.jar;"%REPO%"\commons-httpclient-3.0.1.jar;"%REPO%"\commons-net-1.4.1.jar;"%REPO%"\oro-2.0.8.jar;"%REPO%"\jetty-6.1.14.jar;"%REPO%"\jetty-util-6.1.14.jar;"%REPO%"\servlet-api-2.5-6.1.14.jar;"%REPO%"\jasper-runtime-5.5.12.jar;"%REPO%"\jasper-compiler-5.5.12.jar;"%REPO%"\jsp-api-2.1-6.1.14.jar;"%REPO%"\jsp-2.1-6.1.14.jar;"%REPO%"\core-3.1.1.jar;"%REPO%"\ant-1.6.5.jar;"%REPO%"\commons-el-1.0.jar;"%REPO%"\jets3t-0.7.1.jar;"%REPO%"\kfs-0.3.jar;"%REPO%"\hsqldb-1.8.0.10.jar;"%REPO%"\pregelix-dataflow-std-0.0.1-SNAPSHOT.jar;"%REPO%"\pregelix-dataflow-std-base-0.0.1-SNAPSHOT.jar;"%REPO%"\hyracks-dataflow-std-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-dataflow-hadoop-0.2.2-SNAPSHOT.jar;"%REPO%"\dcache-client-0.0.1.jar;"%REPO%"\jetty-client-8.0.0.M0.jar;"%REPO%"\jetty-http-8.0.0.RC0.jar;"%REPO%"\jetty-io-8.0.0.RC0.jar;"%REPO%"\jetty-util-8.0.0.RC0.jar;"%REPO%"\hyracks-storage-am-common-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-storage-common-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-storage-am-btree-0.2.2-SNAPSHOT.jar;"%REPO%"\btreehelper-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-control-cc-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-control-common-0.2.2-SNAPSHOT.jar;"%REPO%"\commons-io-1.3.1.jar;"%REPO%"\jetty-server-8.0.0.RC0.jar;"%REPO%"\servlet-api-3.0.20100224.jar;"%REPO%"\jetty-continuation-8.0.0.RC0.jar;"%REPO%"\jetty-webapp-8.0.0.RC0.jar;"%REPO%"\jetty-xml-8.0.0.RC0.jar;"%REPO%"\jetty-servlet-8.0.0.RC0.jar;"%REPO%"\jetty-security-8.0.0.
RC0.jar;"%REPO%"\wicket-core-1.5.2.jar;"%REPO%"\wicket-util-1.5.2.jar;"%REPO%"\slf4j-api-1.6.1.jar;"%REPO%"\wicket-request-1.5.2.jar;"%REPO%"\slf4j-jcl-1.6.3.jar;"%REPO%"\hyracks-control-nc-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-net-0.2.2-SNAPSHOT.jar;"%REPO%"\hyracks-hadoop-compat-0.2.2-SNAPSHOT.jar;"%REPO%"\pregelix-dataflow-0.0.1-SNAPSHOT.jar;"%REPO%"\pregelix-runtime-0.0.1-SNAPSHOT.jar;"%REPO%"\hadoop-test-0.20.2.jar;"%REPO%"\ftplet-api-1.0.0.jar;"%REPO%"\mina-core-2.0.0-M5.jar;"%REPO%"\ftpserver-core-1.0.0.jar;"%REPO%"\ftpserver-deprecated-1.0.0-M2.jar;"%REPO%"\javax.servlet-api-3.0.1.jar;"%REPO%"\pregelix-core-0.0.1-SNAPSHOT.jar

+goto endInit

+

+@REM Reaching here means variables are defined and arguments have been captured

+:endInit

+

+%JAVACMD% %JAVA_OPTS%  -classpath %CLASSPATH_PREFIX%;%CLASSPATH% -Dapp.name="pregelix" -Dapp.repo="%REPO%" -Dapp.home="%BASEDIR%" -Dbasedir="%BASEDIR%" org.apache.hadoop.util.RunJar %CMD_LINE_ARGS%

+if ERRORLEVEL 1 goto error

+goto end

+

+:error

+if "%OS%"=="Windows_NT" @endlocal

+set ERROR_CODE=%ERRORLEVEL%

+

+:end

+@REM set local scope for the variables with windows NT shell

+if "%OS%"=="Windows_NT" goto endNT

+

+@REM For old DOS remove the set variables from ENV - we assume they were not set

+@REM before we started - at least we don't leave any baggage around

+set CMD_LINE_ARGS=

+goto postExec

+

+:endNT

+@REM If error code is set to 1 then the endlocal was done already in :error.

+if %ERROR_CODE% EQU 0 @endlocal

+

+

+:postExec

+

+if "%FORCE_EXIT_ON_ERROR%" == "on" (

+  if %ERROR_CODE% NEQ 0 exit %ERROR_CODE%

+)

+

+exit /B %ERROR_CODE%
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh b/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
new file mode 100644
index 0000000..629bd90
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
@@ -0,0 +1,6 @@
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; bin/startnc.sh"
+done
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startCluster.sh b/pregelix/pregelix-core/src/main/resources/scripts/startCluster.sh
new file mode 100644
index 0000000..a0c2063
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startCluster.sh
@@ -0,0 +1,3 @@
+bin/startcc.sh
+sleep 5
+bin/startAllNCs.sh
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startDebugNc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startDebugNc.sh
new file mode 100755
index 0000000..fe6cf27
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startDebugNc.sh
@@ -0,0 +1,50 @@
+hostname
+
+#Resolve the CC's IP address by running getip.sh on the master host
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster-wide and debug-NC-specific properties
+. conf/cluster.properties
+. conf/debugnc.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR2
+mkdir $NCTMP_DIR2
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR2
+mkdir $NCLOGS_DIR2
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS2 | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Re-export JAVA_HOME for the child JVM (no-op if it was already exported)
+export JAVA_HOME=$JAVA_HOME
+
+#Get the IP address of this node
+IPADDR=`bin/getip.sh`
+
+#Node ID = short hostname plus a "2" suffix so it differs from the regular NC on this host
+NODEID=`hostname | cut -d '.' -f 1`
+NODEID=${NODEID}2
+
+#Debug-NC JVM options -- presumably defined in conf/debugnc.properties; confirm
+export JAVA_OPTS=$NCJAVA_OPTS2
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR2
+
+#Launch the debug hyracks nc in the background; log goes to $NCLOGS_DIR2/<node-id>.log
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
new file mode 100644
index 0000000..fe2551d
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+hostname
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Get the IP address of the cc (this script is expected to run on the master itself)
+CCHOST_NAME=`cat conf/master` # NOTE(review): read but never used below -- confirm before removing
+CCHOST=`bin/getip.sh`
+
+#Remove the temp dir
+rm -rf $CCTMP_DIR
+mkdir $CCTMP_DIR
+
+#Remove the logs dir
+rm -rf $CCLOGS_DIR
+mkdir $CCLOGS_DIR
+
+#Export JAVA_HOME and the CC JVM options (CCJAVA_OPTS comes from cluster.properties)
+export JAVA_HOME=$JAVA_HOME
+export JAVA_OPTS=$CCJAVA_OPTS
+
+#Launch the hyracks cc in the background; output goes to $CCLOGS_DIR/cc.log
+chmod -R 755 $HYRACKS_HOME # ensure the appassembler launch scripts are executable
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 3 &> $CCLOGS_DIR/cc.log &
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
new file mode 100644
index 0000000..6e0f90e
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
@@ -0,0 +1,49 @@
+hostname
+
+MY_NAME=`hostname` # NOTE(review): unused below -- confirm before removing
+#Resolve the CC's IP address by running getip.sh on the master host
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR
+mkdir $NCTMP_DIR
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR
+mkdir $NCLOGS_DIR
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Re-export JAVA_HOME for the child JVM (no-op if it was already exported)
+export JAVA_HOME=$JAVA_HOME
+
+IPADDR=`bin/getip.sh`
+#echo $IPADDR
+
+#Node ID = short hostname (first dotted component)
+NODEID=`hostname | cut -d '.' -f 1`
+
+#NC JVM options (NCJAVA_OPTS comes from cluster.properties)
+export JAVA_OPTS=$NCJAVA_OPTS
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR
+
+#Launch the hyracks nc in the background; log goes to $NCLOGS_DIR/<node-id>.log
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopAllNCs.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopAllNCs.sh
new file mode 100644
index 0000000..12367c1
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopAllNCs.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# Stop the Node Controller on every slave host listed in conf/slaves.
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; bin/stopnc.sh"
+done
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopCluster.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopCluster.sh
new file mode 100644
index 0000000..4889934
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopCluster.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Shut down the whole cluster: stop all Node Controllers first,
+# then stop the Cluster Controller.
+bin/stopAllNCs.sh
+# Brief pause so the NC shutdowns settle before the CC goes away.
+sleep 2
+bin/stopcc.sh
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopcc.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopcc.sh
new file mode 100644
index 0000000..c2f525a
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopcc.sh
@@ -0,0 +1,10 @@
+hostname
+. conf/cluster.properties
+
+#Find the CC's java process (NOTE(review): pattern matches ANY hyracks java process owned by $USER -- confirm CC and NC never share a host user)
+PID=`ps -ef|grep ${USER}|grep java|grep hyracks|awk '{print $2}'`
+echo $PID
+[ -n "$PID" ] && kill -9 $PID
+
+#Clean up CC temp dir
+rm -rf $CCTMP_DIR/*
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
new file mode 100644
index 0000000..03ce4e7
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
@@ -0,0 +1,23 @@
+hostname
+. conf/cluster.properties
+
+#Find the NC's java process by its appassembler app name
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+#Fall back to matching by numeric uid if the user-name match found nothing
+if [ "$PID" == "" ]; then
+  USERID=`id | sed 's/^uid=//;s/(.*$//'`
+  PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+fi
+
+echo $PID
+[ -n "$PID" ] && kill -9 $PID
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir/*
+done
+
+#Clean up NC temp dir
+rm -rf $NCTMP_DIR/*
diff --git a/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTest.java b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTest.java
new file mode 100644
index 0000000..97659d4
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTest.java
@@ -0,0 +1,594 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.join;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.join.InMemoryHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.pregelix.core.data.TypeTraits;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
+import edu.uci.ics.pregelix.core.util.TestUtils;
+import edu.uci.ics.pregelix.dataflow.VertexWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.IndexNestedLoopJoinOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.std.ProjectOperatorDescriptor;
+import edu.uci.ics.pregelix.runtime.bootstrap.StorageManagerInterface;
+import edu.uci.ics.pregelix.runtime.bootstrap.TreeIndexRegistryProvider;
+
+public class JoinTest {
+    private final static String ACTUAL_RESULT_DIR = "actual";
+    private final static String EXPECT_RESULT_DIR = "expected";
+    private final static String ACTUAL_RESULT_FILE = ACTUAL_RESULT_DIR + File.separator + "join.txt";
+    private final static String EXPECTED_RESULT_FILE = EXPECT_RESULT_DIR + File.separator + "join.txt";
+    private final static String JOB_NAME = "JOIN_TEST";
+    private static final String HYRACKS_APP_NAME = "giraph";
+    private static final String NC1_ID = "nc1";
+    private static final String NC2_ID = "nc2";
+
+    private static final String PATH_TO_CLUSTER_STORE = "src/test/resources/cluster/data.properties";
+    private static final String PATH_TO_CLUSTER_PROPERTIES = "src/test/resources/cluster/cluster.properties";
+
+    private static final float DEFAULT_BTREE_FILL_FACTOR = 1.00f;
+    private IIndexRegistryProvider<IIndex> treeRegistry = TreeIndexRegistryProvider.INSTANCE;
+    private IStorageManagerInterface storageManagerInterface = StorageManagerInterface.INSTANCE;
+
+    private IBinaryHashFunctionFactory stringHashFactory = new PointableBinaryHashFunctionFactory(
+            UTF8StringPointable.FACTORY);
+    private IBinaryComparatorFactory stringComparatorFactory = new PointableBinaryComparatorFactory(
+            UTF8StringPointable.FACTORY);
+
+    private void cleanupStores() throws IOException {
+        FileUtils.forceMkdir(new File("teststore"));
+        FileUtils.forceMkdir(new File("build"));
+        FileUtils.cleanDirectory(new File("teststore"));
+        FileUtils.cleanDirectory(new File("build"));
+    }
+
+    @Test
+    public void customerOrderCIDJoinMulti() throws Exception {
+        ClusterConfig.setStorePath(PATH_TO_CLUSTER_STORE);
+        ClusterConfig.setClusterPropertiesPath(PATH_TO_CLUSTER_PROPERTIES);
+        cleanupStores();
+        PregelixHyracksIntegrationUtil.init();
+        PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);
+
+        FileUtils.forceMkdir(new File(EXPECT_RESULT_DIR));
+        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(EXPECT_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        runCreate();
+        runBulkLoad();
+        runHashJoin();
+        runIndexJoin();
+        TestUtils.compareWithResult(new File(EXPECTED_RESULT_FILE), new File(ACTUAL_RESULT_FILE));
+
+        FileUtils.cleanDirectory(new File(EXPECT_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        runLeftOuterHashJoin();
+        runIndexRightOuterJoin();
+        TestUtils.compareWithResult(new File(EXPECTED_RESULT_FILE), new File(ACTUAL_RESULT_FILE));
+
+        PregelixHyracksIntegrationUtil.destroyApp(HYRACKS_APP_NAME);
+        PregelixHyracksIntegrationUtil.deinit();
+    }
+
+    private void runHashJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
+                new int[] { 0 }, new IBinaryHashFunctionFactory[] { stringHashFactory },
+                new IBinaryComparatorFactory[] { stringComparatorFactory }, custOrderJoinDesc, 128);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        int[] sortFields = new int[2];
+        sortFields[0] = 1;
+        sortFields[1] = 0;
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
+        comparatorFactories[0] = stringComparatorFactory;
+        comparatorFactories[1] = stringComparatorFactory;
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1024, sortFields,
+                comparatorFactories, custOrderJoinDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
+
+        FileSplit resultFile = new FileSplit(NC1_ID, new FileReference(new File(EXPECTED_RESULT_FILE)));
+        FileSplit[] results = new FileSplit[1];
+        results[0] = resultFile;
+        IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+                null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
+
+        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { stringHashFactory }));
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { stringHashFactory }));
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, sorter, 0);
+        IConnectorDescriptor joinWriterConn = new MToNPartitioningMergingConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { stringHashFactory }), sortFields, comparatorFactories);
+        spec.connect(joinWriterConn, sorter, 0, writer, 0);
+
+        spec.addRoot(writer);
+        runTest(spec);
+    }
+
+    private void runCreate() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = stringComparatorFactory;
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(JOB_NAME, JOB_NAME);
+        ITypeTraits[] typeTraits = new ITypeTraits[custDesc.getFields().length];
+        for (int i = 0; i < typeTraits.length; i++)
+            typeTraits[i] = new TypeTraits(false);
+        TreeIndexCreateOperatorDescriptor writer = new TreeIndexCreateOperatorDescriptor(spec, storageManagerInterface,
+                treeRegistry, fileSplitProvider, typeTraits, comparatorFactories, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, NC1_ID, NC2_ID);
+        spec.addRoot(writer);
+        runTest(spec);
+    }
+
+    private void runBulkLoad() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        int[] sortFields = new int[1];
+        sortFields[0] = 0;
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = stringComparatorFactory;
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1024, sortFields,
+                comparatorFactories, custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
+
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(JOB_NAME, JOB_NAME);
+        int[] fieldPermutation = new int[custDesc.getFields().length];
+        for (int i = 0; i < fieldPermutation.length; i++)
+            fieldPermutation[i] = i;
+        ITypeTraits[] typeTraits = new ITypeTraits[custDesc.getFields().length];
+        for (int i = 0; i < typeTraits.length; i++)
+            typeTraits[i] = new TypeTraits(false);
+        TreeIndexBulkLoadOperatorDescriptor writer = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManagerInterface, treeRegistry, fileSplitProvider, typeTraits, comparatorFactories,
+                fieldPermutation, DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, NC1_ID, NC2_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), custScanner, 0, sorter, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+                sortFields, new IBinaryHashFunctionFactory[] { stringHashFactory }), sortFields, comparatorFactories),
+                sorter, 0, writer, 0);
+
+        spec.addRoot(writer);
+        runTest(spec);
+    }
+
+    private void runIndexJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        /** sort operator */
+        int[] sortFields = new int[2];
+        sortFields[0] = 1;
+        sortFields[1] = 0;
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
+        comparatorFactories[0] = stringComparatorFactory;
+        comparatorFactories[1] = stringComparatorFactory;
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1024, sortFields,
+                comparatorFactories, ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
+
+        /** index join operator */
+        int[] keyFields = new int[1];
+        keyFields[0] = 1;
+        IBinaryComparatorFactory[] keyComparatorFactories = new IBinaryComparatorFactory[1];
+        keyComparatorFactories[0] = stringComparatorFactory;
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(JOB_NAME, JOB_NAME);
+        ITypeTraits[] typeTraits = new ITypeTraits[custDesc.getFields().length];
+        for (int i = 0; i < typeTraits.length; i++)
+            typeTraits[i] = new TypeTraits(false);
+        IndexNestedLoopJoinOperatorDescriptor join = new IndexNestedLoopJoinOperatorDescriptor(spec, custOrderJoinDesc,
+                storageManagerInterface, treeRegistry, fileSplitProvider, typeTraits, keyComparatorFactories, true,
+                keyFields, keyFields, true, true, new BTreeDataflowHelperFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        /** results (already in sorted order) */
+        FileSplit resultFile = new FileSplit(NC1_ID, new FileReference(new File(ACTUAL_RESULT_FILE)));
+        FileSplit[] results = new FileSplit[1];
+        results[0] = resultFile;
+        IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+                null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+                keyFields, new IBinaryHashFunctionFactory[] { stringHashFactory }), sortFields, comparatorFactories),
+                sorter, 0, join, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+                keyFields, new IBinaryHashFunctionFactory[] { stringHashFactory }), sortFields, comparatorFactories),
+                join, 0, writer, 0);
+
+        spec.addRoot(writer);
+        runTest(spec);
+    }
+
+    private void runLeftOuterHashJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[] { JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE };
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
+        InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 0 },
+                new int[] { 1 }, new IBinaryHashFunctionFactory[] { stringHashFactory },
+                new IBinaryComparatorFactory[] { stringComparatorFactory }, custOrderJoinDesc, true,
+                nullWriterFactories, 128);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        int[] projectFields = new int[] { 8, 9, 10, 11, 12, 13, 14, 15, 16, 0, 1, 2, 3, 4, 5, 6, 7 };
+        ProjectOperatorDescriptor project = new ProjectOperatorDescriptor(spec, custOrderJoinDesc, projectFields);
+
+        int[] sortFields = new int[2];
+        sortFields[0] = 9;
+        sortFields[1] = 0;
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
+        comparatorFactories[0] = stringComparatorFactory;
+        comparatorFactories[1] = stringComparatorFactory;
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1024, sortFields,
+                comparatorFactories, custOrderJoinDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
+
+        FileSplit resultFile = new FileSplit(NC1_ID, new FileReference(new File(EXPECTED_RESULT_FILE)));
+        FileSplit[] results = new FileSplit[1];
+        results[0] = resultFile;
+        IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+                null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
+
+        IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 1 },
+                        new IBinaryHashFunctionFactory[] { stringHashFactory }));
+        spec.connect(ordJoinConn, ordScanner, 0, join, 1);
+
+        IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 0 },
+                        new IBinaryHashFunctionFactory[] { stringHashFactory }));
+        spec.connect(custJoinConn, custScanner, 0, join, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, project, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), project, 0, sorter, 0);
+        IConnectorDescriptor joinWriterConn = new MToNPartitioningMergingConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(new int[] { 9 },
+                        new IBinaryHashFunctionFactory[] { stringHashFactory }), sortFields, comparatorFactories);
+        spec.connect(joinWriterConn, sorter, 0, writer, 0);
+
+        spec.addRoot(writer);
+        runTest(spec);
+    }
+
+    private void runIndexRightOuterJoin() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[] { JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE,
+                JoinTestNullWriterFactory.INSTANCE, JoinTestNullWriterFactory.INSTANCE };
+
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        /** sort operator */
+        int[] sortFields = new int[2];
+        sortFields[0] = 1;
+        sortFields[1] = 0;
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
+        comparatorFactories[0] = stringComparatorFactory;
+        comparatorFactories[1] = stringComparatorFactory;
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1024, sortFields,
+                comparatorFactories, ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
+
+        /** index join operator */
+        int[] keyFields = new int[1];
+        keyFields[0] = 1;
+        IBinaryComparatorFactory[] keyComparatorFactories = new IBinaryComparatorFactory[1];
+        keyComparatorFactories[0] = stringComparatorFactory;
+        IFileSplitProvider fileSplitProvider = ClusterConfig.getFileSplitProvider(JOB_NAME, JOB_NAME);
+        ITypeTraits[] typeTraits = new ITypeTraits[custDesc.getFields().length];
+        for (int i = 0; i < typeTraits.length; i++)
+            typeTraits[i] = new TypeTraits(false);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(new TypeAwareTupleWriterFactory(
+                typeTraits));
+        IndexNestedLoopJoinOperatorDescriptor join = new IndexNestedLoopJoinOperatorDescriptor(spec, custOrderJoinDesc,
+                storageManagerInterface, treeRegistry, fileSplitProvider, interiorFrameFactory, leafFrameFactory,
+                typeTraits, keyComparatorFactories, true, keyFields, keyFields, true, true,
+                new BTreeDataflowHelperFactory(), true, nullWriterFactories);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        /** results (already in sorted order) */
+        FileSplit resultFile = new FileSplit(NC1_ID, new FileReference(new File(ACTUAL_RESULT_FILE)));
+        FileSplit[] results = new FileSplit[1];
+        results[0] = resultFile;
+        IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+                null);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
+        PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+                keyFields, new IBinaryHashFunctionFactory[] { new PointableBinaryHashFunctionFactory(
+                        UTF8StringPointable.FACTORY) }), sortFields, comparatorFactories), sorter, 0, join, 0);
+
+        IBinaryComparatorFactory[] mergeComparatorFactories = new IBinaryComparatorFactory[2];
+        mergeComparatorFactories[0] = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY);
+        mergeComparatorFactories[1] = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY);
+        int[] mergeFields = new int[] { 9, 0 };
+        spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+                new int[] { 9 }, new IBinaryHashFunctionFactory[] { new PointableBinaryHashFunctionFactory(
+                        UTF8StringPointable.FACTORY) }), mergeFields, comparatorFactories), join, 0, writer, 0);
+
+        spec.addRoot(writer);
+        runTest(spec);
+    }
+
+    private void runTest(JobSpecification spec) throws Exception {
+        PregelixHyracksIntegrationUtil.runJob(spec, HYRACKS_APP_NAME);
+    }
+}
diff --git a/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTestNullWriterFactory.java b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTestNullWriterFactory.java
new file mode 100644
index 0000000..8f2e546
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTestNullWriterFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.join;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+
+public class JoinTestNullWriterFactory implements INullWriterFactory {
+    private static final long serialVersionUID = 1L;
+    public static INullWriterFactory INSTANCE = new JoinTestNullWriterFactory();
+
+    @Override
+    public INullWriter createNullWriter() {
+        return new INullWriter() {
+
+            @Override
+            public void writeNull(DataOutput out) throws HyracksDataException {
+                UTF8StringSerializerDeserializer.INSTANCE.serialize("NULL", out);
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/util/TestUtils.java b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/util/TestUtils.java
new file mode 100644
index 0000000..83dd10d
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/util/TestUtils.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.core.util;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+
+public class TestUtils {
+
+    public static void compareWithResult(File expectedFile, File actualFile) throws Exception {
+        BufferedReader readerExpected = new BufferedReader(new FileReader(expectedFile));
+        BufferedReader readerActual = new BufferedReader(new FileReader(actualFile));
+        String lineExpected, lineActual;
+        int num = 1;
+        try {
+            while ((lineExpected = readerExpected.readLine()) != null) {
+                lineActual = readerActual.readLine();
+                // Assert.assertEquals(lineExpected, lineActual);
+                if (lineActual == null) {
+                    throw new Exception("Actual result changed at line " + num + ":\n< " + lineExpected + "\n> ");
+                }
+                if (!equalStrings(lineExpected, lineActual)) {
+                    throw new Exception("Result for changed at line " + num + ":\n< " + lineExpected + "\n> "
+                            + lineActual);
+                }
+                ++num;
+            }
+            lineActual = readerActual.readLine();
+            if (lineActual != null) {
+                throw new Exception("Actual result changed at line " + num + ":\n< \n> " + lineActual);
+            }
+        } finally {
+            readerExpected.close();
+            readerActual.close();
+        }
+    }
+
+    private static boolean equalStrings(String s1, String s2) {
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
+
+        if (rowsOne.length != rowsTwo.length)
+            return false;
+
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
+
+            if (row1.equals(row2))
+                continue;
+
+            String[] fields1 = row1.split(",");
+            String[] fields2 = row2.split(",");
+
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    return false;
+                } else {
+                    fields1[j] = fields1[j].split("=")[1];
+                    fields2[j] = fields2[j].split("=")[1];
+                    Double double1 = Double.parseDouble(fields1[j]);
+                    Double double2 = Double.parseDouble(fields2[j]);
+                    float float1 = (float) double1.doubleValue();
+                    float float2 = (float) double2.doubleValue();
+
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else {
+                        return false;
+                    }
+                }
+            }
+        }
+        return true;
+    }
+
+}
diff --git a/pregelix/pregelix-core/src/test/resources/cluster/cluster.properties b/pregelix/pregelix-core/src/test/resources/cluster/cluster.properties
new file mode 100644
index 0000000..14f8bd4
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/cluster/cluster.properties
@@ -0,0 +1,37 @@
+#The CC port for Hyracks clients
+CC_CLIENTPORT=3099
+
+#The CC port for Hyracks cluster management
+CC_CLUSTERPORT=1099
+
+#The directory of hyracks binaries
+HYRACKS_HOME=../../../../hyracks
+
+#The tmp directory for cc to install jars
+CCTMP_DIR=/tmp/t1
+
+#The tmp directory for nc to install jars
+NCTMP_DIR=/tmp/t2
+
+#The directory to put cc logs
+CCLOGS_DIR=$CCTMP_DIR/logs
+
+#The directory to put nc logs
+NCLOGS_DIR=$NCTMP_DIR/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS="/tmp/t3,/tmp/t4"
+
+#The JAVA_HOME
+JAVA_HOME=$JAVA_HOME
+
+#The frame size of the internal dataflow engine
+FRAME_SIZE=65536
+
+#CC JAVA_OPTS
+CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx3g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/pregelix/pregelix-core/src/test/resources/cluster/data.properties b/pregelix/pregelix-core/src/test/resources/cluster/data.properties
new file mode 100644
index 0000000..daf881e
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/cluster/data.properties
@@ -0,0 +1 @@
+store=teststore
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/test/resources/hadoop/conf/core-site.xml b/pregelix/pregelix-core/src/test/resources/hadoop/conf/core-site.xml
new file mode 100644
index 0000000..47dfac5
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/hadoop/conf/core-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://127.0.0.1:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
diff --git a/pregelix/pregelix-core/src/test/resources/hadoop/conf/hdfs-site.xml b/pregelix/pregelix-core/src/test/resources/hadoop/conf/hdfs-site.xml
new file mode 100644
index 0000000..8d29b1d
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/hadoop/conf/hdfs-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>1</value>
+</property>
+
+<property>
+	<name>dfs.block.size</name>
+	<value>65536</value>
+</property>
+
+</configuration>
diff --git a/pregelix/pregelix-core/src/test/resources/hadoop/conf/log4j.properties b/pregelix/pregelix-core/src/test/resources/hadoop/conf/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/hadoop/conf/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
diff --git a/pregelix/pregelix-core/src/test/resources/hadoop/conf/mapred-site.xml b/pregelix/pregelix-core/src/test/resources/hadoop/conf/mapred-site.xml
new file mode 100644
index 0000000..f89dd79
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/hadoop/conf/mapred-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+     <name>mapred.tasktracker.map.tasks.maximum</name>
+     <value>20</value>
+  </property>
+   <property>
+      <name>mapred.tasktracker.reduce.tasks.maximum</name>
+      <value>20</value>
+   </property>
+   <property>
+      <name>mapred.max.split.size</name>
+      <value>4096</value>
+   </property>
+
+</configuration>
diff --git a/pregelix/pregelix-core/src/test/resources/hyracks-deployment.properties b/pregelix/pregelix-core/src/test/resources/hyracks-deployment.properties
new file mode 100644
index 0000000..2ae9818
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/hyracks-deployment.properties
@@ -0,0 +1,2 @@
+#cc.bootstrap.class=edu.uci.ics.asterix.hyracks.bootstrap.CCBootstrapImpl
+nc.bootstrap.class=edu.uci.ics.pregelix.runtime.bootstrap.NCBootstrapImpl
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/test/resources/log4j.properties b/pregelix/pregelix-core/src/test/resources/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
diff --git a/pregelix/pregelix-core/src/test/resources/logging.properties b/pregelix/pregelix-core/src/test/resources/logging.properties
new file mode 100644
index 0000000..f43eb05
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/logging.properties
@@ -0,0 +1,66 @@
+############################################################
+#  	Default Logging Configuration File
+#
+# You can use a different file by specifying a filename
+# with the java.util.logging.config.file system property.  
+# For example java -Djava.util.logging.config.file=myfile
+############################################################
+
+############################################################
+#  	Global properties
+############################################################
+
+# "handlers" specifies a comma separated list of log Handler 
+# classes.  These handlers will be installed during VM startup.
+# Note that these classes must be on the system classpath.
+# By default we only configure a ConsoleHandler, which will only
+# show messages at the INFO and above levels.
+
+handlers= java.util.logging.ConsoleHandler
+
+# To also add the FileHandler, use the following line instead.
+
+# handlers= java.util.logging.FileHandler, java.util.logging.ConsoleHandler
+
+# Default global logging level.
+# This specifies which kinds of events are logged across
+# all loggers.  For any given facility this global level
+# can be overriden by a facility specific level
+# Note that the ConsoleHandler also has a separate level
+# setting to limit messages printed to the console.
+
+.level= WARNING
+# .level= INFO
+# .level= FINE
+# .level = FINEST
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# default file output is in user's home directory.
+
+# java.util.logging.FileHandler.pattern = %h/java%u.log
+# java.util.logging.FileHandler.limit = 50000
+# java.util.logging.FileHandler.count = 1
+# java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter
+
+# Limit the message that are printed on the console to FINE and above.
+
+java.util.logging.ConsoleHandler.level = WARNING
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+# For example, set the com.xyz.foo logger to only log SEVERE
+# messages:
+
+#edu.uci.ics.asterix.level = FINE
+#edu.uci.ics.algebricks.level = FINE
+edu.uci.ics.hyracks.level = INFO
+#edu.uci.ics.hyracks.control.nc.net.level = FINE
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std-base/pom.xml b/pregelix/pregelix-dataflow-std-base/pom.xml
new file mode 100644
index 0000000..d12f4be
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/pom.xml
@@ -0,0 +1,87 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>pregelix-dataflow-std-base</artifactId>
+	<packaging>jar</packaging>
+	<name>pregelix-dataflow-std-base</name>
+
+	<parent>
+    		<groupId>edu.uci.ics.hyracks</groupId>
+    		<artifactId>pregelix</artifactId>
+    		<version>0.2.3-SNAPSHOT</version>
+  	</parent>
+
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx512m -Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>teststore*</include>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunction.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunction.java
new file mode 100644
index 0000000..c05e9b9
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunction.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public interface IAggregateFunction {
+    /** should be called each time a new aggregate value is computed */
+    public void init() throws HyracksDataException;
+
+    public void step(IFrameTupleReference tuple) throws HyracksDataException;
+
+    public void finish() throws HyracksDataException;
+}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunctionFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunctionFactory.java
new file mode 100644
index 0000000..4be0bed
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunctionFactory.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public interface IAggregateFunctionFactory extends Serializable {
+    public IAggregateFunction createAggregateFunction(IDataOutputProvider provider) throws HyracksException;
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IFunction.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IFunction.java
new file mode 100644
index 0000000..23cb9c5
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IFunction.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IFunction {
+
+    public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writer) throws HyracksDataException;
+
+    public void process(Object[] tuple) throws HyracksDataException;
+
+    public void close() throws HyracksDataException;
+
+}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IFunctionFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IFunctionFactory.java
new file mode 100644
index 0000000..d0e70e5
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IFunctionFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import java.io.Serializable;
+
+public interface IFunctionFactory extends Serializable {
+
+    public IFunction createFunction();
+
+}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRecordDescriptorFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRecordDescriptorFactory.java
new file mode 100644
index 0000000..e7de650
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRecordDescriptorFactory.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IRecordDescriptorFactory extends Serializable {
+
+    public RecordDescriptor createRecordDescriptor() throws HyracksDataException;
+
+}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRuntimeHook.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRuntimeHook.java
new file mode 100644
index 0000000..f46166f
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRuntimeHook.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IRuntimeHook {
+
+    public void configure(IHyracksTaskContext ctx) throws HyracksDataException;
+
+}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRuntimeHookFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRuntimeHookFactory.java
new file mode 100644
index 0000000..009804c
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRuntimeHookFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import java.io.Serializable;
+
+public interface IRuntimeHookFactory extends Serializable {
+
+    public IRuntimeHook createRuntimeHook();
+
+}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunction.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunction.java
new file mode 100644
index 0000000..a0d365f
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunction.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public interface IUpdateFunction extends IFunction {
+
+	/**
+	 * update the tuple pointed by tupleRef called after process,
+	 * one-input-tuple-at-a-time
+	 * 
+	 * @param tupleRef
+	 * @throws HyracksDataException
+	 */
+	public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb)
+			throws HyracksDataException;
+
+}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunctionFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunctionFactory.java
new file mode 100644
index 0000000..b974c76
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunctionFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import java.io.Serializable;
+
+public interface IUpdateFunctionFactory extends Serializable {
+
+    public IUpdateFunction createFunction();
+
+}
diff --git a/pregelix/pregelix-dataflow-std/pom.xml b/pregelix/pregelix-dataflow-std/pom.xml
new file mode 100644
index 0000000..f4dadc6
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/pom.xml
@@ -0,0 +1,151 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>pregelix-dataflow-std</artifactId>
+	<packaging>jar</packaging>
+	<name>pregelix-dataflow-std</name>
+
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx512m -Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>teststore*</include>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-dataflow-std-base</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-data-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-hdfs-core</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-btree</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-ipc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorDescriptor.java
new file mode 100644
index 0000000..99e55f1
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorDescriptor.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.std;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+
+public class BTreeSearchFunctionUpdateOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    protected boolean isForward;
+    protected int[] lowKeyFields; // fields in input tuple to be used as low
+                                  // keys
+    protected int[] highKeyFields; // fields in input tuple to be used as high
+    // keys
+    protected boolean lowKeyInclusive;
+    protected boolean highKeyInclusive;
+
+    private final IUpdateFunctionFactory functionFactory;
+    private final IRuntimeHookFactory preHookFactory;
+    private final IRuntimeHookFactory postHookFactory;
+    private final IRecordDescriptorFactory inputRdFactory;
+
+    private final int outputArity;
+
+    public BTreeSearchFunctionUpdateOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory dataflowHelperFactory,
+            IRecordDescriptorFactory inputRdFactory, int outputArity, IUpdateFunctionFactory functionFactory,
+            IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory, RecordDescriptor... rDescs) {
+        super(spec, 1, outputArity, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, dataflowHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE);
+        this.isForward = isForward;
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+
+        this.functionFactory = functionFactory;
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+
+        for (int i = 0; i < rDescs.length; i++) {
+            this.recordDescriptors[i] = rDescs[i];
+        }
+
+        this.outputArity = outputArity;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new BTreeSearchFunctionUpdateOperatorNodePushable(this, ctx, partition, recordDescProvider, isForward,
+                lowKeyFields, highKeyFields, lowKeyInclusive, highKeyInclusive, functionFactory, preHookFactory,
+                postHookFactory, inputRdFactory, outputArity);
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
new file mode 100644
index 0000000..3938613
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
+
+public class BTreeSearchFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable { // per input tuple: range-search a B-tree, run an update function on each hit, batch resulting index updates
+    protected TreeIndexDataflowHelper treeIndexHelper; // owns init/deinit of the underlying tree index
+    protected FrameTupleAccessor accessor; // decodes incoming frames using recDesc
+
+    protected ByteBuffer writeBuffer;
+    protected FrameTupleAppender appender; // NOTE(review): writeBuffer/appender/tb/dos are initialized in open() but never used afterwards in this class; output flows through functionProxy — confirm they are needed
+    protected ArrayTupleBuilder tb;
+    protected DataOutput dos;
+
+    protected BTree btree;
+    protected boolean isForward;
+    protected PermutingFrameTupleReference lowKey; // null when no low-key fields were supplied (open-ended on the low side)
+    protected PermutingFrameTupleReference highKey; // null when no high-key fields were supplied
+    protected boolean lowKeyInclusive;
+    protected boolean highKeyInclusive;
+    protected RangePredicate rangePred; // reused across tuples; keys are re-set per tuple in nextFrame()
+    protected MultiComparator lowKeySearchCmp;
+    protected MultiComparator highKeySearchCmp;
+    protected ITreeIndexCursor cursor;
+    protected ITreeIndexFrame cursorFrame;
+    protected ITreeIndexAccessor indexAccessor;
+
+    protected RecordDescriptor recDesc; // record descriptor of the single input
+
+    private final IFrameWriter[] writers; // one writer per output; filled in via setOutputFrameWriter()
+    private final FunctionProxy functionProxy; // wraps the user update function plus pre/post runtime hooks
+    private ArrayTupleBuilder cloneUpdateTb; // populated by the function when a matched tuple must be written back to the index
+    private final UpdateBuffer updateBuffer; // accumulates cloned updates so they can be applied to the B-tree in batches
+
+    public BTreeSearchFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
+            int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
+            IUpdateFunctionFactory functionFactory, IRuntimeHookFactory preHookFactory,
+            IRuntimeHookFactory postHookFactory, IRecordDescriptorFactory inputRdFactory, int outputArity) {
+        treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.isForward = isForward;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        if (lowKeyFields != null && lowKeyFields.length > 0) { // key references only exist when key fields were supplied
+            lowKey = new PermutingFrameTupleReference();
+            lowKey.setFieldPermutation(lowKeyFields);
+        }
+        if (highKeyFields != null && highKeyFields.length > 0) {
+            highKey = new PermutingFrameTupleReference();
+            highKey.setFieldPermutation(highKeyFields);
+        }
+
+        this.writers = new IFrameWriter[outputArity];
+        this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
+                writers);
+        this.updateBuffer = new UpdateBuffer(ctx, 2); // assumes a 2-frame buffer is sufficient — TODO confirm sizing
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        /**
+         * open the function
+         */
+        functionProxy.functionOpen();
+        accessor = new FrameTupleAccessor(treeIndexHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+
+        try {
+            treeIndexHelper.init(false); // false: open existing index, do not create — TODO confirm flag semantics
+            btree = (BTree) treeIndexHelper.getIndex();
+            cursorFrame = btree.getLeafFrameFactory().createFrame();
+            setCursor();
+
+            // Construct range predicate.
+            lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(btree.getComparatorFactories(), lowKey);
+            highKeySearchCmp = BTreeUtils.getSearchMultiComparator(btree.getComparatorFactories(), highKey);
+            rangePred = new RangePredicate(null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
+                    highKeySearchCmp); // keys start null; set per input tuple in nextFrame()
+
+            writeBuffer = treeIndexHelper.getHyracksTaskContext().allocateFrame();
+            tb = new ArrayTupleBuilder(btree.getFieldCount());
+            dos = tb.getDataOutput();
+            appender = new FrameTupleAppender(treeIndexHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+            indexAccessor = btree.createAccessor();
+
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
+        } catch (Exception e) {
+            treeIndexHelper.deinit(); // release index resources on any setup failure
+            throw new HyracksDataException(e);
+        }
+    }
+
+    protected void setCursor() { // overridable cursor factory; default is a non-exclusive range-search cursor
+        cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, false);
+    }
+
+    protected void writeSearchResults() throws Exception { // drain the cursor, invoking the update function on every matched tuple
+        while (cursor.hasNext()) {
+            cursor.next();
+            ITupleReference tuple = cursor.getTuple();
+            functionProxy.functionCall(tuple, cloneUpdateTb); // function may fill cloneUpdateTb with a replacement tuple
+
+            //doing clone update
+            if (cloneUpdateTb.getSize() > 0) {
+                if (!updateBuffer.appendTuple(cloneUpdateTb)) { // buffer full: flush it before continuing
+                    //release the cursor/latch
+                    cursor.close();
+                    //batch update
+                    updateBuffer.updateBTree(indexAccessor);
+
+                    //search again
+                    cursor.reset();
+                    rangePred.setLowKey(tuple, true); // resume from the current tuple, inclusive
+                    rangePred.setHighKey(highKey, highKeyInclusive);
+                    indexAccessor.search(cursor, rangePred); // NOTE(review): 'tuple' was obtained from the now-closed cursor; assumes its backing memory is still valid here — confirm
+                }
+            }
+            cloneUpdateTb.reset(); // always clear so a stale update is not re-applied for the next tuple
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        try {
+            for (int i = 0; i < tupleCount; i++) { // one range search per input tuple
+                if (lowKey != null) {
+                    lowKey.reset(accessor, i);
+                }
+                if (highKey != null) {
+                    highKey.reset(accessor, i);
+                }
+                rangePred.setLowKey(lowKey, lowKeyInclusive); // null key means unbounded on that side
+                rangePred.setHighKey(highKey, highKeyInclusive);
+                cursor.reset();
+                indexAccessor.search(cursor, rangePred);
+                writeSearchResults();
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            try {
+                cursor.close(); // NOTE(review): if this throws, functionProxy.functionClose() below is skipped — confirm intended
+                //batch update
+                updateBuffer.updateBTree(indexAccessor); // flush any updates still buffered at end-of-input
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+
+            /**
+             * close the update function
+             */
+            functionProxy.functionClose();
+        } finally {
+            treeIndexHelper.deinit(); // always release the index, even on failure above
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        for (IFrameWriter writer : writers) // propagate failure downstream; NOTE(review): treeIndexHelper is not deinit-ed here — confirm
+            writer.fail();
+    }
+
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) { // called once per output before open()
+        writers[index] = writer;
+    }
+
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
new file mode 100644
index 0000000..4cbd6c4
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
+import edu.uci.ics.pregelix.dataflow.std.base.IFunction;
+import edu.uci.ics.pregelix.dataflow.std.base.IFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+
+public class FunctionCallOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor { // deserializes each input tuple and feeds it to a user function that writes to the outputs itself
+    private static final long serialVersionUID = 1L;
+    private final IFunctionFactory functionFactory; // creates the per-partition user function
+    private final IRuntimeHookFactory preHookFactory; // optional hook run before the function opens
+    private final IRuntimeHookFactory postHookFactory; // optional hook run at close, before the function closes
+    private final IRecordDescriptorFactory inputRdFactory; // optional override for the input record descriptor
+
+    public FunctionCallOperatorDescriptor(JobSpecification spec, IRecordDescriptorFactory inputRdFactory,
+            int outputArity, IFunctionFactory functionFactory, IRuntimeHookFactory preHookFactory,
+            IRuntimeHookFactory postHookFactory, RecordDescriptor... rDescs) {
+        super(spec, 1, outputArity); // single input, outputArity outputs
+        this.functionFactory = functionFactory;
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+
+        for (int i = 0; i < rDescs.length; i++) { // one record descriptor per output channel
+            this.recordDescriptors[i] = rDescs[i];
+        }
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
+            throws HyracksDataException {
+        return new AbstractUnaryInputOperatorNodePushable() {
+            private RecordDescriptor rd0; // resolved lazily in open()
+            private FrameDeserializer frameDeserializer;
+            private final IFrameWriter[] writers = new IFrameWriter[outputArity];
+            private final IFunction function = functionFactory.createFunction();
+            private ClassLoader ctxCL = Thread.currentThread().getContextClassLoader(); // saved so close() can restore it
+
+            @Override
+            public void open() throws HyracksDataException {
+                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
+                        : inputRdFactory.createRecordDescriptor(); // factory takes precedence over the provider
+                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); // run the user function under the operator's classloader
+                for (IFrameWriter writer : writers) {
+                    writer.open();
+                }
+                if (preHookFactory != null)
+                    preHookFactory.createRuntimeHook().configure(ctx);
+                function.open(ctx, rd0, writers); // the function writes its results to the output writers directly
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
+                frameDeserializer.reset(frame);
+                while (!frameDeserializer.done()) { // hand each deserialized tuple to the function
+                    Object[] tuple = frameDeserializer.deserializeRecord();
+                    function.process(tuple);
+                }
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                if (postHookFactory != null)
+                    postHookFactory.createRuntimeHook().configure(ctx);
+                function.close(); // NOTE(review): if this throws, writers are never closed and the classloader is not restored — consider try/finally
+                for (IFrameWriter writer : writers) {
+                    writer.close();
+                }
+                Thread.currentThread().setContextClassLoader(ctxCL); // restore the classloader swapped in open()
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                for (IFrameWriter writer : writers) { // NOTE(review): context classloader is not restored on the failure path — confirm
+                    writer.fail();
+                }
+            }
+
+            @Override
+            public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+                writers[index] = writer;
+            }
+        };
+    }
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorDescriptor.java
new file mode 100644
index 0000000..60559e8
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorDescriptor.java
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.std;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+
+public class IndexNestedLoopJoinFunctionUpdateOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor { // index nested-loop join variants (inner, right-outer, set-union) that run an update function over join results
+    private static final long serialVersionUID = 1L;
+
+    private boolean isForward;
+    private int[] lowKeyFields; // fields in input tuple to be used as low keys
+    private int[] highKeyFields; // fields in input tuple to be used as high
+    // keys
+    private boolean lowKeyInclusive;
+    private boolean highKeyInclusive;
+
+    // right outer join
+    private boolean isRightOuter = false; // set only by the right-outer-join constructor
+    private INullWriterFactory[] nullWriterFactories; // writers for the null side of non-matching tuples
+
+    // set union
+    private boolean isSetUnion = false; // set only by the set-union constructor
+
+    private final IUpdateFunctionFactory functionFactory;
+    private final IRuntimeHookFactory preHookFactory;
+    private final IRuntimeHookFactory postHookFactory;
+    private final IRecordDescriptorFactory inputRdFactory;
+
+    private final int outputArity;
+
+    public IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(JobSpecification spec,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory opHelperFactory,
+            IRecordDescriptorFactory inputRdFactory, int outputArity, IUpdateFunctionFactory functionFactory,
+            IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory, RecordDescriptor... rDescs) { // plain inner-join variant
+        super(spec, 1, outputArity, rDescs[0], storageManager, treeIndexRegistryProvider, fileSplitProvider,
+                typeTraits, comparatorFactories, opHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE); // rDescs[0] doubles as the operator's primary record descriptor
+        this.isForward = isForward;
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+
+        this.functionFactory = functionFactory;
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+
+        for (int i = 0; i < rDescs.length; i++) {
+            this.recordDescriptors[i] = rDescs[i];
+        }
+
+        this.outputArity = outputArity;
+    }
+
+    public IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(JobSpecification spec,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexFrameFactory leafFrameFactory, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory opHelperFactory,
+            boolean isRightOuter, INullWriterFactory[] nullWriterFactories, IRecordDescriptorFactory inputRdFactory,
+            int outputArity, IUpdateFunctionFactory functionFactory, IRuntimeHookFactory preHookFactory,
+            IRuntimeHookFactory postHookFactory, RecordDescriptor... rDescs) { // right-outer-join variant; NOTE(review): interiorFrameFactory/leafFrameFactory are accepted but never used — confirm
+        super(spec, 1, outputArity, rDescs[0], storageManager, treeIndexRegistryProvider, fileSplitProvider,
+                typeTraits, comparatorFactories, opHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE);
+        this.isForward = isForward;
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+
+        this.isRightOuter = isRightOuter;
+        this.nullWriterFactories = nullWriterFactories;
+
+        this.functionFactory = functionFactory;
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+
+        for (int i = 0; i < rDescs.length; i++) {
+            this.recordDescriptors[i] = rDescs[i];
+        }
+
+        this.outputArity = outputArity;
+    }
+
+    public IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(JobSpecification spec,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexFrameFactory leafFrameFactory, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory opHelperFactory,
+            boolean isSetUnion, IRecordDescriptorFactory inputRdFactory, int outputArity,
+            IUpdateFunctionFactory functionFactory, IRuntimeHookFactory preHookFactory,
+            IRuntimeHookFactory postHookFactory, RecordDescriptor... rDescs) { // set-union variant; NOTE(review): the three constructors duplicate identical assignment code — consider a shared private init
+        super(spec, 1, outputArity, rDescs[0], storageManager, treeIndexRegistryProvider, fileSplitProvider,
+                typeTraits, comparatorFactories, opHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE);
+        this.isForward = isForward;
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+
+        this.isSetUnion = isSetUnion;
+
+        this.functionFactory = functionFactory;
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+
+        for (int i = 0; i < rDescs.length; i++) {
+            this.recordDescriptors[i] = rDescs[i];
+        }
+
+        this.outputArity = outputArity;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) { // dispatches on join flavor: right-outer takes precedence over set-union, else plain join
+        if (isRightOuter) {
+            INullWriter[] nullWriters = new INullWriter[nullWriterFactories.length];
+            for (int i = 0; i < nullWriters.length; i++)
+                nullWriters[i] = nullWriterFactories[i].createNullWriter();
+            return new IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable(this, ctx, partition,
+                    recordDescProvider, isForward, lowKeyFields, highKeyFields, nullWriters, functionFactory,
+                    preHookFactory, postHookFactory, inputRdFactory, outputArity);
+        } else if (isSetUnion) {
+            return new IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable(this, ctx, partition,
+                    recordDescProvider, isForward, lowKeyFields, highKeyFields, functionFactory, preHookFactory,
+                    postHookFactory, inputRdFactory, outputArity);
+        } else {
+            return new IndexNestedLoopJoinFunctionUpdateOperatorNodePushable(this, ctx, partition, recordDescProvider,
+                    isForward, lowKeyFields, highKeyFields, lowKeyInclusive, highKeyInclusive, functionFactory,
+                    preHookFactory, postHookFactory, inputRdFactory, outputArity);
+        }
+    }
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
new file mode 100644
index 0000000..37029f3
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
+
+/**
+ * Index nested-loop join pushable that, for each incoming tuple, probes a BTree over the
+ * [lowKey, highKey] range and invokes a user-supplied update function on every matching
+ * index tuple. Tuples the function rewrites are batched in an {@link UpdateBuffer} and
+ * flushed back into the BTree; function output is routed to the downstream writers via
+ * the {@link FunctionProxy}.
+ */
+public class IndexNestedLoopJoinFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
+    private TreeIndexDataflowHelper treeIndexOpHelper;
+    private FrameTupleAccessor accessor;
+
+    private ByteBuffer writeBuffer;
+    private FrameTupleAppender appender;
+    private BTree btree;
+    // Permuted views over the input tuple selecting the low/high search-key fields;
+    // null when the corresponding bound is open.
+    private PermutingFrameTupleReference lowKey;
+    private PermutingFrameTupleReference highKey;
+    private boolean lowKeyInclusive;
+    private boolean highKeyInclusive;
+    private RangePredicate rangePred;
+    private MultiComparator lowKeySearchCmp;
+    private MultiComparator highKeySearchCmp;
+    private ITreeIndexCursor cursor;
+    private ITreeIndexFrame cursorFrame;
+    protected ITreeIndexAccessor indexAccessor;
+
+    private RecordDescriptor recDesc;
+    private final IFrameWriter[] writers;
+    // Wraps the update function plus its pre/post runtime hooks and output routing.
+    private final FunctionProxy functionProxy;
+    // Scratch builder the update function fills when it produces a rewritten tuple.
+    private ArrayTupleBuilder cloneUpdateTb;
+    private final UpdateBuffer updateBuffer;
+
+    /**
+     * @param opDesc        descriptor supplying the index dataflow helper factory
+     * @param lowKeyFields  input fields forming the low search key (null/empty leaves the low end open)
+     * @param highKeyFields input fields forming the high search key (null/empty leaves the high end open)
+     * @param outputArity   number of downstream writers fed by the update function
+     */
+    public IndexNestedLoopJoinFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
+            int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
+            IUpdateFunctionFactory functionFactory, IRuntimeHookFactory preHookFactory,
+            IRuntimeHookFactory postHookFactory, IRecordDescriptorFactory inputRdFactory, int outputArity) {
+        treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        if (lowKeyFields != null && lowKeyFields.length > 0) {
+            lowKey = new PermutingFrameTupleReference();
+            lowKey.setFieldPermutation(lowKeyFields);
+        }
+        if (highKeyFields != null && highKeyFields.length > 0) {
+            highKey = new PermutingFrameTupleReference();
+            highKey.setFieldPermutation(highKeyFields);
+        }
+
+        this.writers = new IFrameWriter[outputArity];
+        this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
+                writers);
+        // Buffer up to 2 frames of pending updates before a batch flush into the BTree.
+        this.updateBuffer = new UpdateBuffer(ctx, 2);
+    }
+
+    // Overridable so subclasses can substitute a different cursor implementation.
+    protected void setCursor() {
+        cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, true);
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        /**
+         * open the function
+         */
+        functionProxy.functionOpen();
+        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+
+        try {
+            treeIndexOpHelper.init(false);
+            btree = (BTree) treeIndexOpHelper.getIndex();
+            btree.open(treeIndexOpHelper.getIndexFileId());
+            cursorFrame = btree.getLeafFrameFactory().createFrame();
+            setCursor();
+
+            // TODO: Can we construct the multicmps using helper methods?
+            // Search comparators cover only as many fields as the key provides
+            // (prefix search); default to the full comparator set when a bound is open.
+            int lowKeySearchFields = btree.getComparatorFactories().length;
+            int highKeySearchFields = btree.getComparatorFactories().length;
+            if (lowKey != null)
+                lowKeySearchFields = lowKey.getFieldCount();
+            if (highKey != null)
+                highKeySearchFields = highKey.getFieldCount();
+
+            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
+            for (int i = 0; i < lowKeySearchFields; i++) {
+                lowKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+            }
+            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
+
+            if (lowKeySearchFields == highKeySearchFields) {
+                // Same arity: reuse the low-key comparator for the high bound.
+                highKeySearchCmp = lowKeySearchCmp;
+            } else {
+                IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
+                for (int i = 0; i < highKeySearchFields; i++) {
+                    highKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+                }
+                highKeySearchCmp = new MultiComparator(highKeySearchComparators);
+
+            }
+
+            // Keys are filled in per input tuple in nextFrame(); only bounds/comparators are fixed here.
+            rangePred = new RangePredicate(null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
+                    highKeySearchCmp);
+            writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
+            appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+
+            indexAccessor = btree.createAccessor();
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
+        } catch (Exception e) {
+            // Release index resources before surfacing the failure.
+            treeIndexOpHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Drains the current range cursor, calling the update function on each match.
+     * When the update buffer fills, the cursor (and its latches) is released, the
+     * buffered updates are applied to the BTree, and the search is restarted.
+     */
+    private void writeSearchResults(IFrameTupleAccessor leftAccessor, int tIndex) throws Exception {
+        while (cursor.hasNext()) {
+            cursor.next();
+            ITupleReference tupleRef = cursor.getTuple();
+
+            /**
+             * call the update function
+             */
+            functionProxy.functionCall(leftAccessor, tIndex, tupleRef, cloneUpdateTb);
+
+            // A non-empty builder means the function produced an updated tuple to write back.
+            if (cloneUpdateTb.getSize() > 0) {
+                if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                    //release the cursor/latch
+                    cursor.close();
+                    //batch update
+                    updateBuffer.updateBTree(indexAccessor);
+
+                    //search again
+                    // NOTE(review): the restart is inclusive of the current tuple
+                    // (setLowKey(tupleRef, true)), so the function appears to be invoked
+                    // on that tuple a second time after a flush — confirm this is intended.
+                    cursor.reset();
+                    rangePred.setLowKey(tupleRef, true);
+                    rangePred.setHighKey(highKey, highKeyInclusive);
+                    indexAccessor.search(cursor, rangePred);
+                }
+            }
+            cloneUpdateTb.reset();
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+
+        int tupleCount = accessor.getTupleCount();
+        try {
+            // Probe the BTree once per input tuple, using that tuple's key fields as bounds.
+            for (int i = 0; i < tupleCount; i++) {
+                if (lowKey != null)
+                    lowKey.reset(accessor, i);
+                if (highKey != null)
+                    highKey.reset(accessor, i);
+                rangePred.setLowKey(lowKey, lowKeyInclusive);
+                rangePred.setHighKey(highKey, highKeyInclusive);
+
+                cursor.reset();
+                indexAccessor.search(cursor, rangePred);
+                writeSearchResults(accessor, i);
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            try {
+                // Release the cursor, then apply any updates still sitting in the buffer.
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+
+            /**
+             * close the update function
+             */
+            functionProxy.functionClose();
+        } finally {
+            // Always release the index, even if the flush or function close failed.
+            treeIndexOpHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        // Propagate the failure to every downstream writer.
+        // NOTE(review): unlike close(), this path does not deinit treeIndexOpHelper —
+        // verify the helper is released elsewhere on the failure path.
+        for (IFrameWriter writer : writers)
+            writer.fail();
+    }
+
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+        writers[index] = writer;
+    }
+
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinOperatorDescriptor.java
new file mode 100644
index 0000000..ed177e3
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinOperatorDescriptor.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/**
+ * Operator descriptor (1-in, 1-out) for index nested-loop joins against a tree index.
+ * Three constructors select the join variant via flags: plain inner join (default),
+ * right-outer join ({@code isRightOuter} + null writers), or set union ({@code isSetUnion}).
+ */
+public class IndexNestedLoopJoinOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private boolean isForward;
+    private int[] lowKeyFields; // fields in input tuple to be used as low keys
+    private int[] highKeyFields; // fields in input tuple to be used as high
+    // keys
+    private boolean lowKeyInclusive;
+    private boolean highKeyInclusive;
+
+    // right outer join
+    private boolean isRightOuter = false;
+    private INullWriterFactory[] nullWriterFactories;
+
+    // set union
+    private boolean isSetUnion = false;
+
+    /** Inner-join variant: neither isRightOuter nor isSetUnion is set. */
+    public IndexNestedLoopJoinOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory opHelperFactory) {
+        super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, opHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE);
+        this.isForward = isForward;
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+    }
+
+    /**
+     * Right-outer-join variant.
+     * NOTE(review): interiorFrameFactory and leafFrameFactory are accepted but never
+     * stored or used in this class — confirm whether they are still needed.
+     */
+    public IndexNestedLoopJoinOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexFrameFactory leafFrameFactory, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory opHelperFactory,
+            boolean isRightOuter, INullWriterFactory[] nullWriterFactories) {
+        super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, opHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE);
+        this.isForward = isForward;
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+
+        this.isRightOuter = isRightOuter;
+        this.nullWriterFactories = nullWriterFactories;
+    }
+
+    /**
+     * Set-union variant.
+     * NOTE(review): as above, the two frame-factory parameters are unused here.
+     */
+    public IndexNestedLoopJoinOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexFrameFactory leafFrameFactory, ITypeTraits[] typeTraits,
+            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory opHelperFactory,
+            boolean isSetUnion) {
+        super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, opHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE);
+        this.isForward = isForward;
+        this.lowKeyFields = lowKeyFields;
+        this.highKeyFields = highKeyFields;
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+
+        this.isSetUnion = isSetUnion;
+    }
+
+    /** Dispatches to the right-outer, set-union, or inner-join pushable, in that priority order. */
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        if (isRightOuter) {
+            INullWriter[] nullWriters = new INullWriter[nullWriterFactories.length];
+            for (int i = 0; i < nullWriters.length; i++)
+                nullWriters[i] = nullWriterFactories[i].createNullWriter();
+            return new IndexNestedLoopRightOuterJoinOperatorNodePushable(this, ctx, partition, recordDescProvider,
+                    isForward, lowKeyFields, highKeyFields, nullWriters);
+        } else if (isSetUnion) {
+            return new IndexNestedLoopSetUnionOperatorNodePushable(this, ctx, partition, recordDescProvider, isForward,
+                    lowKeyFields, highKeyFields);
+        } else {
+            return new IndexNestedLoopJoinOperatorNodePushable(this, ctx, partition, recordDescProvider, isForward,
+                    lowKeyFields, highKeyFields, lowKeyInclusive, highKeyInclusive);
+        }
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinOperatorNodePushable.java
new file mode 100644
index 0000000..bd076d3
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinOperatorNodePushable.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Index nested-loop inner-join pushable: for each input tuple it probes a BTree over
+ * the [lowKey, highKey] range and emits one output tuple per match, formed by
+ * concatenating all input fields with all index-tuple fields. Read-only — unlike the
+ * FunctionUpdate variant, nothing is written back to the index.
+ */
+public class IndexNestedLoopJoinOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+    private TreeIndexDataflowHelper treeIndexOpHelper;
+    private FrameTupleAccessor accessor;
+
+    private ByteBuffer writeBuffer;
+    private FrameTupleAppender appender;
+    // Builder for the concatenated (input fields + index fields) output tuple.
+    private ArrayTupleBuilder tb;
+    private DataOutput dos;
+
+    private BTree btree;
+    // Permuted views over the input tuple selecting the low/high search-key fields;
+    // null when the corresponding bound is open.
+    private PermutingFrameTupleReference lowKey;
+    private PermutingFrameTupleReference highKey;
+    private boolean lowKeyInclusive;
+    private boolean highKeyInclusive;
+    private RangePredicate rangePred;
+    private MultiComparator lowKeySearchCmp;
+    private MultiComparator highKeySearchCmp;
+    private IIndexCursor cursor;
+    protected ITreeIndexAccessor indexAccessor;
+
+    private RecordDescriptor recDesc;
+    private final RecordDescriptor inputRecDesc;
+
+    /**
+     * @param lowKeyFields  input fields forming the low search key (null/empty leaves the low end open)
+     * @param highKeyFields input fields forming the high search key (null/empty leaves the high end open)
+     */
+    public IndexNestedLoopJoinOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward, int[] lowKeyFields,
+            int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive) {
+        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.lowKeyInclusive = lowKeyInclusive;
+        this.highKeyInclusive = highKeyInclusive;
+        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        if (lowKeyFields != null && lowKeyFields.length > 0) {
+            lowKey = new PermutingFrameTupleReference();
+            lowKey.setFieldPermutation(lowKeyFields);
+        }
+        if (highKeyFields != null && highKeyFields.length > 0) {
+            highKey = new PermutingFrameTupleReference();
+            highKey.setFieldPermutation(highKeyFields);
+        }
+    }
+
+    // Overridable so subclasses can substitute a different cursor implementation.
+    protected void setCursor() {
+        cursor = indexAccessor.createSearchCursor();
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+
+        try {
+            treeIndexOpHelper.init(false);
+            btree = (BTree) treeIndexOpHelper.getIndex();
+            writer.open();
+
+            // Search comparators cover only as many fields as the key provides
+            // (prefix search); default to the full comparator set when a bound is open.
+            int lowKeySearchFields = btree.getComparatorFactories().length;
+            int highKeySearchFields = btree.getComparatorFactories().length;
+            if (lowKey != null)
+                lowKeySearchFields = lowKey.getFieldCount();
+            if (highKey != null)
+                highKeySearchFields = highKey.getFieldCount();
+
+            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
+            for (int i = 0; i < lowKeySearchFields; i++) {
+                lowKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+            }
+            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
+
+            if (lowKeySearchFields == highKeySearchFields) {
+                // Same arity: reuse the low-key comparator for the high bound.
+                highKeySearchCmp = lowKeySearchCmp;
+            } else {
+                IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
+                for (int i = 0; i < highKeySearchFields; i++) {
+                    highKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+                }
+                highKeySearchCmp = new MultiComparator(highKeySearchComparators);
+            }
+
+            // Keys are filled in per input tuple in nextFrame(); only bounds/comparators are fixed here.
+            rangePred = new RangePredicate(null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
+                    highKeySearchCmp);
+            writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
+            // Output tuple arity = all input fields followed by all index fields.
+            tb = new ArrayTupleBuilder(inputRecDesc.getFields().length + btree.getFieldCount());
+            dos = tb.getDataOutput();
+            appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+            indexAccessor = btree.createAccessor();
+            setCursor();
+        } catch (Exception e) {
+            // Release index resources before surfacing the failure.
+            treeIndexOpHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Drains the current range cursor, emitting one concatenated tuple
+     * (input fields then index fields) per match; flushes the output frame when full.
+     */
+    private void writeSearchResults(IFrameTupleAccessor leftAccessor, int tIndex) throws Exception {
+        while (cursor.hasNext()) {
+            tb.reset();
+            cursor.next();
+
+            ITupleReference frameTuple = cursor.getTuple();
+            // Copy every field of the probing (left) input tuple.
+            for (int i = 0; i < inputRecDesc.getFields().length; i++) {
+                int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
+                int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
+                int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
+                int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
+                dos.write(leftAccessor.getBuffer().array(), offset, len);
+                tb.addFieldEndOffset();
+            }
+            // Then append every field of the matching index tuple.
+            for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+                dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+                tb.addFieldEndOffset();
+            }
+
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                // Frame full: flush downstream and retry; a second failure means the
+                // single tuple is larger than a frame.
+                FrameUtils.flushFrame(writeBuffer, writer);
+                appender.reset(writeBuffer, true);
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    throw new IllegalStateException();
+                }
+            }
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+
+        int tupleCount = accessor.getTupleCount();
+        try {
+            // Probe the BTree once per input tuple, using that tuple's key fields as bounds.
+            for (int i = 0; i < tupleCount; i++) {
+                if (lowKey != null)
+                    lowKey.reset(accessor, i);
+                if (highKey != null)
+                    highKey.reset(accessor, i);
+                rangePred.setLowKey(lowKey, lowKeyInclusive);
+                rangePred.setHighKey(highKey, highKeyInclusive);
+
+                cursor.reset();
+                indexAccessor.search(cursor, rangePred);
+                writeSearchResults(accessor, i);
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            // Flush any partially-filled final frame before closing the writer.
+            // NOTE(review): if this flush throws, writer.close() is skipped — confirm
+            // the downstream writer tolerates that on the error path.
+            if (appender.getTupleCount() > 0) {
+                FrameUtils.flushFrame(writeBuffer, writer);
+            }
+            writer.close();
+            try {
+                cursor.close();
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+        } finally {
+            // Always release the index, even if flushing or closing failed.
+            treeIndexOpHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        // Propagate the failure downstream.
+        // NOTE(review): unlike close(), this path does not deinit treeIndexOpHelper —
+        // verify the helper is released elsewhere on the failure path.
+        writer.fail();
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
new file mode 100644
index 0000000..f7b3d62
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
+
+/**
+ * Index nested-loop right outer join whose results are fed to an update
+ * function instead of being emitted directly: each index (BTree) tuple is
+ * paired with a matching probe tuple, or with a tuple of nulls when no probe
+ * tuple matches, and any updated copy produced by the function is written back
+ * to the BTree in batches through an UpdateBuffer. The merge-style advance in
+ * nextFrame() appears to assume both the probe stream and the index scan are
+ * ordered on the join key -- TODO confirm against the producing operators.
+ */
+public class IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable extends
+        AbstractUnaryInputOperatorNodePushable {
+    private TreeIndexDataflowHelper treeIndexOpHelper;
+    private FrameTupleAccessor accessor;
+
+    private ByteBuffer writeBuffer;
+    private FrameTupleAppender appender;
+    // One tuple of nulls shaped like the input record, built once in open(),
+    // used as the probe side for unmatched index tuples (the outer case).
+    private ArrayTupleBuilder nullTupleBuilder;
+    private DataOutput dos;
+
+    private BTree btree;
+    private RangePredicate rangePred;
+    private MultiComparator lowKeySearchCmp;
+    private MultiComparator highKeySearchCmp;
+    private ITreeIndexCursor cursor;
+    private ITreeIndexFrame cursorFrame;
+    protected ITreeIndexAccessor indexAccessor;
+
+    private RecordDescriptor recDesc;
+    private final RecordDescriptor inputRecDesc;
+
+    private PermutingFrameTupleReference lowKey;
+    private PermutingFrameTupleReference highKey;
+
+    private INullWriter[] nullWriter;
+    // Index tuple currently at the head of the scan; null once the scan is exhausted.
+    private ITupleReference currentTopTuple;
+    // True once currentTopTuple has matched at least one probe tuple.
+    private boolean match;
+
+    private final IFrameWriter[] writers;
+    private final FunctionProxy functionProxy;
+    // Receives the function's updated copy of the index tuple, when one is produced.
+    private ArrayTupleBuilder cloneUpdateTb;
+    private UpdateBuffer updateBuffer;
+
+    // NOTE(review): the isForward parameter is accepted but never stored or
+    // used; confirm this operator is intentionally forward-scan only.
+    public IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
+            int[] lowKeyFields, int[] highKeyFields, INullWriter[] nullWriter, IUpdateFunctionFactory functionFactory,
+            IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory,
+            IRecordDescriptorFactory inputRdFactory, int outputArity) {
+        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+
+        if (lowKeyFields != null && lowKeyFields.length > 0) {
+            lowKey = new PermutingFrameTupleReference();
+            lowKey.setFieldPermutation(lowKeyFields);
+        }
+        if (highKeyFields != null && highKeyFields.length > 0) {
+            highKey = new PermutingFrameTupleReference();
+            highKey.setFieldPermutation(highKeyFields);
+        }
+        this.nullWriter = nullWriter;
+
+        this.writers = new IFrameWriter[outputArity];
+        this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
+                writers);
+        this.updateBuffer = new UpdateBuffer(ctx, 2);
+    }
+
+    // NOTE(review): the second argument is 'true' here but 'false' in the sibling
+    // non-update operator -- presumably exclusive node latching because this
+    // operator writes back to the tree; confirm against BTreeRangeSearchCursor.
+    protected void setCursor() {
+        cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, true);
+    }
+
+    /**
+     * Opens the function proxy and the index, builds the search comparators and
+     * an unbounded range predicate, constructs the null tuple, starts a full
+     * index scan, and positions currentTopTuple at its first entry.
+     */
+    @Override
+    public void open() throws HyracksDataException {
+        /**
+         * function open
+         */
+        functionProxy.functionOpen();
+        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+
+        try {
+            treeIndexOpHelper.init(false);
+            btree = (BTree) treeIndexOpHelper.getIndex();
+            cursorFrame = btree.getLeafFrameFactory().createFrame();
+            setCursor();
+
+            // construct range predicate
+            // TODO: Can we construct the multicmps using helper methods?
+            int lowKeySearchFields = btree.getComparatorFactories().length;
+            int highKeySearchFields = btree.getComparatorFactories().length;
+
+            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
+            for (int i = 0; i < lowKeySearchFields; i++) {
+                lowKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+            }
+            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
+
+            // Reuse the low-key comparator when both bounds cover the same fields.
+            if (lowKeySearchFields == highKeySearchFields) {
+                highKeySearchCmp = lowKeySearchCmp;
+            } else {
+                IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
+                for (int i = 0; i < highKeySearchFields; i++) {
+                    highKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+                }
+                highKeySearchCmp = new MultiComparator(highKeySearchComparators);
+            }
+
+            rangePred = new RangePredicate(null, null, true, true, lowKeySearchCmp, highKeySearchCmp);
+
+            writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
+
+            // Build the all-null probe tuple used for unmatched index entries.
+            nullTupleBuilder = new ArrayTupleBuilder(inputRecDesc.getFields().length);
+            dos = nullTupleBuilder.getDataOutput();
+            nullTupleBuilder.reset();
+            for (int i = 0; i < inputRecDesc.getFields().length; i++) {
+                nullWriter[i].writeNull(dos);
+                nullTupleBuilder.addFieldEndOffset();
+            }
+
+            appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+
+            indexAccessor = btree.createAccessor();
+
+            /** set the search cursor */
+            rangePred.setLowKey(null, true);
+            rangePred.setHighKey(null, true);
+            cursor.reset();
+            indexAccessor.search(cursor, rangePred);
+
+            /** set up current top tuple */
+            if (cursor.hasNext()) {
+                cursor.next();
+                currentTopTuple = cursor.getTuple();
+                match = false;
+            }
+
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
+        } catch (Exception e) {
+            treeIndexOpHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    // For the join-match case: invoke the update function on (probe tuple, index
+    // tuple) and stage any resulting updated tuple for a batched BTree write-back.
+    private void writeResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
+            throws Exception {
+        /**
+         * function call
+         */
+        functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb);
+
+        //doing clone update
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                //release the cursor/latch
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+
+                //search again and recover the cursor
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
+    }
+
+    /**
+     * Merge-style advance over one probe frame: while the probe key is less than
+     * or equal to the current index tuple, output a match on equality and move to
+     * the next probe tuple; otherwise advance the index cursor (emitting the
+     * outer null-padded result for unmatched index tuples along the way).
+     */
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        try {
+            for (int i = 0; i < tupleCount && currentTopTuple != null;) {
+                if (lowKey != null)
+                    lowKey.reset(accessor, i);
+                if (highKey != null)
+                    highKey.reset(accessor, i);
+                // TODO: currently use low key only, check what they mean
+                int cmp = compare(lowKey, currentTopTuple);
+                if (cmp <= 0) {
+                    if (cmp == 0)
+                        outputMatch(i);
+                    i++;
+                } else {
+                    moveTreeCursor();
+                }
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    // Feed the matching (probe, index) pair to the function and remember that
+    // the current index tuple has been matched.
+    private void outputMatch(int i) throws Exception {
+        writeResults(accessor, i, currentTopTuple);
+        match = true;
+    }
+
+    // Advance the index scan; if the departing tuple never matched, first emit
+    // it with the null probe side (the right-outer part of the join).
+    private void moveTreeCursor() throws Exception {
+        if (!match) {
+            writeResults(currentTopTuple);
+        }
+        if (cursor.hasNext()) {
+            cursor.next();
+            currentTopTuple = cursor.getTuple();
+            match = false;
+        } else {
+            currentTopTuple = null;
+        }
+    }
+
+    /**
+     * Drains the remaining index tuples (emitting outer results), flushes the
+     * pending batched updates into the BTree, closes the function proxy, and
+     * always releases the index helper's resources.
+     */
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            while (currentTopTuple != null) {
+                moveTreeCursor();
+            }
+            try {
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+
+            /**
+             * function close
+             */
+            functionProxy.functionClose();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            treeIndexOpHelper.deinit();
+        }
+    }
+
+    /** Propagates a failure notification to every downstream writer. */
+    @Override
+    public void fail() throws HyracksDataException {
+        for (IFrameWriter writer : writers)
+            writer.fail();
+    }
+
+    /** compare tuples */
+    private int compare(ITupleReference left, ITupleReference right) throws Exception {
+        return lowKeySearchCmp.compare(left, right);
+    }
+
+    /** write result for outer case */
+    private void writeResults(ITupleReference frameTuple) throws Exception {
+        /**
+         * function call
+         */
+        functionProxy.functionCall(nullTupleBuilder, frameTuple, cloneUpdateTb);
+
+        //doing clone update
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                //release the cursor/latch
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+
+                //search again and recover the cursor
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
+    }
+
+    /** Registers the writer for one of the operator's output branches. */
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+        writers[index] = writer;
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinOperatorNodePushable.java
new file mode 100644
index 0000000..9f1e1ad
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinOperatorNodePushable.java
@@ -0,0 +1,280 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Index nested-loop right outer join between the input (probe) stream and a
+ * BTree: every index tuple is emitted, concatenated with a matching probe tuple
+ * when one exists or with nulls otherwise. The merge-style advance in
+ * nextFrame() appears to assume both sides are ordered on the join key in the
+ * direction given by isForward -- TODO confirm against the producing operators.
+ */
+public class IndexNestedLoopRightOuterJoinOperatorNodePushable extends
+        AbstractUnaryInputUnaryOutputOperatorNodePushable {
+    private TreeIndexDataflowHelper treeIndexOpHelper;
+    private FrameTupleAccessor accessor;
+
+    private ByteBuffer writeBuffer;
+    private FrameTupleAppender appender;
+    // Builds each output tuple: input fields followed by the index tuple's fields.
+    private ArrayTupleBuilder tb;
+    private DataOutput dos;
+
+    private BTree btree;
+    // Scan/merge direction; only used to orient the comparison in nextFrame().
+    private boolean isForward;
+    private RangePredicate rangePred;
+    private MultiComparator lowKeySearchCmp;
+    private MultiComparator highKeySearchCmp;
+    private ITreeIndexCursor cursor;
+    private ITreeIndexFrame cursorFrame;
+    protected ITreeIndexAccessor indexAccessor;
+
+    private RecordDescriptor recDesc;
+    private final RecordDescriptor inputRecDesc;
+
+    private PermutingFrameTupleReference lowKey;
+    private PermutingFrameTupleReference highKey;
+
+    private INullWriter[] nullWriter;
+    // Index tuple currently at the head of the scan; null once the scan is exhausted.
+    private ITupleReference currentTopTuple;
+    // True once currentTopTuple has matched at least one probe tuple.
+    private boolean match;
+
+    public IndexNestedLoopRightOuterJoinOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
+            int[] lowKeyFields, int[] highKeyFields, INullWriter[] nullWriter) {
+        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.isForward = isForward;
+        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+
+        if (lowKeyFields != null && lowKeyFields.length > 0) {
+            lowKey = new PermutingFrameTupleReference();
+            lowKey.setFieldPermutation(lowKeyFields);
+        }
+        if (highKeyFields != null && highKeyFields.length > 0) {
+            highKey = new PermutingFrameTupleReference();
+            highKey.setFieldPermutation(highKeyFields);
+        }
+        this.nullWriter = nullWriter;
+    }
+
+    // NOTE(review): the second argument is 'false' here but 'true' in the
+    // function-update sibling -- presumably non-exclusive latching because this
+    // operator is read-only; confirm against BTreeRangeSearchCursor.
+    protected void setCursor() {
+        cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, false);
+    }
+
+    /**
+     * Opens the index and the downstream writer, builds the search comparators
+     * and an unbounded range predicate, starts a full index scan, and positions
+     * currentTopTuple at its first entry.
+     */
+    @Override
+    public void open() throws HyracksDataException {
+        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+        try {
+            treeIndexOpHelper.init(false);
+            btree = (BTree) treeIndexOpHelper.getIndex();
+            cursorFrame = btree.getLeafFrameFactory().createFrame();
+            setCursor();
+            writer.open();
+
+            // construct range predicate
+            // TODO: Can we construct the multicmps using helper methods?
+            int lowKeySearchFields = btree.getComparatorFactories().length;
+            int highKeySearchFields = btree.getComparatorFactories().length;
+
+            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
+            for (int i = 0; i < lowKeySearchFields; i++) {
+                lowKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+            }
+            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
+
+            // Reuse the low-key comparator when both bounds cover the same fields.
+            if (lowKeySearchFields == highKeySearchFields) {
+                highKeySearchCmp = lowKeySearchCmp;
+            } else {
+                IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
+                for (int i = 0; i < highKeySearchFields; i++) {
+                    highKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+                }
+                highKeySearchCmp = new MultiComparator(highKeySearchComparators);
+
+            }
+
+            rangePred = new RangePredicate(null, null, true, true, lowKeySearchCmp, highKeySearchCmp);
+
+            writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
+            // Output tuples hold all input fields plus all BTree fields.
+            tb = new ArrayTupleBuilder(inputRecDesc.getFields().length + btree.getFieldCount());
+            dos = tb.getDataOutput();
+            appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+
+            indexAccessor = btree.createAccessor();
+
+            /** set the search cursor */
+            rangePred.setLowKey(null, true);
+            rangePred.setHighKey(null, true);
+            cursor.reset();
+            indexAccessor.search(cursor, rangePred);
+
+            /** set up current top tuple */
+            if (cursor.hasNext()) {
+                cursor.next();
+                currentTopTuple = cursor.getTuple();
+                match = false;
+            }
+
+        } catch (Exception e) {
+            treeIndexOpHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    // For the join-match case: emit one output tuple formed by concatenating the
+    // probe tuple's fields with the index tuple's fields, flushing full frames.
+    private void writeResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
+            throws Exception {
+        tb.reset();
+        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
+            int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
+            int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
+            int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
+            int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
+            dos.write(leftAccessor.getBuffer().array(), offset, len);
+            tb.addFieldEndOffset();
+        }
+        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+            tb.addFieldEndOffset();
+        }
+
+        // Flush the frame and retry once if the tuple does not fit; a tuple that
+        // cannot fit in an empty frame is a hard error.
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            FrameUtils.flushFrame(writeBuffer, writer);
+            appender.reset(writeBuffer, true);
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    /**
+     * Merge-style advance over one probe frame: while the probe key has not
+     * passed the current index tuple (direction per isForward), output a match
+     * on equality and move to the next probe tuple; otherwise advance the index
+     * cursor, emitting null-padded outer results for unmatched index tuples.
+     */
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        try {
+            for (int i = 0; i < tupleCount && currentTopTuple != null;) {
+                if (lowKey != null)
+                    lowKey.reset(accessor, i);
+                if (highKey != null)
+                    highKey.reset(accessor, i);
+                // TODO: currently use low key only, check what they mean
+                int cmp = compare(lowKey, currentTopTuple);
+                if ((cmp <= 0 && isForward) || (cmp >= 0 && !isForward)) {
+                    if (cmp == 0)
+                        outputMatch(i);
+                    i++;
+                } else {
+                    moveTreeCursor();
+                }
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    // Emit the matching (probe, index) pair and remember the index tuple matched.
+    private void outputMatch(int i) throws Exception {
+        writeResults(accessor, i, currentTopTuple);
+        match = true;
+    }
+
+    // Advance the index scan; if the departing tuple never matched, first emit
+    // it with null probe-side fields (the right-outer part of the join).
+    private void moveTreeCursor() throws Exception {
+        if (!match) {
+            writeResults(currentTopTuple);
+        }
+        if (cursor.hasNext()) {
+            cursor.next();
+            currentTopTuple = cursor.getTuple();
+            match = false;
+        } else {
+            currentTopTuple = null;
+        }
+    }
+
+    /**
+     * Drains the remaining index tuples (emitting outer results), flushes the
+     * last partial frame, closes the writer and cursor, and always releases the
+     * index helper's resources.
+     */
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            while (currentTopTuple != null) {
+                moveTreeCursor();
+            }
+
+            if (appender.getTupleCount() > 0) {
+                FrameUtils.flushFrame(writeBuffer, writer);
+            }
+            writer.close();
+            try {
+                cursor.close();
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            treeIndexOpHelper.deinit();
+        }
+    }
+
+    /** Propagates a failure notification to the downstream writer. */
+    @Override
+    public void fail() throws HyracksDataException {
+        writer.fail();
+    }
+
+    /** compare tuples */
+    private int compare(ITupleReference left, ITupleReference right) throws Exception {
+        return lowKeySearchCmp.compare(left, right);
+    }
+
+    /** write result for outer case */
+    private void writeResults(ITupleReference frameTuple) throws Exception {
+        tb.reset();
+        // Null-pad the probe side, then append the index tuple's fields.
+        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
+            nullWriter[i].writeNull(dos);
+            tb.addFieldEndOffset();
+        }
+        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+            tb.addFieldEndOffset();
+        }
+
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            FrameUtils.flushFrame(writeBuffer, writer);
+            appender.reset(writeBuffer, true);
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                throw new IllegalStateException();
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
new file mode 100644
index 0000000..6af60a8
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
+
+public class IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable { // merge-style set union of the input stream with a B-tree scan; each result tuple is fed to an update function
+    private TreeIndexDataflowHelper treeIndexOpHelper;
+    private FrameTupleAccessor accessor;
+
+    private ByteBuffer writeBuffer;
+    private FrameTupleAppender appender;
+
+    private BTree btree;
+    private boolean isForward; // direction flag; flips the sign test on key comparisons in nextFrame()
+    private RangePredicate rangePred;
+    private MultiComparator lowKeySearchCmp;
+    private ITreeIndexCursor cursor;
+    private ITreeIndexFrame cursorFrame;
+    protected ITreeIndexAccessor indexAccessor;
+
+    private RecordDescriptor recDesc;
+    private PermutingFrameTupleReference lowKey;
+    private PermutingFrameTupleReference highKey;
+
+    private ITupleReference currentTopTuple; // tuple currently under the tree cursor; null once the scan is exhausted
+    private boolean match; // true when currentTopTuple matched an input tuple, suppressing its right-side output
+
+    private final IFrameWriter[] writers; // one output per function output arity
+    private final FunctionProxy functionProxy; // wraps the user update function plus pre/post hooks
+    private ArrayTupleBuilder cloneUpdateTb; // receives the updated record produced by the function, if any
+    private UpdateBuffer updateBuffer; // batches updated records for bulk application to the B-tree
+
+    public IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
+            int[] lowKeyFields, int[] highKeyFields, IUpdateFunctionFactory functionFactory,
+            IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory,
+            IRecordDescriptorFactory inputRdFactory, int outputArity) {
+        treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.isForward = isForward;
+        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+
+        if (lowKeyFields != null && lowKeyFields.length > 0) {
+            lowKey = new PermutingFrameTupleReference(); // projects the low-key fields out of each input tuple
+            lowKey.setFieldPermutation(lowKeyFields);
+        }
+        if (highKeyFields != null && highKeyFields.length > 0) {
+            highKey = new PermutingFrameTupleReference(); // high key is reset per tuple but unused below (see TODO in nextFrame)
+            highKey.setFieldPermutation(highKeyFields);
+        }
+
+        this.writers = new IFrameWriter[outputArity];
+        this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
+                writers);
+        this.updateBuffer = new UpdateBuffer(ctx, 2); // 2-frame staging buffer for batched B-tree updates
+    }
+
+    protected void setCursor() {
+        cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, true);
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        functionProxy.functionOpen(); // open function/hooks before any tuples flow
+        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+
+        try {
+            treeIndexOpHelper.init(false);
+            btree = (BTree) treeIndexOpHelper.getIndex();
+            cursorFrame = btree.getLeafFrameFactory().createFrame();
+            setCursor();
+
+            rangePred = new RangePredicate(null, null, true, true, null, null);
+            int lowKeySearchFields = btree.getComparatorFactories().length;
+            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
+            for (int i = 0; i < lowKeySearchFields; i++) {
+                lowKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+            }
+            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
+
+            writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
+            appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+
+            indexAccessor = btree.createAccessor();
+
+            /** open a full-range scan over the B-tree (null low/high keys) */
+            rangePred.setLowKey(null, true);
+            rangePred.setHighKey(null, true);
+            cursor.reset();
+            indexAccessor.search(cursor, rangePred);
+
+            /** position on the first tree tuple, if any */
+            if (cursor.hasNext()) {
+                cursor.next();
+                currentTopTuple = cursor.getTuple();
+                match = false;
+            }
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
+        } catch (Exception e) {
+            treeIndexOpHelper.deinit(); // release index resources if open fails partway
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        try {
+            for (int i = 0; i < tupleCount;) { // i only advances when the input tuple is consumed
+                if (lowKey != null)
+                    lowKey.reset(accessor, i);
+                if (highKey != null)
+                    highKey.reset(accessor, i);
+                // TODO: currently use low key only, check what they mean
+                if (currentTopTuple != null) {
+                    int cmp = compare(lowKey, currentTopTuple); // assumes lowKey is non-null here -- TODO confirm callers always supply lowKeyFields
+                    if (cmp == 0) {
+                        outputMatch(i); // keys equal: join input tuple with the tree tuple
+                        i++;
+                    } else if ((cmp > 0 && isForward) || (cmp < 0 && !isForward)) {
+                        moveTreeCursor(); // tree tuple is behind the input tuple: emit it alone and advance
+                    } else {
+                        writeLeftResults(accessor, i, null); // input tuple is behind the tree tuple: emit it alone
+                        i++;
+                    }
+                } else {
+                    writeLeftResults(accessor, i, null); // tree scan exhausted: remaining input passes straight through
+                    i++;
+                }
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void outputMatch(int i) throws Exception {
+        writeLeftResults(accessor, i, currentTopTuple);
+        match = true; // mark so moveTreeCursor() does not emit this tree tuple again as a right-side result
+    }
+
+    private void moveTreeCursor() throws Exception {
+        if (!match) {
+            writeRightResults(currentTopTuple); // unmatched tree tuple: emit it on its own
+        }
+        if (cursor.hasNext()) {
+            cursor.next();
+            currentTopTuple = cursor.getTuple();
+            match = false;
+        } else {
+            currentTopTuple = null; // scan exhausted
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            while (currentTopTuple != null) {
+                moveTreeCursor(); // drain remaining (unmatched) tree tuples
+            }
+            try {
+                cursor.close();
+
+                // apply any updates still buffered after the last flush
+                updateBuffer.updateBTree(indexAccessor);
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+            functionProxy.functionClose();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            treeIndexOpHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        for (IFrameWriter writer : writers)
+            writer.fail(); // propagate the failure to every output
+    }
+
+    /** Compares two tuples with the low-key search comparator; returns <0, 0, or >0. */
+    private int compare(ITupleReference left, ITupleReference right) throws Exception {
+        return lowKeySearchCmp.compare(left, right);
+    }
+
+    /** Feeds an unmatched tree tuple to the update function and buffers any resulting B-tree update. */
+    private void writeRightResults(ITupleReference frameTuple) throws Exception {
+        functionProxy.functionCall(frameTuple, cloneUpdateTb);
+
+        // the function produced an updated record; buffer it for batch application
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                // buffer full: release the cursor/latch before writing to the B-tree
+                cursor.close();
+                // apply the buffered updates in one batch
+                updateBuffer.updateBTree(indexAccessor);
+
+                // re-open the scan positioned at the current tuple
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
+    }
+
+    /** Feeds an input tuple (optionally joined with a tree tuple) to the update function and buffers any resulting B-tree update. */
+    private void writeLeftResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
+            throws Exception {
+        functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb);
+
+        // the function produced an updated record; buffer it for batch application
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                // buffer full: release the cursor/latch before writing to the B-tree
+                cursor.close();
+                // apply the buffered updates in one batch
+                updateBuffer.updateBTree(indexAccessor);
+
+                // re-open the scan; frameTuple may be null here, giving an open low key -- TODO confirm intended
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
+    }
+
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+        writers[index] = writer; // wired by the framework before open()
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionOperatorNodePushable.java
new file mode 100644
index 0000000..615a25b
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionOperatorNodePushable.java
@@ -0,0 +1,257 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public class IndexNestedLoopSetUnionOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable { // merge-style set union of the input stream with a B-tree scan; results are written to a single output
+    private TreeIndexDataflowHelper treeIndexOpHelper;
+    private FrameTupleAccessor accessor;
+
+    private ByteBuffer writeBuffer;
+    private FrameTupleAppender appender;
+    private ArrayTupleBuilder tb;
+    private DataOutput dos; // data output backed by tb, used to copy field bytes
+
+    private BTree btree;
+    private RangePredicate rangePred;
+    private MultiComparator lowKeySearchCmp;
+    private ITreeIndexCursor cursor;
+    private ITreeIndexFrame cursorFrame;
+    protected ITreeIndexAccessor indexAccessor;
+
+    private RecordDescriptor recDesc;
+    private final RecordDescriptor inputRecDesc;
+
+    private PermutingFrameTupleReference lowKey;
+    private PermutingFrameTupleReference highKey;
+
+    private ITupleReference currentTopTuple; // tuple currently under the tree cursor; null once the scan is exhausted
+    private boolean match; // true when currentTopTuple matched an input tuple, suppressing its right-side output
+
+    public IndexNestedLoopSetUnionOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward, // NOTE(review): isForward is accepted but never stored; nextFrame() assumes forward order -- confirm
+            int[] lowKeyFields, int[] highKeyFields) {
+        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+
+        if (lowKeyFields != null && lowKeyFields.length > 0) {
+            lowKey = new PermutingFrameTupleReference(); // projects the low-key fields out of each input tuple
+            lowKey.setFieldPermutation(lowKeyFields);
+        }
+        if (highKeyFields != null && highKeyFields.length > 0) {
+            highKey = new PermutingFrameTupleReference(); // high key is reset per tuple but unused below (see TODO in nextFrame)
+            highKey.setFieldPermutation(highKeyFields);
+        }
+    }
+
+    protected void setCursor() {
+        cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) cursorFrame, false);
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+
+        try {
+            treeIndexOpHelper.init(false);
+            btree = (BTree) treeIndexOpHelper.getIndex();
+            cursorFrame = btree.getLeafFrameFactory().createFrame();
+            setCursor();
+            writer.open(); // open downstream before producing any frames
+
+            rangePred = new RangePredicate(null, null, true, true, null, null);
+            int lowKeySearchFields = btree.getComparatorFactories().length;
+            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
+            for (int i = 0; i < lowKeySearchFields; i++) {
+                lowKeySearchComparators[i] = btree.getComparatorFactories()[i].createBinaryComparator();
+            }
+            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
+
+            writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
+            tb = new ArrayTupleBuilder(btree.getFieldCount());
+            dos = tb.getDataOutput();
+            appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
+            appender.reset(writeBuffer, true);
+
+            indexAccessor = btree.createAccessor();
+
+            /** open a full-range scan over the B-tree (null low/high keys) */
+            rangePred.setLowKey(null, true);
+            rangePred.setHighKey(null, true);
+            cursor.reset();
+            indexAccessor.search(cursor, rangePred);
+
+            /** position on the first tree tuple, if any */
+            if (cursor.hasNext()) {
+                cursor.next();
+                currentTopTuple = cursor.getTuple();
+                match = false;
+            }
+
+        } catch (Exception e) {
+            treeIndexOpHelper.deinit(); // release index resources if open fails partway
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        try {
+            for (int i = 0; i < tupleCount;) { // i only advances when the input tuple is consumed
+                if (lowKey != null)
+                    lowKey.reset(accessor, i);
+                if (highKey != null)
+                    highKey.reset(accessor, i);
+                // TODO: currently use low key only, check what they mean
+                if (currentTopTuple != null) {
+                    int cmp = compare(lowKey, currentTopTuple); // assumes lowKey is non-null here -- TODO confirm callers always supply lowKeyFields
+                    if (cmp == 0) {
+                        outputMatch(i); // keys equal: emit the input tuple once (union semantics)
+                        i++;
+                    } else if ((cmp > 0)) {
+                        moveTreeCursor(); // tree tuple is behind the input tuple: emit it alone and advance
+                    } else {
+                        writeLeftResults(accessor, i); // input tuple is behind the tree tuple: emit it alone
+                        i++;
+                    }
+                } else {
+                    writeLeftResults(accessor, i); // tree scan exhausted: remaining input passes straight through
+                    i++;
+                }
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void outputMatch(int i) throws Exception {
+        writeLeftResults(accessor, i);
+        match = true; // mark so moveTreeCursor() does not emit this tree tuple again as a right-side result
+    }
+
+    private void moveTreeCursor() throws Exception {
+        if (!match) {
+            writeRightResults(currentTopTuple); // unmatched tree tuple: emit it on its own
+        }
+        if (cursor.hasNext()) {
+            cursor.next();
+            currentTopTuple = cursor.getTuple();
+            match = false;
+        } else {
+            currentTopTuple = null; // scan exhausted
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            while (currentTopTuple != null) {
+                moveTreeCursor(); // drain remaining (unmatched) tree tuples
+            }
+
+            if (appender.getTupleCount() > 0) {
+                FrameUtils.flushFrame(writeBuffer, writer); // flush the final partially-filled frame
+            }
+            writer.close();
+            try {
+                cursor.close();
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            treeIndexOpHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        writer.fail(); // propagate the failure to the downstream frame writer
+    }
+
+    /** Compares two tuples with the low-key search comparator; returns <0, 0, or >0. */
+    private int compare(ITupleReference left, ITupleReference right) throws Exception {
+        return lowKeySearchCmp.compare(left, right);
+    }
+
+    /** Copies all fields of a B-tree tuple into the output frame. */
+    private void writeRightResults(ITupleReference frameTuple) throws Exception {
+        tb.reset();
+        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+            tb.addFieldEndOffset();
+        }
+
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            FrameUtils.flushFrame(writeBuffer, writer); // output frame full: flush downstream and retry
+            appender.reset(writeBuffer, true);
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                throw new IllegalStateException(); // tuple larger than an empty frame; cannot make progress
+            }
+        }
+    }
+
+    /** Copies all fields of an input-stream tuple into the output frame. */
+    private void writeLeftResults(IFrameTupleAccessor leftAccessor, int tIndex) throws Exception {
+        tb.reset();
+        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
+            int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
+            int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
+            int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart; // absolute byte offset of field i in the frame
+            int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
+            dos.write(leftAccessor.getBuffer().array(), offset, len);
+            tb.addFieldEndOffset();
+        }
+
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            FrameUtils.flushFrame(writeBuffer, writer); // output frame full: flush downstream and retry
+            appender.reset(writeBuffer, true);
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                throw new IllegalStateException(); // tuple larger than an empty frame; cannot make progress
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/ProjectOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/ProjectOperatorDescriptor.java
new file mode 100644
index 0000000..9f35e25
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/ProjectOperatorDescriptor.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+
+public class ProjectOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor { // projects each input tuple down to the given subset/ordering of fields
+    private static final long serialVersionUID = 1L;
+    private final int[] projectFields; // input field indices to keep, in output order
+
+    public ProjectOperatorDescriptor(JobSpecification spec, RecordDescriptor rDesc, int projectFields[]) {
+        super(spec, 1, 1); // one input, one output
+        this.recordDescriptors[0] = rDesc;
+        this.projectFields = projectFields;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
+            throws HyracksDataException {
+        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
+            private final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            private final FrameTupleAccessor fta = new FrameTupleAccessor(ctx.getFrameSize(), rd0);
+            private final FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+            private final ArrayTupleBuilder tb = new ArrayTupleBuilder(projectFields.length); // output tuple has one field per projected index
+            private final DataOutput dos = tb.getDataOutput();
+            private final ByteBuffer writeBuffer = ctx.allocateFrame();
+
+            @Override
+            public void close() throws HyracksDataException {
+                if (appender.getTupleCount() > 0)
+                    FrameUtils.flushFrame(writeBuffer, writer); // flush the final partially-filled frame
+                writer.close();
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                writer.fail(); // propagate the failure downstream
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
+                fta.reset(frame);
+                int tupleCount = fta.getTupleCount();
+                try {
+                    for (int tIndex = 0; tIndex < tupleCount; tIndex++) {
+                        tb.reset();
+                        for (int j = 0; j < projectFields.length; j++) {
+                            int fIndex = projectFields[j];
+                            int tupleStart = fta.getTupleStartOffset(tIndex);
+                            int fieldStart = fta.getFieldStartOffset(tIndex, fIndex);
+                            int offset = fta.getFieldSlotsLength() + tupleStart + fieldStart; // absolute byte offset of the projected field
+                            int len = fta.getFieldEndOffset(tIndex, fIndex) - fieldStart;
+                            dos.write(fta.getBuffer().array(), offset, len); // copy the field bytes verbatim
+                            tb.addFieldEndOffset();
+                        }
+                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                            FrameUtils.flushFrame(writeBuffer, writer); // output frame full: flush downstream and retry
+                            appender.reset(writeBuffer, true);
+                            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                                throw new IllegalStateException(); // tuple larger than an empty frame; cannot make progress
+                            }
+                        }
+                    }
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void open() throws HyracksDataException {
+                writer.open();
+                appender.reset(writeBuffer, true); // start with an empty output frame
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/RuntimeHookOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/RuntimeHookOperatorDescriptor.java
new file mode 100644
index 0000000..20f7aaa
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/RuntimeHookOperatorDescriptor.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHook;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+
+public class RuntimeHookOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor { // pass-through operator that runs a configurable hook when the runtime opens
+
+    private static final long serialVersionUID = 1L;
+    private final IRuntimeHookFactory hookFactory; // supplies the hook executed on open()
+
+    public RuntimeHookOperatorDescriptor(JobSpecification spec, IRuntimeHookFactory hookFactory) {
+        super(spec, 1, 1); // one input, one output
+        this.hookFactory = hookFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
+            private IRuntimeHook hook = hookFactory.createRuntimeHook();
+
+            @Override
+            public void open() throws HyracksDataException {
+                hook.configure(ctx); // run the hook once per task, before any frames flow
+                writer.open();
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                FrameUtils.flushFrame(buffer, writer); // forward frames unchanged
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                writer.fail(); // propagate the failure downstream
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                writer.close();
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorDescriptor.java
new file mode 100644
index 0000000..126fcb8
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorDescriptor.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.std;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public class TreeIndexBulkReLoadOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private final int[] fieldPermutation;
+    private final IStorageManagerInterface storageManager;
+    private final IIndexRegistryProvider<IIndex> treeIndexRegistryProvider;
+    private final IFileSplitProvider fileSplitProvider;
+    private final float fillFactor;
+
+    public TreeIndexBulkReLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
+            IIndexRegistryProvider<IIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
+            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation,
+            float fillFactor, IIndexDataflowHelperFactory opHelperFactory) {
+        super(spec, 1, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, opHelperFactory, null, false, NoOpOperationCallbackProvider.INSTANCE);
+        this.fieldPermutation = fieldPermutation;
+
+        this.storageManager = storageManager;
+        this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+        this.fileSplitProvider = fileSplitProvider;
+        this.fillFactor = fillFactor;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        return new TreeIndexBulkReLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor,
+                recordDescProvider, storageManager, treeIndexRegistryProvider, fileSplitProvider);
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorNodePushable.java
new file mode 100644
index 0000000..883fef4
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorNodePushable.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.std;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public class TreeIndexBulkReLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
+    private final TreeIndexDataflowHelper treeIndexOpHelper;
+    private FrameTupleAccessor accessor;
+    private IIndexBulkLoadContext bulkLoadCtx;
+
+    private IRecordDescriptorProvider recordDescProvider;
+    private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+
+    private final IStorageManagerInterface storageManager;
+    private final IIndexRegistryProvider<IIndex> treeIndexRegistryProvider;
+    private final IFileSplitProvider fileSplitProvider;
+    private final int partition;
+    private final float fillFactor;
+    private IHyracksTaskContext ctx;
+    private ITreeIndex index;
+
+    public TreeIndexBulkReLoadOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+            int partition, int[] fieldPermutation, float fillFactor, IRecordDescriptorProvider recordDescProvider,
+            IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
+            IFileSplitProvider fileSplitProvider) {
+        treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
+                opDesc, ctx, partition);
+        this.recordDescProvider = recordDescProvider;
+        tuple.setFieldPermutation(fieldPermutation);
+
+        this.storageManager = storageManager;
+        this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+        this.fileSplitProvider = fileSplitProvider;
+        this.partition = partition;
+        this.ctx = ctx;
+        this.fillFactor = fillFactor;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        initDrop();
+        init();
+    }
+
+    private void initDrop() throws HyracksDataException {
+        try {
+            IndexRegistry<IIndex> treeIndexRegistry = treeIndexRegistryProvider.getRegistry(ctx);
+            IBufferCache bufferCache = storageManager.getBufferCache(ctx);
+            IFileMapProvider fileMapProvider = storageManager.getFileMapProvider(ctx);
+
+            FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+            int indexFileId = -1;
+            boolean fileIsMapped = false;
+            synchronized (fileMapProvider) {
+                fileIsMapped = fileMapProvider.isMapped(f);
+                if (fileIsMapped)
+                    indexFileId = fileMapProvider.lookupFileId(f);
+            }
+
+            /**
+             * delete the file if it is mapped
+             */
+            if (fileIsMapped) {
+                // Unregister tree instance.
+                synchronized (treeIndexRegistry) {
+                    treeIndexRegistry.unregister(indexFileId);
+                }
+
+                // remove name to id mapping
+                bufferCache.deleteFile(indexFileId, false);
+            }
+        }
+        // TODO: for the time being we don't throw,
+        // with proper exception handling (no hanging job problem) we should
+        // throw
+        catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void init() throws HyracksDataException {
+        AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+                .getOperatorDescriptor();
+        RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+        try {
+            treeIndexOpHelper.init(true);
+            treeIndexOpHelper.getIndex().open(treeIndexOpHelper.getIndexFileId());
+            index = (ITreeIndex) treeIndexOpHelper.getIndex();
+            index.open(treeIndexOpHelper.getIndexFileId());
+            bulkLoadCtx = index.beginBulkLoad(fillFactor);
+        } catch (Exception e) {
+            // cleanup in case of failure
+            treeIndexOpHelper.deinit();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        accessor.reset(buffer);
+        int tupleCount = accessor.getTupleCount();
+        for (int i = 0; i < tupleCount; i++) {
+            tuple.reset(accessor, i);
+            index.bulkLoadAddTuple(tuple, bulkLoadCtx);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            index.endBulkLoad(bulkLoadCtx);
+        } finally {
+            treeIndexOpHelper.deinit();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
new file mode 100644
index 0000000..99bca1a
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.util;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunction;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+
+public class FunctionProxy {
+
+    private final IUpdateFunction function;
+    private final IRuntimeHookFactory preHookFactory;
+    private final IRuntimeHookFactory postHookFactory;
+    private final IRecordDescriptorFactory inputRdFactory;
+    private final IHyracksTaskContext ctx;
+    private final IFrameWriter[] writers;
+    private TupleDeserializer tupleDe;
+    private RecordDescriptor inputRd;
+    private ClassLoader ctxCL;
+
+    public FunctionProxy(IHyracksTaskContext ctx, IUpdateFunctionFactory functionFactory,
+            IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory,
+            IRecordDescriptorFactory inputRdFactory, IFrameWriter[] writers) {
+        this.function = functionFactory.createFunction();
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+        this.writers = writers;
+        this.ctx = ctx;
+    }
+
+    /**
+     * Initialize the function
+     * 
+     * @throws HyracksDataException
+     */
+    public void functionOpen() throws HyracksDataException {
+        inputRd = inputRdFactory.createRecordDescriptor();
+        tupleDe = new TupleDeserializer(inputRd);
+        ctxCL = Thread.currentThread().getContextClassLoader();
+        Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+        for (IFrameWriter writer : writers) {
+            writer.open();
+        }
+        if (preHookFactory != null)
+            preHookFactory.createRuntimeHook().configure(ctx);
+        function.open(ctx, inputRd, writers);
+    }
+
+    /**
+     * Call the function
+     * 
+     * @param leftAccessor
+     *            input page accessor
+     * @param leftTupleIndex
+     *            the tuple index in the page
+     * @param updateRef
+     *            update pointer
+     * @throws HyracksDataException
+     */
+    public void functionCall(IFrameTupleAccessor leftAccessor, int leftTupleIndex, ITupleReference right,
+            ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
+        Object[] tuple = tupleDe.deserializeRecord(leftAccessor, leftTupleIndex, right);
+        function.process(tuple);
+        function.update(right, cloneUpdateTb);
+    }
+
+    /**
+     * call function, without the newly generated tuple, just the tuple in btree
+     * 
+     * @param updateRef
+     * @throws HyracksDataException
+     */
+    public void functionCall(ITupleReference updateRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
+        Object[] tuple = tupleDe.deserializeRecord(updateRef);
+        function.process(tuple);
+        function.update(updateRef, cloneUpdateTb);
+    }
+
+    /**
+     * Call the function
+     * 
+     * @param tb
+     *            input data
+     * @param inPlaceUpdateRef
+     *            update pointer
+     * @throws HyracksDataException
+     */
+    public void functionCall(ArrayTupleBuilder tb, ITupleReference inPlaceUpdateRef, ArrayTupleBuilder cloneUpdateTb)
+            throws HyracksDataException {
+        Object[] tuple = tupleDe.deserializeRecord(tb, inPlaceUpdateRef);
+        function.process(tuple);
+        function.update(inPlaceUpdateRef, cloneUpdateTb);
+    }
+
+    /**
+     * Close the function
+     * 
+     * @throws HyracksDataException
+     */
+    public void functionClose() throws HyracksDataException {
+        if (postHookFactory != null)
+            postHookFactory.createRuntimeHook().configure(ctx);
+        function.close();
+        for (IFrameWriter writer : writers) {
+            writer.close();
+        }
+        Thread.currentThread().setContextClassLoader(ctxCL);
+    }
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/ResetableByteArrayInputStream.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/ResetableByteArrayInputStream.java
new file mode 100644
index 0000000..b6fd98a
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/ResetableByteArrayInputStream.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.io.InputStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
/**
 * An {@link InputStream} over a caller-supplied byte array that can be re-aimed
 * at a new array/offset via {@link #setByteArray(byte[], int)} without
 * allocating a new stream. The array is NOT copied; the caller retains
 * ownership.
 */
public class ResetableByteArrayInputStream extends InputStream {
    private static final Logger LOGGER = Logger.getLogger(ResetableByteArrayInputStream.class.getName());

    // Backing array (shared with the caller, never copied).
    private byte[] data;
    // Offset of the next byte to read.
    private int position;

    public ResetableByteArrayInputStream() {
    }

    /** Points this stream at {@code data}, starting reads at {@code position}. */
    public void setByteArray(byte[] data, int position) {
        this.data = data;
        this.position = position;
    }

    @Override
    public int read() {
        int remaining = data.length - position;
        int value;
        if (remaining > 0) {
            value = data[position] & 0xff;
            position++;
        } else {
            value = -1; // end of array
        }
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("read(): value: " + value + " remaining: " + remaining + " position: " + position);
        }
        return value;
    }

    @Override
    public int read(byte[] bytes, int offset, int length) {
        int remaining = data.length - position;
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("read(bytes[], int, int): remaining: " + remaining + " offset: " + offset + " length: "
                    + length + " position: " + position);
        }
        if (remaining == 0) {
            return -1;
        }
        // Copy as much as is available, up to the requested length.
        int count = length < remaining ? length : remaining;
        System.arraycopy(data, position, bytes, offset, count);
        position += count;
        return count;
    }
}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/ResetableByteArrayOutputStream.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/ResetableByteArrayOutputStream.java
new file mode 100644
index 0000000..88521a4
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/ResetableByteArrayOutputStream.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.io.OutputStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
/**
 * An {@link OutputStream} that writes into a caller-supplied byte array and can
 * be re-aimed at a new array/offset via {@link #setByteArray(byte[], int)}.
 * Writes past the end of the array throw {@link IndexOutOfBoundsException};
 * the array is never grown or copied.
 */
public class ResetableByteArrayOutputStream extends OutputStream {
    private static final Logger LOGGER = Logger.getLogger(ResetableByteArrayOutputStream.class.getName());

    // Backing array (shared with the caller, never copied).
    private byte[] data;
    // Offset of the next byte to write.
    private int position;

    public ResetableByteArrayOutputStream() {
    }

    /** Points this stream at {@code data}, starting writes at {@code position}. */
    public void setByteArray(byte[] data, int position) {
        this.data = data;
        this.position = position;
    }

    @Override
    public void write(int b) {
        int remaining = data.length - position;
        // FIX: the original check (position + 1 > data.length - 1) was
        // off-by-one and rejected a write into the last valid byte.
        if (position >= data.length)
            throw new IndexOutOfBoundsException();
        data[position] = (byte) b;
        position++;
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("write(): value: " + b + " remaining: " + remaining + " position: " + position);
        }
    }

    @Override
    public void write(byte[] bytes, int offset, int length) {
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("write(bytes[], int, int) offset: " + offset + " length: " + length + " position: "
                    + position);
        }
        // FIX: the original check (position + length > data.length - 1) was
        // off-by-one and rejected an exact-fit write that ends at data.length.
        if (position + length > data.length)
            throw new IndexOutOfBoundsException();
        System.arraycopy(bytes, offset, data, position, length);
        position += length;
    }
}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/TupleDeserializer.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/TupleDeserializer.java
new file mode 100644
index 0000000..4fe83db
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/TupleDeserializer.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameConstants;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class TupleDeserializer {
+    private static final Logger LOGGER = Logger.getLogger(TupleDeserializer.class.getName());
+
+    private Object[] record;
+    private RecordDescriptor recordDescriptor;
+    private ResetableByteArrayInputStream bbis;
+    private DataInputStream di;
+
+    public TupleDeserializer(RecordDescriptor recordDescriptor) {
+        this.recordDescriptor = recordDescriptor;
+        this.bbis = new ResetableByteArrayInputStream();
+        this.di = new DataInputStream(bbis);
+        this.record = new Object[recordDescriptor.getFields().length];
+    }
+
+    public Object[] deserializeRecord(ITupleReference tupleRef) throws HyracksDataException {
+        for (int i = 0; i < tupleRef.getFieldCount(); ++i) {
+            byte[] data = tupleRef.getFieldData(i);
+            int offset = tupleRef.getFieldStart(i);
+            bbis.setByteArray(data, offset);
+
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        return record;
+    }
+
+    public Object[] deserializeRecord(IFrameTupleAccessor left, int tIndex, ITupleReference right)
+            throws HyracksDataException {
+        byte[] data = left.getBuffer().array();
+        int tStart = left.getTupleStartOffset(tIndex) + left.getFieldSlotsLength();
+        int leftFieldCount = left.getFieldCount();
+        int fStart = tStart;
+        for (int i = 0; i < leftFieldCount; ++i) {
+            /**
+             * reset the input
+             */
+            fStart = tStart + left.getFieldStartOffset(tIndex, i);
+            bbis.setByteArray(data, fStart);
+
+            /**
+             * do deserialization
+             */
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        for (int i = leftFieldCount; i < record.length; ++i) {
+            byte[] rightData = right.getFieldData(i - leftFieldCount);
+            int rightOffset = right.getFieldStart(i - leftFieldCount);
+            bbis.setByteArray(rightData, rightOffset);
+
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        return record;
+    }
+
+    public Object[] deserializeRecord(ArrayTupleBuilder tb, ITupleReference right) throws HyracksDataException {
+        byte[] data = tb.getByteArray();
+        int[] offset = tb.getFieldEndOffsets();
+        int start = 0;
+        for (int i = 0; i < offset.length; ++i) {
+            /**
+             * reset the input
+             */
+            bbis.setByteArray(data, start);
+            start = offset[i];
+
+            /**
+             * do deserialization
+             */
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        for (int i = offset.length; i < record.length; ++i) {
+            byte[] rightData = right.getFieldData(i - offset.length);
+            int rightOffset = right.getFieldStart(i - offset.length);
+            bbis.setByteArray(rightData, rightOffset);
+
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        return record;
+    }
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBuffer.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBuffer.java
new file mode 100644
index 0000000..9a30647
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBuffer.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+
+/**
+ * The buffer to hold updates.
+ * We do a batch update for the B-tree during index search and join so that
+ * avoid to open/close cursors frequently.
+ */
+public class UpdateBuffer {
+
+    private int currentInUse = 0;
+    private final int pageLimit;
+    private final List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+    private final FrameTupleAppender appender;
+    private final IHyracksTaskContext ctx;
+    private final FrameTupleReference tuple = new FrameTupleReference();
+    private final int frameSize;
+    private IFrameTupleAccessor fta;
+
+    public UpdateBuffer(int numPages, IHyracksTaskContext ctx, int fieldCount) {
+        this.appender = new FrameTupleAppender(ctx.getFrameSize());
+        ByteBuffer buffer = ctx.allocateFrame();
+        this.buffers.add(buffer);
+        this.appender.reset(buffer, true);
+        this.pageLimit = numPages;
+        this.ctx = ctx;
+        this.frameSize = ctx.getFrameSize();
+        this.fta = new UpdateBufferTupleAccessor(frameSize, fieldCount);
+    }
+
+    public UpdateBuffer(IHyracksTaskContext ctx, int fieldCount) {
+        //by default, the update buffer has 1000 pages
+        this(1000, ctx, fieldCount);
+    }
+
+    public void setFieldCount(int fieldCount) {
+        if (fta.getFieldCount() != fieldCount) {
+            this.fta = new UpdateBufferTupleAccessor(frameSize, fieldCount);
+        }
+    }
+
+    public boolean appendTuple(ArrayTupleBuilder tb) throws HyracksDataException {
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            if (currentInUse + 1 < pageLimit) {
+                // move to the new buffer
+                currentInUse++;
+                allocate(currentInUse);
+                ByteBuffer buffer = buffers.get(currentInUse);
+                appender.reset(buffer, true);
+
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    throw new HyracksDataException("tuple cannot be appended to a new frame!");
+                }
+                return true;
+            } else {
+                return false;
+            }
+        } else {
+            return true;
+        }
+    }
+
+    public void updateBTree(ITreeIndexAccessor bta) throws HyracksDataException, IndexException {
+        // batch update
+        for (int i = 0; i <= currentInUse; i++) {
+            ByteBuffer buffer = buffers.get(i);
+            fta.reset(buffer);
+            for (int j = 0; j < fta.getTupleCount(); j++) {
+                tuple.reset(fta, j);
+                bta.update(tuple);
+            }
+        }
+
+        //cleanup the buffer
+        currentInUse = 0;
+        ByteBuffer buffer = buffers.get(0);
+        appender.reset(buffer, true);
+    }
+
+    private void allocate(int index) {
+        if (index >= buffers.size()) {
+            buffers.add(ctx.allocateFrame());
+        }
+    }
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBufferTupleAccessor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBufferTupleAccessor.java
new file mode 100644
index 0000000..39f1361
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBufferTupleAccessor.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+
+/**
+ * Read-only tuple accessor over the frames filled by the update buffer, for a
+ * fixed, known-in-advance field count.
+ *
+ * Frame layout as read by this class: a 4-byte tuple count lives at
+ * {@code FrameHelper.getTupleCountOffset(frameSize)}; 4-byte tuple end offsets
+ * grow backwards from that slot (tuple i ends at count-offset - 4*(i+1), and
+ * starts where tuple i-1 ended, or at 0 for the first tuple). Each tuple's
+ * data region begins with one 4-byte field end offset per field, followed by
+ * the field bytes; field offsets are relative to the end of that slot area.
+ *
+ * Not thread-safe: holds the frame passed to {@link #reset(ByteBuffer)}.
+ */
+public final class UpdateBufferTupleAccessor implements IFrameTupleAccessor {
+    private final int frameSize;   // frame size in bytes; fixes the tuple-count slot position
+    private final int fieldCount;  // fields per tuple, fixed at construction
+    private ByteBuffer buffer;     // frame currently wrapped; set by reset()
+
+    public UpdateBufferTupleAccessor(int frameSize, int fieldCount) {
+        this.frameSize = frameSize;
+        this.fieldCount = fieldCount;
+    }
+
+    /** Points this accessor at a new frame; no copying is done. */
+    @Override
+    public void reset(ByteBuffer buffer) {
+        this.buffer = buffer;
+    }
+
+    @Override
+    public ByteBuffer getBuffer() {
+        return buffer;
+    }
+
+    /** Number of tuples in the current frame, read from the count slot. */
+    @Override
+    public int getTupleCount() {
+        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize));
+    }
+
+    /** Tuple i starts where tuple i-1 ended; the first tuple starts at 0. */
+    @Override
+    public int getTupleStartOffset(int tupleIndex) {
+        return tupleIndex == 0 ? 0 : buffer.getInt(FrameHelper.getTupleCountOffset(frameSize) - 4 * tupleIndex);
+    }
+
+    /** End offsets are 4-byte ints stored backwards from the count slot. */
+    @Override
+    public int getTupleEndOffset(int tupleIndex) {
+        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize) - 4 * (tupleIndex + 1));
+    }
+
+    /** Field f starts where field f-1 ended; the first field starts at 0. */
+    @Override
+    public int getFieldStartOffset(int tupleIndex, int fIdx) {
+        return fIdx == 0 ? 0 : buffer.getInt(getTupleStartOffset(tupleIndex) + (fIdx - 1) * 4);
+    }
+
+    /** Reads field f's end offset from the slot area at the tuple's start. */
+    @Override
+    public int getFieldEndOffset(int tupleIndex, int fIdx) {
+        return buffer.getInt(getTupleStartOffset(tupleIndex) + fIdx * 4);
+    }
+
+    @Override
+    public int getFieldLength(int tupleIndex, int fIdx) {
+        return getFieldEndOffset(tupleIndex, fIdx) - getFieldStartOffset(tupleIndex, fIdx);
+    }
+
+    /** Bytes occupied by the per-tuple field-offset slots (4 per field). */
+    @Override
+    public int getFieldSlotsLength() {
+        return getFieldCount() * 4;
+    }
+
+    @Override
+    public int getFieldCount() {
+        return fieldCount;
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/pom.xml b/pregelix/pregelix-dataflow/pom.xml
new file mode 100644
index 0000000..c4ed479
--- /dev/null
+++ b/pregelix/pregelix-dataflow/pom.xml
@@ -0,0 +1,142 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>pregelix-dataflow</artifactId>
+	<packaging>jar</packaging>
+	<name>pregelix-dataflow</name>
+
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx512m -Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>teststore*</include>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-dataflow-std-base</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-data-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-btree</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-ipc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/ConnectorPolicyAssignmentPolicy.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/ConnectorPolicyAssignmentPolicy.java
new file mode 100644
index 0000000..d29afca
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/ConnectorPolicyAssignmentPolicy.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.PipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedPipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+
+/**
+ * Connector policy assignment for Pregelix jobs: m-to-n partitioning/merging
+ * connectors get send-side materialized pipelining (the sender spools frames
+ * to disk while pipelining), every other connector gets plain pipelining.
+ */
+public class ConnectorPolicyAssignmentPolicy implements IConnectorPolicyAssignmentPolicy {
+    private static final long serialVersionUID = 1L;
+    private IConnectorPolicy senderSideMaterializePolicy = new SendSideMaterializedPipeliningConnectorPolicy();
+    private IConnectorPolicy pipeliningPolicy = new PipeliningConnectorPolicy();
+
+    /** Chooses the policy purely from the connector's concrete type; the
+     *  producer/consumer counts and fanouts are ignored. */
+    @Override
+    public IConnectorPolicy getConnectorPolicyAssignment(IConnectorDescriptor c, int nProducers, int nConsumers,
+            int[] fanouts) {
+        if (c instanceof MToNPartitioningMergingConnectorDescriptor) {
+            return senderSideMaterializePolicy;
+        } else {
+            return pipeliningPolicy;
+        }
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/CountTupleOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/CountTupleOperatorDescriptor.java
new file mode 100644
index 0000000..7ad6aa5
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/CountTupleOperatorDescriptor.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+
+/**
+ * Diagnostic pass-through operator (1-in/1-out): forwards every frame
+ * unchanged while counting the tuples that flow through, and prints the
+ * total on close. Intended for debugging dataflow cardinalities.
+ */
+public class CountTupleOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * @param spec  the job this operator belongs to
+     * @param rDesc record descriptor of both the input and the (identical) output
+     */
+    public CountTupleOperatorDescriptor(JobSpecification spec, RecordDescriptor rDesc) {
+        super(spec, 1, 1);
+        this.recordDescriptors[0] = rDesc;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
+            throws HyracksDataException {
+        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
+            private final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            private final FrameTupleAccessor fta = new FrameTupleAccessor(ctx.getFrameSize(), rd0);
+            // running tuple total for this partition's lifetime
+            private int tupleCount = 0;
+
+            @Override
+            public void close() throws HyracksDataException {
+                // NOTE(review): debug output goes to stdout; consider a
+                // java.util.logging Logger so it can be levelled/redirected.
+                System.out.println(this.toString() + " tuple count " + tupleCount);
+                writer.close();
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                writer.fail();
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
+                // Count, then forward the frame untouched.
+                fta.reset(frame);
+                tupleCount += fta.getTupleCount();
+                writer.nextFrame(frame);
+            }
+
+            @Override
+            public void open() throws HyracksDataException {
+                writer.open();
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/EmptySinkOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/EmptySinkOperatorDescriptor.java
new file mode 100644
index 0000000..fab7198
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/EmptySinkOperatorDescriptor.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+
+/**
+ * Sink operator (1-in/0-out) that silently discards every frame it receives.
+ * Useful for terminating a dataflow branch whose results are not needed.
+ */
+public class EmptySinkOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    public EmptySinkOperatorDescriptor(JobSpecification spec) {
+        super(spec, 1, 0);
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        // All lifecycle callbacks are intentionally no-ops: the operator
+        // consumes (and drops) its input and produces nothing.
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+
+            @Override
+            public void open() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/EmptyTupleSourceOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/EmptyTupleSourceOperatorDescriptor.java
new file mode 100644
index 0000000..5276c82
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/EmptyTupleSourceOperatorDescriptor.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * Source operator (0-in/1-out) that emits exactly one tuple with zero fields
+ * and then closes. Typically used to trigger downstream operators that need
+ * a single activation "tick" rather than real data.
+ */
+public class EmptyTupleSourceOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    public EmptyTupleSourceOperatorDescriptor(JobSpecification spec) {
+        super(spec, 0, 1);
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
+            private ByteBuffer frame = ctx.allocateFrame();
+            // 0-field builder: the emitted tuple carries no payload.
+            private ArrayTupleBuilder tb = new ArrayTupleBuilder(0);
+            private FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+            @Override
+            public void initialize() throws HyracksDataException {
+                try {
+                    writer.open();
+                    appender.reset(frame, true);
+                    // Appending a zero-field tuple to an empty frame cannot
+                    // fail unless the frame is malformed.
+                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                        throw new IllegalStateException();
+                    }
+                    FrameUtils.flushFrame(frame, writer);
+                    writer.close();
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/FinalAggregateOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/FinalAggregateOperatorDescriptor.java
new file mode 100644
index 0000000..eda7754
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/FinalAggregateOperatorDescriptor.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+
+/**
+ * Sink operator (1-in/0-out) that folds partial global-aggregate values into
+ * a single final value. Each input tuple's first field is deserialized as a
+ * partial aggregate (Writable) and fed to the job's GlobalAggregator; on
+ * close the finished value is published for the job via IterationUtils.
+ */
+public class FinalAggregateOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    private final IConfigurationFactory confFactory; // builds the Hadoop conf on the task node
+    private final String jobId;                      // Pregelix job id, keys the published aggregate
+    private final IRecordDescriptorFactory inputRdFactory; // describes the incoming tuples
+
+    public FinalAggregateOperatorDescriptor(JobSpecification spec, IConfigurationFactory confFactory,
+            IRecordDescriptorFactory inputRdFactory, String jobId) {
+        super(spec, 1, 0);
+        this.confFactory = confFactory;
+        this.jobId = jobId;
+        this.inputRdFactory = inputRdFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+            private Configuration conf = confFactory.createConfiguration();
+            @SuppressWarnings("rawtypes")
+            private GlobalAggregator aggregator = BspUtils.createGlobalAggregator(conf);
+            private FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(),
+                    inputRdFactory.createRecordDescriptor());
+            // Stream view over the frame so Writable.readFields can consume bytes in place.
+            private ByteBufferInputStream inputStream = new ByteBufferInputStream();
+            private DataInput input = new DataInputStream(inputStream);
+            private Writable partialAggregateValue = BspUtils.createFinalAggregateValue(conf);
+
+            @Override
+            public void open() throws HyracksDataException {
+                aggregator.init();
+            }
+
+            @SuppressWarnings("unchecked")
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                accessor.reset(buffer);
+                int tupleCount = accessor.getTupleCount();
+                try {
+                    for (int i = 0; i < tupleCount; i++) {
+                        // Absolute offset of field 0's data: tuple start +
+                        // field-slot area + field 0's relative start.
+                        int start = accessor.getFieldSlotsLength() + accessor.getTupleStartOffset(i)
+                                + accessor.getFieldStartOffset(i, 0);
+                        inputStream.setByteBuffer(buffer, start);
+                        partialAggregateValue.readFields(input);
+                        aggregator.step(partialAggregateValue);
+                    }
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                // NOTE(review): intentionally empty — nothing to roll back;
+                // a failed run simply never publishes an aggregate.
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                // Finish the fold and publish the result for this job id.
+                Writable finalAggregateValue = aggregator.finishFinal();
+                IterationUtils.writeGlobalAggregateValue(conf, jobId, finalAggregateValue);
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
new file mode 100644
index 0000000..0133d76
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+
+/**
+ * Sink operator (1-in/0-out) that writes vertices to HDFS using the job's
+ * configured VertexOutputFormat. Each partition writes through its own
+ * VertexWriter into the output format's temporary attempt directory; on
+ * close, the attempt's output file is renamed to
+ * {@code <outputPath>/part-<partition>}.
+ */
+public class HDFSFileWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+    private final IConfigurationFactory confFactory;      // builds the Hadoop conf on the task node
+    private final IRecordDescriptorFactory inputRdFactory; // optional; overrides the provider's input descriptor
+
+    public HDFSFileWriteOperatorDescriptor(JobSpecification spec, IConfigurationFactory confFactory,
+            IRecordDescriptorFactory inputRdFactory) {
+        super(spec, 1, 0);
+        this.confFactory = confFactory;
+        this.inputRdFactory = inputRdFactory;
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+            throws HyracksDataException {
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+            private RecordDescriptor rd0;
+            private FrameDeserializer frameDeserializer;
+            private Configuration conf;
+            private VertexWriter vertexWriter;
+            private TaskAttemptContext context;
+            // Hadoop's temporary output directory suffix, used to locate the attempt dir.
+            private String TEMP_DIR = "_temporary";
+            // Saved context class loader, restored in fail()/moveFilesToFinalPath().
+            private ClassLoader ctxCL;
+
+            @Override
+            public void open() throws HyracksDataException {
+                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
+                        : inputRdFactory.createRecordDescriptor();
+                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
+                // Swap in this class's loader so Hadoop can resolve user classes
+                // from the job configuration; restored when the task ends.
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                conf = confFactory.createConfiguration();
+
+                VertexOutputFormat outputFormat = BspUtils.createVertexOutputFormat(conf);
+                // Synthetic attempt id: map task, attempt 0, task number = partition.
+                TaskAttemptID tid = new TaskAttemptID("", 0, true, partition, 0);
+                context = new TaskAttemptContext(conf, tid);
+                try {
+                    vertexWriter = outputFormat.createVertexWriter(context);
+                } catch (InterruptedException e) {
+                    throw new HyracksDataException(e);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @SuppressWarnings("unchecked")
+            @Override
+            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
+                frameDeserializer.reset(frame);
+                try {
+                    while (!frameDeserializer.done()) {
+                        Object[] tuple = frameDeserializer.deserializeRecord();
+                        // Field 1 of each record carries the Vertex; field 0 is
+                        // presumably the vertex key — TODO confirm against producer.
+                        Vertex value = (Vertex) tuple[1];
+                        vertexWriter.writeVertex(value);
+                    }
+                } catch (InterruptedException e) {
+                    throw new HyracksDataException(e);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                // NOTE(review): only restores the class loader; the open
+                // vertexWriter is not closed here — verify no handle leak on failure.
+                Thread.currentThread().setContextClassLoader(ctxCL);
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                try {
+                    vertexWriter.close(context);
+                    moveFilesToFinalPath();
+                } catch (InterruptedException e) {
+                    throw new HyracksDataException(e);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            /**
+             * Promotes this attempt's output from Hadoop's _temporary attempt
+             * directory to the final name part-<partition>, replacing any
+             * previous file of that name. Restores the context class loader
+             * whether or not the move succeeds.
+             */
+            private void moveFilesToFinalPath() throws HyracksDataException {
+                try {
+                    JobContext job = new JobContext(conf, new JobID("0", 0));
+                    Path outputPath = FileOutputFormat.getOutputPath(job);
+                    FileSystem dfs = FileSystem.get(conf);
+                    // NOTE(review): new Integer(partition).toString() — the
+                    // deprecated boxing is unnecessary; Integer.toString(partition)
+                    // would do the same.
+                    Path filePath = new Path(outputPath, "part-" + new Integer(partition).toString());
+                    // Locate the _temporary directory under the output path...
+                    FileStatus[] tempPaths = dfs.listStatus(outputPath, new PathFilter() {
+                        @Override
+                        public boolean accept(Path dir) {
+                            return dir.getName().endsWith(TEMP_DIR);
+                        }
+                    });
+                    Path tempDir = tempPaths[0].getPath();
+                    // ...then this task attempt's own subdirectory inside it.
+                    FileStatus[] results = dfs.listStatus(tempDir, new PathFilter() {
+                        @Override
+                        public boolean accept(Path dir) {
+                            return dir.getName().indexOf(context.getTaskAttemptID().toString()) >= 0;
+                        }
+                    });
+                    Path srcDir = results[0].getPath();
+                    if (!dfs.exists(srcDir))
+                        throw new HyracksDataException("file " + srcDir.toString() + " does not exist!");
+
+                    // Assumes the attempt dir contains exactly one output file — TODO confirm.
+                    FileStatus[] srcFiles = dfs.listStatus(srcDir);
+                    Path srcFile = srcFiles[0].getPath();
+                    dfs.delete(filePath, true);
+                    dfs.rename(srcFile, filePath);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingReadOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingReadOperatorDescriptor.java
new file mode 100644
index 0000000..b1bb555
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingReadOperatorDescriptor.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileReader;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.pregelix.dataflow.state.MaterializerTaskState;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+
+public class MaterializingReadOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    public MaterializingReadOperatorDescriptor(JobSpecification spec, RecordDescriptor recordDescriptor) {
+        super(spec, 1, 1);
+        recordDescriptors[0] = recordDescriptor;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
+        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
+            private ByteBuffer frame = ctx.allocateFrame();
+            private boolean complete = false;
+
+            @Override
+            public void open() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                if (!complete) {
+                    MaterializerTaskState state = (MaterializerTaskState) IterationUtils.getIterationState(ctx,
+                            partition);
+                    RunFileReader in = state.getRunFileWriter().createReader();
+                    writer.open();
+                    try {
+                        in.open();
+                        while (in.nextFrame(frame)) {
+                            frame.flip();
+                            writer.nextFrame(frame);
+                            frame.clear();
+                        }
+                        in.close();
+                    } catch (Exception e) {
+                        writer.fail();
+                        throw new HyracksDataException(e);
+                    } finally {
+                        /**
+                         * remove last iteration's state
+                         */
+                        IterationUtils.removeIterationState(ctx, partition);
+                        writer.close();
+                    }
+                    complete = true;
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingWriteOperatorDescriptor.java
new file mode 100644
index 0000000..efe5f1b
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingWriteOperatorDescriptor.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.IActivityGraphBuilder;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.TaskId;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileWriter;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractActivityNode;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.pregelix.dataflow.context.RuntimeContext;
+import edu.uci.ics.pregelix.dataflow.state.MaterializerTaskState;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+
+public class MaterializingWriteOperatorDescriptor extends AbstractOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+    private final static int MATERIALIZER_ACTIVITY_ID = 0;
+
+    public MaterializingWriteOperatorDescriptor(JobSpecification spec, RecordDescriptor recordDescriptor) {
+        super(spec, 1, 1);
+        recordDescriptors[0] = recordDescriptor;
+    }
+
+    @Override
+    public void contributeActivities(IActivityGraphBuilder builder) {
+        MaterializerActivityNode ma = new MaterializerActivityNode(new ActivityId(odId, MATERIALIZER_ACTIVITY_ID));
+
+        builder.addActivity(this, ma);
+        builder.addSourceEdge(0, ma, 0);
+        builder.addTargetEdge(0, ma, 0);
+    }
+
+    private final class MaterializerActivityNode extends AbstractActivityNode {
+        private static final long serialVersionUID = 1L;
+
+        public MaterializerActivityNode(ActivityId id) {
+            super(id);
+        }
+
+        @Override
+        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
+            return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
+                private MaterializerTaskState state;
+
+                @Override
+                public void open() throws HyracksDataException {
+                    state = new MaterializerTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(),
+                            partition));
+                    INCApplicationContext appContext = ctx.getJobletContext().getApplicationContext();
+                    RuntimeContext context = (RuntimeContext) appContext.getApplicationObject();
+                    FileReference file = context.createManagedWorkspaceFile(MaterializingWriteOperatorDescriptor.class
+                            .getSimpleName());
+                    state.setRunFileWriter(new RunFileWriter(file, ctx.getIOManager()));
+                    state.getRunFileWriter().open();
+                    writer.open();
+                }
+
+                @Override
+                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                    state.getRunFileWriter().nextFrame(buffer);
+                }
+
+                @Override
+                public void close() throws HyracksDataException {
+                    state.getRunFileWriter().close();
+                    /**
+                     * set iteration state
+                     */
+                    IterationUtils.setIterationState(ctx, partition, state);
+                    writer.close();
+                }
+
+                @Override
+                public void fail() throws HyracksDataException {
+                }
+            };
+        }
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/NonCombinerConnectorPolicyAssignmentPolicy.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/NonCombinerConnectorPolicyAssignmentPolicy.java
new file mode 100644
index 0000000..8023fe5
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/NonCombinerConnectorPolicyAssignmentPolicy.java
@@ -0,0 +1,24 @@
+package edu.uci.ics.pregelix.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.PipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedPipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+
+public class NonCombinerConnectorPolicyAssignmentPolicy implements IConnectorPolicyAssignmentPolicy {
+    private static final long serialVersionUID = 1L;
+    private IConnectorPolicy senderSideMaterializePolicy = new SendSideMaterializedPipeliningConnectorPolicy();
+    private IConnectorPolicy pipeliningPolicy = new PipeliningConnectorPolicy();
+
+    @Override
+    public IConnectorPolicy getConnectorPolicyAssignment(IConnectorDescriptor c, int nProducers, int nConsumers,
+            int[] fanouts) {
+        if (c instanceof MToNPartitioningConnectorDescriptor) {
+            return senderSideMaterializePolicy;
+        } else {
+            return pipeliningPolicy;
+        }
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/TerminationStateWriterOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/TerminationStateWriterOperatorDescriptor.java
new file mode 100644
index 0000000..88a0dda
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/TerminationStateWriterOperatorDescriptor.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.conf.Configuration;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+
+public class TerminationStateWriterOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+    private static final Logger LOGGER = Logger.getLogger(TerminationStateWriterOperatorDescriptor.class.getName());
+
+    private final IConfigurationFactory confFactory;
+    private final String jobId;
+
+    public TerminationStateWriterOperatorDescriptor(JobSpecification spec, IConfigurationFactory confFactory,
+            String jobId) {
+        super(spec, 1, 0);
+        this.confFactory = confFactory;
+        this.jobId = jobId;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+            private Configuration conf = confFactory.createConfiguration();
+            private boolean terminate = true;
+
+            @Override
+            public void open() throws HyracksDataException {
+                terminate = true;
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                terminate = false;
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                IterationUtils.writeTerminationState(conf, jobId, terminate);
+                LOGGER.info("close termination state");
+                if (terminate)
+                    LOGGER.info("write termination to HDFS");
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
new file mode 100644
index 0000000..a38b19e
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
+import edu.uci.ics.hyracks.hdfs2.dataflow.FileSplitsFactory;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexInputFormat;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+
+@SuppressWarnings("rawtypes")
+public class VertexFileScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+    private final FileSplitsFactory splitsFactory;
+    private final IConfigurationFactory confFactory;
+    private final int fieldSize = 2;
+    private final String[] scheduledLocations;
+    private final boolean[] executed;
+
+    /**
+     * @param spec
+     */
+    public VertexFileScanOperatorDescriptor(JobSpecification spec, RecordDescriptor rd, List<InputSplit> splits,
+            String[] scheduledLocations, IConfigurationFactory confFactory) throws HyracksException {
+        super(spec, 0, 1);
+        List<FileSplit> fileSplits = new ArrayList<FileSplit>();
+        for (int i = 0; i < splits.size(); i++) {
+            fileSplits.add((FileSplit) splits.get(i));
+        }
+        this.splitsFactory = new FileSplitsFactory(fileSplits);
+        this.confFactory = confFactory;
+        this.scheduledLocations = scheduledLocations;
+        this.executed = new boolean[scheduledLocations.length];
+        Arrays.fill(executed, false);
+        this.recordDescriptors[0] = rd;
+    }
+
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+        final List<FileSplit> splits = splitsFactory.getSplits();
+
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
+            private ClassLoader ctxCL;
+            private ContextFactory ctxFactory = new ContextFactory();
+
+            @Override
+            public void initialize() throws HyracksDataException {
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                try {
+                    Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                    Configuration conf = confFactory.createConfiguration();
+                    writer.open();
+                    for (int i = 0; i < scheduledLocations.length; i++) {
+                        if (scheduledLocations[i].equals(ctx.getJobletContext().getApplicationContext().getNodeId())) {
+                            /**
+                             * pick one from the FileSplit queue
+                             */
+                            synchronized (executed) {
+                                if (!executed[i]) {
+                                    executed[i] = true;
+                                } else {
+                                    continue;
+                                }
+                            }
+                            loadVertices(ctx, conf, i);
+                        }
+                    }
+                    writer.close();
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+
+            /**
+             * Load the vertices
+             * 
+             * @parameter IHyracks ctx
+             * @throws IOException
+             * @throws IllegalAccessException
+             * @throws InstantiationException
+             * @throws ClassNotFoundException
+             * @throws InterruptedException
+             */
+            @SuppressWarnings("unchecked")
+            private void loadVertices(final IHyracksTaskContext ctx, Configuration conf, int splitId)
+                    throws IOException, ClassNotFoundException, InterruptedException, InstantiationException,
+                    IllegalAccessException {
+                ByteBuffer frame = ctx.allocateFrame();
+                FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+                appender.reset(frame, true);
+
+                VertexInputFormat vertexInputFormat = BspUtils.createVertexInputFormat(conf);
+                TaskAttemptContext context = new TaskAttemptContext(conf, new TaskAttemptID());
+                InputSplit split = splits.get(splitId);
+
+                VertexReader vertexReader = vertexInputFormat.createVertexReader(split, context);
+                vertexReader.initialize(split, context);
+                Vertex readerVertex = (Vertex) BspUtils.createVertex(conf);
+                ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldSize);
+                DataOutput dos = tb.getDataOutput();
+
+                /**
+                 * set context
+                 */
+                TaskAttemptContext mapperContext = ctxFactory.createContext(conf, splits.get(splitId));
+                Vertex.setContext(mapperContext);
+
+                /**
+                 * empty vertex value
+                 */
+                Writable emptyVertexValue = (Writable) BspUtils.createVertexValue(conf);
+
+                while (vertexReader.nextVertex()) {
+                    readerVertex = vertexReader.getCurrentVertex();
+                    tb.reset();
+                    if (readerVertex.getVertexId() == null) {
+                        throw new IllegalArgumentException("loadVertices: Vertex reader returned a vertex "
+                                + "without an id!  - " + readerVertex);
+                    }
+                    if (readerVertex.getVertexValue() == null) {
+                        readerVertex.setVertexValue(emptyVertexValue);
+                    }
+                    WritableComparable vertexId = readerVertex.getVertexId();
+                    vertexId.write(dos);
+                    tb.addFieldEndOffset();
+
+                    readerVertex.write(dos);
+                    tb.addFieldEndOffset();
+
+                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                        if (appender.getTupleCount() <= 0)
+                            throw new IllegalStateException("zero tuples in a frame!");
+                        FrameUtils.flushFrame(frame, writer);
+                        appender.reset(frame, true);
+                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                            throw new IllegalStateException();
+                        }
+                    }
+                }
+
+                vertexReader.close();
+                if (appender.getTupleCount() > 0) {
+                    FrameUtils.flushFrame(frame, writer);
+                }
+                System.gc();
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java
new file mode 100644
index 0000000..d7cbb3a
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.util.StringSerializationUtils;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+
+public class VertexWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+    private final FileSplit[] splits;
+    private final IRuntimeHookFactory preHookFactory;
+    private final IRuntimeHookFactory postHookFactory;
+    private final IRecordDescriptorFactory inputRdFactory;
+
+    public VertexWriteOperatorDescriptor(JobSpecification spec, IRecordDescriptorFactory inputRdFactory,
+            IFileSplitProvider fileSplitProvider, IRuntimeHookFactory preHookFactory,
+            IRuntimeHookFactory postHookFactory) {
+        super(spec, 1, 0);
+        this.splits = fileSplitProvider.getFileSplits();
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+            throws HyracksDataException {
+        IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
+            private RecordDescriptor rd0;
+            private FrameDeserializer frameDeserializer;
+            private PrintWriter outputWriter;
+
+            @Override
+            public void open() throws HyracksDataException {
+                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
+                        : inputRdFactory.createRecordDescriptor();
+                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
+                try {
+                    outputWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(splits[partition]
+                            .getLocalFile().getFile())));
+                    if (preHookFactory != null)
+                        preHookFactory.createRuntimeHook().configure(ctx);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
+                frameDeserializer.reset(frame);
+                while (!frameDeserializer.done()) {
+                    Object[] tuple = frameDeserializer.deserializeRecord();
+                    // output the vertex
+                    outputWriter.print(StringSerializationUtils.toString(tuple[tuple.length - 1]));
+                    outputWriter.println();
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                if (postHookFactory != null)
+                    postHookFactory.createRuntimeHook().configure(ctx);
+                outputWriter.close();
+            }
+
+        };
+        return op;
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/base/IConfigurationFactory.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/base/IConfigurationFactory.java
new file mode 100644
index 0000000..b31f376
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/base/IConfigurationFactory.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.base;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.conf.Configuration;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
/**
 * Serializable factory for Hadoop {@link Configuration} objects, allowing a
 * configuration recipe to be shipped inside a job specification and
 * instantiated on worker nodes at runtime.
 */
public interface IConfigurationFactory extends Serializable {

    /**
     * Creates a fresh Hadoop configuration for the running task.
     *
     * @return a new {@link Configuration} instance
     * @throws HyracksDataException
     *             if the configuration cannot be constructed
     */
    public Configuration createConfiguration() throws HyracksDataException;

}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/PreDelayPageCleanerPolicy.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/PreDelayPageCleanerPolicy.java
new file mode 100644
index 0000000..2b07c81
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/PreDelayPageCleanerPolicy.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.context;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageCleanerPolicy;
+
+/**
+ * An {@link IPageCleanerPolicy} that makes the buffer-cache cleaner thread
+ * wait on the supplied monitor for a fixed delay before each clean cycle.
+ * Passing a very large delay (e.g. Long.MAX_VALUE, as RuntimeContext does)
+ * effectively suppresses flushing of dirty pages until a replacement victim
+ * cannot be found, at which point {@link #notifyVictimNotFound(Object)}
+ * wakes the cleaner immediately.
+ */
+public class PreDelayPageCleanerPolicy implements IPageCleanerPolicy {
+    /** Milliseconds the cleaner waits (on the caller-held monitor) per cycle. */
+    private final long delay;
+
+    public PreDelayPageCleanerPolicy(long delay) {
+        this.delay = delay;
+    }
+
+    @Override
+    public void notifyCleanCycleStart(Object monitor) throws HyracksDataException {
+        try {
+            // The caller must hold the monitor; wait() releases it for the delay.
+            monitor.wait(delay);
+        } catch (InterruptedException e) {
+            // Restore the interrupt status instead of swallowing it, so the
+            // cleaner thread (and anything above it) can still observe the
+            // interruption after the exception is wrapped.
+            Thread.currentThread().interrupt();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void notifyCleanCycleFinish(Object monitor) throws HyracksDataException {
+        // Intentionally a no-op: pacing happens entirely at cycle start.
+    }
+
+    @Override
+    public void notifyVictimNotFound(Object monitor) throws HyracksDataException {
+        // Wake the delayed cleaner as soon as the cache runs out of victims.
+        monitor.notifyAll();
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/RuntimeContext.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/RuntimeContext.java
new file mode 100644
index 0000000..567e220
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/RuntimeContext.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.context;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.smi.TransientFileMapManager;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+
+/**
+ * Node-controller-wide application object for Pregelix jobs.  Owns the shared
+ * buffer cache, file-map manager and tree-index registry, tracks superstep
+ * progress per Giraph job id, and hands out workspace files whose lifetime is
+ * tied to the superstep that created them.
+ *
+ * NOTE(review): one instance is shared by all tasks on a node controller; the
+ * mutable maps are concurrent, and superstep transitions are serialized via
+ * the synchronized methods below.
+ */
+public class RuntimeContext implements IWorkspaceFileFactory {
+    private static final Logger LOGGER = Logger.getLogger(RuntimeContext.class.getName());
+
+    // Registry of tree indexes shared by all tasks on this node controller.
+    private IndexRegistry<IIndex> treeIndexRegistry;
+    private IBufferCache bufferCache;
+    private IFileMapManager fileMapManager;
+    // Per (jobId, partition) operator state carried across iterations.
+    private Map<StateKey, IStateObject> appStateMap = new ConcurrentHashMap<StateKey, IStateObject>();
+    // Current superstep number per Giraph job id.
+    private Map<String, Long> giraphJobIdToSuperStep = new ConcurrentHashMap<String, Long>();
+    // Whether the next setVertexProperties call should advance the superstep
+    // (set to true by endSuperStep, reset to false once advanced).
+    private Map<String, Boolean> giraphJobIdToMove = new ConcurrentHashMap<String, Boolean>();
+    private IOManager ioManager;
+    // Workspace files created during each superstep, deleted once stale.
+    private Map<Long, List<FileReference>> iterationToFiles = new ConcurrentHashMap<Long, List<FileReference>>();
+
+    /**
+     * Builds the shared runtime, sizing the buffer cache at one quarter of
+     * the JVM max heap with 64KB pages.
+     *
+     * @param appCtx NC application context supplying the root IO manager
+     */
+    public RuntimeContext(INCApplicationContext appCtx) {
+        fileMapManager = new TransientFileMapManager();
+        ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+        IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
+        int pageSize = 64 * 1024;
+        long memSize = Runtime.getRuntime().maxMemory();
+        long bufferSize = memSize / 4;
+        int numPages = (int) (bufferSize / pageSize);
+        /** let the buffer cache never flush dirty pages */
+        bufferCache = new BufferCache(appCtx.getRootContext().getIOManager(), allocator, prs,
+                new PreDelayPageCleanerPolicy(Long.MAX_VALUE), fileMapManager, pageSize, numPages, 1000000);
+        treeIndexRegistry = new IndexRegistry<IIndex>();
+        ioManager = (IOManager) appCtx.getRootContext().getIOManager();
+    }
+
+    /** Releases all workspace files, the buffer cache, and cached app state. */
+    public void close() {
+        for (Entry<Long, List<FileReference>> entry : iterationToFiles.entrySet())
+            for (FileReference fileRef : entry.getValue())
+                fileRef.delete();
+
+        iterationToFiles.clear();
+        bufferCache.close();
+        appStateMap.clear();
+
+        // Explicit GC nudge after dropping large caches; presumably to return
+        // memory promptly between jobs -- TODO confirm this is still needed.
+        System.gc();
+    }
+
+    public IBufferCache getBufferCache() {
+        return bufferCache;
+    }
+
+    public IFileMapProvider getFileMapManager() {
+        return fileMapManager;
+    }
+
+    public IndexRegistry<IIndex> getTreeIndexRegistry() {
+        return treeIndexRegistry;
+    }
+
+    public Map<StateKey, IStateObject> getAppStateStore() {
+        return appStateMap;
+    }
+
+    /** Convenience accessor: the RuntimeContext registered as the NC application object. */
+    public static RuntimeContext get(IHyracksTaskContext ctx) {
+        return (RuntimeContext) ctx.getJobletContext().getApplicationContext().getApplicationObject();
+    }
+
+    /**
+     * Advances the superstep for the given job (at most once per iteration,
+     * guarded by the "toMove" flag), publishes vertex/edge counts to the
+     * static Vertex context, and deletes workspace files that are now stale.
+     */
+    public synchronized void setVertexProperties(String giraphJobId, long numVertices, long numEdges) {
+        Boolean toMove = giraphJobIdToMove.get(giraphJobId);
+        if (toMove == null || toMove == true) {
+            if (giraphJobIdToSuperStep.get(giraphJobId) == null) {
+                giraphJobIdToSuperStep.put(giraphJobId, 0L);
+            }
+
+            long superStep = giraphJobIdToSuperStep.get(giraphJobId);
+            // Drop files produced two supersteps before the one now starting;
+            // nothing can still be reading them.
+            List<FileReference> files = iterationToFiles.remove(superStep - 1);
+            if (files != null) {
+                for (FileReference fileRef : files)
+                    fileRef.delete();
+            }
+
+            Vertex.setSuperstep(++superStep);
+            Vertex.setNumVertices(numVertices);
+            Vertex.setNumEdges(numEdges);
+            giraphJobIdToSuperStep.put(giraphJobId, superStep);
+            // Only the first caller of this iteration advances the superstep.
+            giraphJobIdToMove.put(giraphJobId, false);
+            LOGGER.info("start iteration " + Vertex.getSuperstep());
+        }
+        System.gc();
+    }
+
+    /** Marks the iteration finished so the next setVertexProperties advances. */
+    public synchronized void endSuperStep(String giraphJobId) {
+        giraphJobIdToMove.put(giraphJobId, true);
+        LOGGER.info("end iteration " + Vertex.getSuperstep());
+    }
+
+    /**
+     * Creates a workspace file registered under the current superstep, so it
+     * is cleaned up automatically once that superstep becomes stale.
+     */
+    @Override
+    public FileReference createManagedWorkspaceFile(String prefix) throws HyracksDataException {
+        final FileReference fRef = ioManager.createWorkspaceFile(prefix);
+        List<FileReference> files = iterationToFiles.get(Vertex.getSuperstep());
+        if (files == null) {
+            files = new ArrayList<FileReference>();
+            iterationToFiles.put(Vertex.getSuperstep(), files);
+        }
+        files.add(fRef);
+        return fRef;
+    }
+
+    /** Creates a workspace file the caller is responsible for deleting. */
+    @Override
+    public FileReference createUnmanagedWorkspaceFile(String prefix) throws HyracksDataException {
+        return ioManager.createWorkspaceFile(prefix);
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/StateKey.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/StateKey.java
new file mode 100644
index 0000000..ae58802
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/StateKey.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.context;
+
+import edu.uci.ics.hyracks.api.job.JobId;
+
+/**
+ * Composite map key identifying a piece of cross-iteration operator state:
+ * the Hyracks job that produced it plus the task partition number.
+ * Immutable; suitable as a hash-map key.
+ */
+public class StateKey {
+    private final JobId jobId;
+    private final int partition;
+
+    public StateKey(JobId jobId, int partition) {
+        this.jobId = jobId;
+        this.partition = partition;
+    }
+
+    @Override
+    public int hashCode() {
+        // Combine with 31*h + partition rather than multiplying: the original
+        // jobId.hashCode() * partition collapsed every partition-0 key to
+        // hash 0 and erased the job id's contribution entirely.
+        return jobId.hashCode() * 31 + partition;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof StateKey))
+            return false;
+        StateKey key = (StateKey) o;
+        return key.jobId.equals(jobId) && key.partition == partition;
+    }
+
+    @Override
+    public String toString() {
+        return jobId.toString() + ":" + partition;
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/state/MaterializerTaskState.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/state/MaterializerTaskState.java
new file mode 100644
index 0000000..117e9f0
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/state/MaterializerTaskState.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.state;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.TaskId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.dataflow.common.io.RunFileWriter;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractStateObject;
+
+/**
+ * Task-local state object holding the {@link RunFileWriter} used to
+ * materialize an intermediate result so a downstream activity of the same
+ * task can re-read it.  Byte serialization is deliberately unsupported:
+ * this state never leaves the node it was created on.
+ */
+public class MaterializerTaskState extends AbstractStateObject {
+    // Writer for the materialized run file; set after construction.
+    private RunFileWriter out;
+
+    public MaterializerTaskState() {
+    }
+
+    public MaterializerTaskState(JobId jobId, TaskId taskId) {
+        super(jobId, taskId);
+    }
+
+    @Override
+    public void toBytes(DataOutput out) throws IOException {
+        // Node-local state: shipping it across the network is a usage error.
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void fromBytes(DataInput in) throws IOException {
+        // Node-local state: shipping it across the network is a usage error.
+        throw new UnsupportedOperationException();
+    }
+
+    public RunFileWriter getRunFileWriter() {
+        return out;
+    }
+
+    public void setRunFileWriter(RunFileWriter out) {
+        this.out = out;
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/util/IterationUtils.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/util/IterationUtils.java
new file mode 100644
index 0000000..dd562ba
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/util/IterationUtils.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.context.RuntimeContext;
+import edu.uci.ics.pregelix.dataflow.context.StateKey;
+
+public class IterationUtils {
+    public static final String TMP_DIR = "/tmp/";
+
+    public static void setIterationState(IHyracksTaskContext ctx, int partition, IStateObject state) {
+        INCApplicationContext appContext = ctx.getJobletContext().getApplicationContext();
+        RuntimeContext context = (RuntimeContext) appContext.getApplicationObject();
+        Map<StateKey, IStateObject> map = context.getAppStateStore();
+        map.put(new StateKey(ctx.getJobletContext().getJobId(), partition), state);
+    }
+
+    public static IStateObject getIterationState(IHyracksTaskContext ctx, int partition) {
+        JobId currentId = ctx.getJobletContext().getJobId();
+        JobId lastId = new JobId(currentId.getId() - 1);
+        INCApplicationContext appContext = ctx.getJobletContext().getApplicationContext();
+        RuntimeContext context = (RuntimeContext) appContext.getApplicationObject();
+        Map<StateKey, IStateObject> map = context.getAppStateStore();
+        IStateObject state = map.get(new StateKey(lastId, partition));
+        return state;
+    }
+
+    public static void removeIterationState(IHyracksTaskContext ctx, int partition) {
+        JobId currentId = ctx.getJobletContext().getJobId();
+        JobId lastId = new JobId(currentId.getId() - 1);
+        INCApplicationContext appContext = ctx.getJobletContext().getApplicationContext();
+        RuntimeContext context = (RuntimeContext) appContext.getApplicationObject();
+        Map<StateKey, IStateObject> map = context.getAppStateStore();
+        map.remove(new StateKey(lastId, partition));
+    }
+
+    public static void endSuperStep(String giraphJobId, IHyracksTaskContext ctx) {
+        INCApplicationContext appContext = ctx.getJobletContext().getApplicationContext();
+        RuntimeContext context = (RuntimeContext) appContext.getApplicationObject();
+        context.endSuperStep(giraphJobId);
+    }
+
+    public static void setProperties(String giraphJobId, IHyracksTaskContext ctx, Configuration conf) {
+        INCApplicationContext appContext = ctx.getJobletContext().getApplicationContext();
+        RuntimeContext context = (RuntimeContext) appContext.getApplicationObject();
+        context.setVertexProperties(giraphJobId, conf.getLong(PregelixJob.NUM_VERTICE, -1),
+                conf.getLong(PregelixJob.NUM_EDGES, -1));
+    }
+
+    public static void writeTerminationState(Configuration conf, String jobId, boolean terminate)
+            throws HyracksDataException {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + jobId;
+            Path path = new Path(pathStr);
+            FSDataOutputStream output = dfs.create(path, true);
+            output.writeBoolean(terminate);
+            output.flush();
+            output.close();
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public static void writeForceTerminationState(Configuration conf, String jobId) throws HyracksDataException {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + jobId + "fterm";
+            Path path = new Path(pathStr);
+            if (!dfs.exists(path)) {
+                FSDataOutputStream output = dfs.create(path, true);
+                output.writeBoolean(true);
+                output.flush();
+                output.close();
+            }
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public static void writeGlobalAggregateValue(Configuration conf, String jobId, Writable agg)
+            throws HyracksDataException {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + jobId + "agg";
+            Path path = new Path(pathStr);
+            FSDataOutputStream output = dfs.create(path, true);
+            agg.write(output);
+            output.flush();
+            output.close();
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public static boolean readTerminationState(Configuration conf, String jobId) throws HyracksDataException {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + jobId;
+            Path path = new Path(pathStr);
+            FSDataInputStream input = dfs.open(path);
+            boolean terminate = input.readBoolean();
+            input.close();
+            return terminate;
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public static boolean readForceTerminationState(Configuration conf, String jobId) throws HyracksDataException {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + jobId + "fterm";
+            Path path = new Path(pathStr);
+            if (dfs.exists(path)) {
+                return true;
+            } else {
+                return false;
+            }
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public static Writable readGlobalAggregateValue(Configuration conf, String jobId) throws HyracksDataException {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + jobId + "agg";
+            Path path = new Path(pathStr);
+            FSDataInputStream input = dfs.open(path);
+            Writable agg = BspUtils.createFinalAggregateValue(conf);
+            agg.readFields(input);
+            input.close();
+            return agg;
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+}
diff --git a/pregelix/pregelix-dist/pom.xml b/pregelix/pregelix-dist/pom.xml
new file mode 100644
index 0000000..847e843
--- /dev/null
+++ b/pregelix/pregelix-dist/pom.xml
@@ -0,0 +1,81 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+	<artifactId>pregelix-dist</artifactId>
+	<name>pregelix-dist</name>
+	<url>http://maven.apache.org</url>
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-resources-plugin</artifactId>
+				<version>2.5</version>
+				<executions>
+					<execution>
+						<id>copy-scripts</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler</outputDirectory>
+							<resources>
+								<resource>
+									<directory>../pregelix-core/target/appassembler</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-examples</id>
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/examples</outputDirectory>
+							<resources>
+								<resource>
+									<directory>../pregelix-example/target</directory>
+									<includes>
+										<include>*.jar</include>
+									</includes>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-core</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-example</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/pregelix/pregelix-example/data/clique/clique.txt b/pregelix/pregelix-example/data/clique/clique.txt
new file mode 100755
index 0000000..08280e3
--- /dev/null
+++ b/pregelix/pregelix-example/data/clique/clique.txt
@@ -0,0 +1,7 @@
+1 2 3 4
+2 1 3 4 5
+3 1 2 4 5
+4 1 2 3
+5 6 7
+6 5 7
+7 5 6
diff --git a/pregelix/pregelix-example/data/webmap/webmap_link.txt b/pregelix/pregelix-example/data/webmap/webmap_link.txt
new file mode 100644
index 0000000..2bd5a0e
--- /dev/null
+++ b/pregelix/pregelix-example/data/webmap/webmap_link.txt
@@ -0,0 +1,20 @@
+0 1
+1 1 2
+2 1 2 3
+3 1 2 3 4
+4 1 2 3 4 5
+5 1 2 3 4 5 6
+6 1 2 3 4 5 6 7
+7 1 2 3 4 5 6 7 8
+8 1 2 3 4 5 6 7 8 9
+9 1 2 3 4 5 6 7 8 9 10
+10 11
+11 11 12
+12 11 12 13
+13 11 12 13 14
+14 11 12 13 14 15
+15 11 12 13 14 15 16
+16 11 12 13 14 15 16 17
+17 11 12 13 14 15 16 17 18
+18 11 12 13 14 15 16 17 18 19
+19 0 11 12 13 14 15 16 17 18 19
diff --git a/pregelix/pregelix-example/data/webmapcomplex/webmap_link.txt b/pregelix/pregelix-example/data/webmapcomplex/webmap_link.txt
new file mode 100644
index 0000000..6640a62
--- /dev/null
+++ b/pregelix/pregelix-example/data/webmapcomplex/webmap_link.txt
@@ -0,0 +1,23 @@
+0 1 50
+1 1 2
+2 1 2 3
+3 1 2 3 4
+4 1 2 3 4 5
+5 1 2 3 4 5 6
+6 1 2 3 4 5 6 7
+7 1 2 3 4 5 6 7 8
+8 1 2 3 4 5 6 7 8 9
+9 1 2 3 4 5 6 7 8 9 10
+10 11 99
+11 11 12 101
+12 11 12 13
+13 11 12 13 14
+14 11 12 13 14 15
+15 11 12 13 14 15 16
+16 11 12 13 14 15 16 17
+17 11 12 13 14 15 16 17 18
+18 11 12 13 14 15 16 17 18 19
+19 0 11 12 13 14 15 16 17 18 19
+21 22 23 24
+25
+27
\ No newline at end of file
diff --git a/pregelix/pregelix-example/pom.xml b/pregelix/pregelix-example/pom.xml
new file mode 100644
index 0000000..01c0d77
--- /dev/null
+++ b/pregelix/pregelix-example/pom.xml
@@ -0,0 +1,122 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>pregelix-example</artifactId>
+	<packaging>jar</packaging>
+	<name>pregelix-example</name>
+
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<configuration>
+					<descriptorRefs>
+						<descriptorRef>jar-with-dependencies</descriptorRef>
+					</descriptorRefs>
+				</configuration>
+				<executions>
+					<execution>
+						<id>make-my-jar-with-dependencies</id>
+						<phase>package</phase>
+						<goals>
+							<goal>single</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>appassembler-maven-plugin</artifactId>
+				<version>1.3</version>
+				<executions>
+					<execution>
+						<configuration>
+							<programs>
+								<program>
+									<mainClass>edu.uci.ics.pregelix.example.Client</mainClass>
+									<name>pregelix</name>
+								</program>
+							</programs>
+							<repositoryLayout>flat</repositoryLayout>
+							<repositoryName>lib</repositoryName>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>assemble</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx512m -XX:MaxPermSize=300m -Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>teststore*</include>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+								<include>edu.uci.*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-core</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.google.collections</groupId>
+			<artifactId>google-collections</artifactId>
+			<version>1.0</version>
+		</dependency>
+	</dependencies>
+
+</project>
diff --git a/hyracks-cli/src/main/assembly/binary-assembly.xml b/pregelix/pregelix-example/src/main/assembly/binary-assembly.xml
old mode 100644
new mode 100755
similarity index 100%
copy from hyracks-cli/src/main/assembly/binary-assembly.xml
copy to pregelix/pregelix-example/src/main/assembly/binary-assembly.xml
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
new file mode 100644
index 0000000..74ae455
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.Edge;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * Demonstrates the basic Pregel connected components implementation, for undirected graph (e.g., Facebook, LinkedIn graph).
+ */
+public class ConnectedComponentsVertex extends Vertex<VLongWritable, VLongWritable, FloatWritable, VLongWritable> {
+    /**
+     * Combiner that reduces all messages bound for one vertex to the minimum
+     * vertex id seen, first within a partition (stepPartial) and then across
+     * partitions (stepFinal). Also serves to test that the combiner is called.
+     */
+    public static class SimpleMinCombiner extends MessageCombiner<VLongWritable, VLongWritable, VLongWritable> {
+        // Running minimum over all ids observed since the last init().
+        private long min = Long.MAX_VALUE;
+        // Reusable output buffer (avoids a per-message allocation).
+        private VLongWritable agg = new VLongWritable();
+        private MsgList<VLongWritable> msgList;
+
+        @Override
+        public void stepPartial(VLongWritable vertexIndex, VLongWritable msg) throws HyracksDataException {
+            long value = msg.get();
+            if (min > value)
+                min = value;
+        }
+
+        @SuppressWarnings({ "rawtypes", "unchecked" })
+        @Override
+        public void init(MsgList msgList) {
+            // Reset accumulator state for the next destination vertex.
+            min = Long.MAX_VALUE;
+            this.msgList = msgList;
+        }
+
+        @Override
+        public void stepFinal(VLongWritable vertexIndex, VLongWritable partialAggregate) throws HyracksDataException {
+            if (min > partialAggregate.get())
+                min = partialAggregate.get();
+        }
+
+        @Override
+        public VLongWritable finishPartial() {
+            agg.set(min);
+            return agg;
+        }
+
+        @Override
+        public MsgList<VLongWritable> finishFinal() {
+            // Emit the overall minimum as a single-element message list.
+            agg.set(min);
+            msgList.clear();
+            msgList.add(agg);
+            return msgList;
+        }
+    }
+
+    // Reusable message buffer; assumes sendMsg copies/serializes the value
+    // before this buffer is overwritten -- TODO confirm against Vertex.sendMsg.
+    private VLongWritable outputValue = new VLongWritable();
+    // Reusable buffer for updating this vertex's component id.
+    private VLongWritable tmpVertexValue = new VLongWritable();
+    // Smallest vertex id known to this vertex in the current superstep.
+    private long minID;
+
+    @Override
+    public void compute(Iterator<VLongWritable> msgIterator) {
+        if (getSuperstep() == 1) {
+            // Superstep 1: seed with the minimum of our own id and all
+            // neighbor ids, then propagate it.
+            minID = getVertexId().get();
+            List<Edge<VLongWritable, FloatWritable>> edges = this.getEdges();
+            for (int i = 0; i < edges.size(); i++) {
+                Edge<VLongWritable, FloatWritable> edge = edges.get(i);
+                long neighbor = edge.getDestVertexId().get();
+                if (minID > neighbor) {
+                    minID = neighbor;
+                }
+            }
+            tmpVertexValue.set(minID);
+            setVertexValue(tmpVertexValue);
+            sendOutMsgs();
+        } else {
+            // Later supersteps: only update and re-propagate when an incoming
+            // id lowers our current component id, so the computation converges.
+            minID = getVertexId().get();
+            while (msgIterator.hasNext()) {
+                minID = Math.min(minID, msgIterator.next().get());
+            }
+            if (minID < getVertexValue().get()) {
+                tmpVertexValue.set(minID);
+                setVertexValue(tmpVertexValue);
+                sendOutMsgs();
+            }
+        }
+        // Halt until reactivated by a message carrying a smaller id.
+        voteToHalt();
+    }
+
+    /** Sends the current minimum id to every out-edge neighbor. */
+    private void sendOutMsgs() {
+        List<Edge<VLongWritable, FloatWritable>> edges = this.getEdges();
+        outputValue.set(minID);
+        for (int i = 0; i < edges.size(); i++) {
+            Edge<VLongWritable, FloatWritable> edge = edges.get(i);
+            sendMsg(edge.getDestVertexId(), outputValue);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    /** Command-line entry point; see {@link Client} for the supported options. */
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(ConnectedComponentsVertex.class.getSimpleName());
+        job.setVertexClass(ConnectedComponentsVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleConnectedComponentsVertexOutputFormat.class);
+        job.setMessageCombinerClass(ConnectedComponentsVertex.SimpleMinCombiner.class);
+        Client.run(args, job);
+    }
+
+    /**
+     * Simple VertexWriter that emits each vertex as an (id, componentId) text
+     * key/value pair.
+     */
+    public static class SimpleConnectedComponentsVertexWriter extends
+            TextVertexWriter<VLongWritable, VLongWritable, FloatWritable> {
+        public SimpleConnectedComponentsVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            super(lineRecordWriter);
+        }
+
+        @Override
+        public void writeVertex(Vertex<VLongWritable, VLongWritable, FloatWritable, ?> vertex) throws IOException,
+                InterruptedException {
+            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
+                    new Text(vertex.getVertexValue().toString()));
+        }
+    }
+
+    /**
+     * Output format for connected components results.
+     */
+    public static class SimpleConnectedComponentsVertexOutputFormat extends
+            TextVertexOutputFormat<VLongWritable, VLongWritable, FloatWritable> {
+
+        @Override
+        public VertexWriter<VLongWritable, VLongWritable, FloatWritable> createVertexWriter(TaskAttemptContext context)
+                throws IOException, InterruptedException {
+            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
+            return new SimpleConnectedComponentsVertexWriter(recordWriter);
+        }
+
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
new file mode 100644
index 0000000..b6d4da7
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
@@ -0,0 +1,222 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import com.google.common.collect.Maps;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.generated.GeneratedVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.generated.GeneratedVertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * Demonstrates the basic Pregel PageRank implementation.
+ */
+public class PageRankVertex extends Vertex<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+
+    /** Configuration key for the number of PageRank iterations to run. */
+    public static final String ITERATIONS = "HyracksPageRankVertex.iteration";
+    // Reusable message buffer; assumes sendMsgToAllEdges copies/serializes the
+    // value before this buffer is overwritten -- TODO confirm against Vertex.
+    private DoubleWritable outputValue = new DoubleWritable();
+    // Reusable buffer for this vertex's rank value.
+    private DoubleWritable tmpVertexValue = new DoubleWritable();
+    // Read lazily from the configuration on the first compute() call (default 10).
+    private int maxIteration = -1;
+
+    /**
+     * Combiner that sums the messages destined for one vertex, first within a
+     * partition (stepPartial) and then across partitions (stepFinal).
+     */
+    public static class SimpleSumCombiner extends MessageCombiner<VLongWritable, DoubleWritable, DoubleWritable> {
+        private double sum = 0.0;
+        // Reusable output buffer (avoids a per-message allocation).
+        private DoubleWritable agg = new DoubleWritable();
+        private MsgList<DoubleWritable> msgList;
+
+        @SuppressWarnings({ "rawtypes", "unchecked" })
+        @Override
+        public void init(MsgList msgList) {
+            sum = 0.0;
+            this.msgList = msgList;
+        }
+
+        @Override
+        public void stepPartial(VLongWritable vertexIndex, DoubleWritable msg) throws HyracksDataException {
+            sum += msg.get();
+        }
+
+        @Override
+        public DoubleWritable finishPartial() {
+            agg.set(sum);
+            return agg;
+        }
+
+        @Override
+        public void stepFinal(VLongWritable vertexIndex, DoubleWritable partialAggregate) throws HyracksDataException {
+            sum += partialAggregate.get();
+        }
+
+        @Override
+        public MsgList<DoubleWritable> finishFinal() {
+            agg.set(sum);
+            msgList.clear();
+            msgList.add(agg);
+            return msgList;
+        }
+    }
+
+    @Override
+    public void compute(Iterator<DoubleWritable> msgIterator) {
+        if (maxIteration < 0) {
+            maxIteration = getContext().getConfiguration().getInt(ITERATIONS, 10);
+        }
+        if (getSuperstep() == 1) {
+            // Start every vertex with the uniform rank 1/N.
+            tmpVertexValue.set(1.0 / getNumVertices());
+            setVertexValue(tmpVertexValue);
+        }
+        if (getSuperstep() >= 2 && getSuperstep() <= maxIteration) {
+            // Standard PageRank update with damping factor 0.85.
+            double sum = 0;
+            while (msgIterator.hasNext()) {
+                sum += msgIterator.next().get();
+            }
+            tmpVertexValue.set((0.15 / getNumVertices()) + 0.85 * sum);
+            setVertexValue(tmpVertexValue);
+        }
+
+        if (getSuperstep() >= 1 && getSuperstep() < maxIteration) {
+            // Distribute this vertex's rank evenly over its out-edges.
+            long edges = getNumOutEdges();
+            outputValue.set(getVertexValue().get() / edges);
+            sendMsgToAllEdges(outputValue);
+        } else {
+            voteToHalt();
+        }
+    }
+
+    /**
+     * Simple VertexReader that supports {@link PageRankVertex}: generates a
+     * synthetic graph in which vertex i carries one edge to (i + 1) mod N.
+     */
+    public static class SimulatedPageRankVertexReader extends
+            GeneratedVertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+        /** Class logger */
+        private static final Logger LOG = Logger.getLogger(SimulatedPageRankVertexReader.class.getName());
+
+        public SimulatedPageRankVertexReader() {
+            super();
+        }
+
+        @Override
+        public boolean nextVertex() {
+            return totalRecords > recordsRead;
+        }
+
+        @Override
+        public Vertex<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> getCurrentVertex()
+                throws IOException {
+            Vertex<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> vertex = BspUtils
+                    .createVertex(configuration);
+
+            VLongWritable vertexId = new VLongWritable((inputSplit.getSplitIndex() * totalRecords) + recordsRead);
+            DoubleWritable vertexValue = new DoubleWritable(vertexId.get() * 10d);
+            long destVertexId = (vertexId.get() + 1) % (inputSplit.getNumSplits() * totalRecords);
+            float edgeValue = vertexId.get() * 100f;
+            // Build a fresh edge map per vertex. The previous implementation
+            // reused a single instance-level map, so every generated vertex
+            // accumulated the edges of all vertices read before it.
+            Map<VLongWritable, FloatWritable> edges = Maps.newHashMap();
+            edges.put(new VLongWritable(destVertexId), new FloatWritable(edgeValue));
+            vertex.initialize(vertexId, vertexValue, edges, null);
+            ++recordsRead;
+            if (LOG.getLevel() == Level.FINE) {
+                LOG.fine("next: Return vertexId=" + vertex.getVertexId().get() + ", vertexValue="
+                        + vertex.getVertexValue() + ", destinationId=" + destVertexId + ", edgeValue=" + edgeValue);
+            }
+            return vertex;
+        }
+    }
+
+    /**
+     * Simple VertexInputFormat that supports {@link PageRankVertex}.
+     */
+    public static class SimulatedPageRankVertexInputFormat extends
+            GeneratedVertexInputFormat<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+        @Override
+        public VertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> createVertexReader(
+                InputSplit split, TaskAttemptContext context) throws IOException {
+            return new SimulatedPageRankVertexReader();
+        }
+    }
+
+    /**
+     * Simple VertexWriter that supports {@link PageRankVertex}: emits each
+     * vertex as an (id, rank) text key/value pair.
+     */
+    public static class SimplePageRankVertexWriter extends
+            TextVertexWriter<VLongWritable, DoubleWritable, FloatWritable> {
+        public SimplePageRankVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            super(lineRecordWriter);
+        }
+
+        @Override
+        public void writeVertex(Vertex<VLongWritable, DoubleWritable, FloatWritable, ?> vertex) throws IOException,
+                InterruptedException {
+            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
+                    new Text(vertex.getVertexValue().toString()));
+        }
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    /**
+     * Simple VertexOutputFormat that supports {@link PageRankVertex}.
+     */
+    public static class SimplePageRankVertexOutputFormat extends
+            TextVertexOutputFormat<VLongWritable, DoubleWritable, FloatWritable> {
+
+        @Override
+        public VertexWriter<VLongWritable, DoubleWritable, FloatWritable> createVertexWriter(TaskAttemptContext context)
+                throws IOException, InterruptedException {
+            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
+            return new SimplePageRankVertexWriter(recordWriter);
+        }
+    }
+
+    /** Command-line entry point; see {@link Client} for the supported options. */
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(PageRankVertex.class.getSimpleName());
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+        Client.run(args, job);
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
new file mode 100644
index 0000000..0895386
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
@@ -0,0 +1,262 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.ByteWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.Edge;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.inputformat.TextReachibilityVertexInputFormat;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * Demonstrates the basic Pregel reachability query implementation, for undirected graphs (e.g., the Facebook or LinkedIn graph).
+ */
+public class ReachabilityVertex extends Vertex<VLongWritable, ByteWritable, FloatWritable, ByteWritable> {
+
+    public static class SimpleReachibilityCombiner extends MessageCombiner<VLongWritable, ByteWritable, ByteWritable> {
+        private ByteWritable agg = new ByteWritable();
+        private MsgList<ByteWritable> msgList;
+
+        @SuppressWarnings({ "rawtypes", "unchecked" })
+        @Override
+        public void init(MsgList msgList) {
+            this.msgList = msgList;
+            agg.set((byte) 0);
+        }
+
+        @Override
+        public void stepPartial(VLongWritable vertexIndex, ByteWritable msg) throws HyracksDataException {
+            int newState = agg.get() | msg.get();
+            agg.set((byte) newState);
+        }
+
+        @Override
+        public void stepFinal(VLongWritable vertexIndex, ByteWritable partialAggregate) throws HyracksDataException {
+            int newState = agg.get() | partialAggregate.get();
+            agg.set((byte) newState);
+        }
+
+        @Override
+        public ByteWritable finishPartial() {
+            return agg;
+        }
+
+        @Override
+        public MsgList<ByteWritable> finishFinal() {
+            msgList.clear();
+            msgList.add(agg);
+            return msgList;
+        }
+    }
+
+    private ByteWritable tmpVertexValue = new ByteWritable();
+    private long sourceId = -1;
+
+    /** The source vertex id */
+    public static final String SOURCE_ID = "ReachibilityVertex.sourceId";
+    /** The destination vertex id */
+    public static final String DEST_ID = "ReachibilityVertex.destId";
+    /** Default source vertex id */
+    public static final long SOURCE_ID_DEFAULT = 1;
+    /** Default destination vertex id */
+    public static final long DEST_ID_DEFAULT = 1;
+
+    /**
+     * Is this vertex the source id?
+     * 
+     * @return True if the source id
+     */
+    private boolean isSource(VLongWritable v) {
+        return (v.get() == sourceId);
+    }
+
+    /**
+     * Is this vertex the dest id?
+     * 
+     * @return True if the source id
+     */
+    private boolean isDest(VLongWritable v) {
+        return (v.get() == getContext().getConfiguration().getLong(DEST_ID, DEST_ID_DEFAULT));
+    }
+
+    @Override
+    public void compute(Iterator<ByteWritable> msgIterator) {
+        if (sourceId < 0) {
+            sourceId = getContext().getConfiguration().getLong(SOURCE_ID, SOURCE_ID_DEFAULT);
+        }
+        if (getSuperstep() == 1) {
+            boolean isSource = isSource(getVertexId());
+            if (isSource) {
+                tmpVertexValue.set((byte) 1);
+                setVertexValue(tmpVertexValue);
+            }
+            boolean isDest = isDest(getVertexId());
+            if (isDest) {
+                tmpVertexValue.set((byte) 2);
+                setVertexValue(tmpVertexValue);
+            }
+            if (isSource && isDest) {
+                signalTerminate();
+                return;
+            }
+            if (isSource || isDest) {
+                sendOutMsgs();
+            } else {
+                tmpVertexValue.set((byte) 0);
+                setVertexValue(tmpVertexValue);
+            }
+        } else {
+            while (msgIterator.hasNext()) {
+                ByteWritable msg = msgIterator.next();
+                int msgValue = msg.get();
+                if (msgValue < 3) {
+                    int state = getVertexValue().get();
+                    int newState = state | msgValue;
+                    boolean changed = state == newState ? false : true;
+                    if (changed) {
+                        tmpVertexValue.set((byte) newState);
+                        setVertexValue(tmpVertexValue);
+                        if (newState < 3) {
+                            sendOutMsgs();
+                        } else {
+                            signalTerminate();
+                        }
+                    }
+                } else {
+                    signalTerminate();
+                }
+            }
+        }
+        voteToHalt();
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    private void signalTerminate() {
+        Configuration conf = getContext().getConfiguration();
+        try {
+            IterationUtils.writeForceTerminationState(conf, BspUtils.getJobId(conf));
+            writeReachibilityResult(conf, true);
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    private void sendOutMsgs() {
+        for (Edge<VLongWritable, FloatWritable> edge : getEdges()) {
+            sendMsg(edge.getDestVertexId(), tmpVertexValue);
+        }
+    }
+
+    private void writeReachibilityResult(Configuration conf, boolean terminate) {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + BspUtils.getJobId(conf) + "reachibility";
+            Path path = new Path(pathStr);
+            if (!dfs.exists(path)) {
+                FSDataOutputStream output = dfs.create(path, true);
+                output.writeBoolean(terminate);
+                output.flush();
+                output.close();
+            }
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    private static boolean readReachibilityResult(Configuration conf) {
+        try {
+            FileSystem dfs = FileSystem.get(conf);
+            String pathStr = IterationUtils.TMP_DIR + BspUtils.getJobId(conf) + "reachibility";
+            Path path = new Path(pathStr);
+            if (!dfs.exists(path)) {
+                return false;
+            } else {
+                return true;
+            }
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(ReachabilityVertex.class.getSimpleName());
+        job.setVertexClass(ReachabilityVertex.class);
+        job.setVertexInputFormatClass(TextReachibilityVertexInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleReachibilityVertexOutputFormat.class);
+        job.setMessageCombinerClass(ReachabilityVertex.SimpleReachibilityCombiner.class);
+        Client.run(args, job);
+        System.out.println("reachable? " + readReachibilityResult(job.getConfiguration()));
+    }
+
+    /**
+     * Simple VertexWriter
+     */
+    public static class SimpleReachibilityVertexWriter extends
+            TextVertexWriter<VLongWritable, ByteWritable, FloatWritable> {
+        public SimpleReachibilityVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            super(lineRecordWriter);
+        }
+
+        @Override
+        public void writeVertex(Vertex<VLongWritable, ByteWritable, FloatWritable, ?> vertex) throws IOException,
+                InterruptedException {
+            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
+                    new Text(vertex.getVertexValue().toString()));
+        }
+    }
+
+    /**
+     * output format for reachibility
+     */
+    public static class SimpleReachibilityVertexOutputFormat extends
+            TextVertexOutputFormat<VLongWritable, ByteWritable, FloatWritable> {
+
+        @Override
+        public VertexWriter<VLongWritable, ByteWritable, FloatWritable> createVertexWriter(TaskAttemptContext context)
+                throws IOException, InterruptedException {
+            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
+            return new SimpleReachibilityVertexWriter(recordWriter);
+        }
+
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
new file mode 100644
index 0000000..199870e
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example;
+
+import java.util.Iterator;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.Edge;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.example.PageRankVertex.SimplePageRankVertexOutputFormat;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.inputformat.TextShortestPathsInputFormat;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * Demonstrates the basic Pregel shortest paths implementation.
+ */
+public class ShortestPathsVertex extends Vertex<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+    /**
+     * Combiner that collapses all messages addressed to one vertex into the
+     * single smallest distance, both per-partition and across partitions.
+     */
+    public static class SimpleMinCombiner extends MessageCombiner<VLongWritable, DoubleWritable, DoubleWritable> {
+        private double min = Double.MAX_VALUE;
+        private DoubleWritable agg = new DoubleWritable();
+        private MsgList<DoubleWritable> msgList;
+
+        @SuppressWarnings({ "unchecked", "rawtypes" })
+        @Override
+        public void init(MsgList msgList) {
+            this.msgList = msgList;
+            min = Double.MAX_VALUE;
+        }
+
+        @Override
+        public void stepPartial(VLongWritable vertexIndex, DoubleWritable msg) throws HyracksDataException {
+            double candidate = msg.get();
+            if (candidate < min) {
+                min = candidate;
+            }
+        }
+
+        @Override
+        public void stepFinal(VLongWritable vertexIndex, DoubleWritable partialAggregate) throws HyracksDataException {
+            double candidate = partialAggregate.get();
+            if (candidate < min) {
+                min = candidate;
+            }
+        }
+
+        @Override
+        public DoubleWritable finishPartial() {
+            agg.set(min);
+            return agg;
+        }
+
+        @Override
+        public MsgList<DoubleWritable> finishFinal() {
+            agg.set(min);
+            msgList.clear();
+            msgList.add(agg);
+            return msgList;
+        }
+    }
+
+    // Reusable buffer for outgoing relaxed-distance messages.
+    private DoubleWritable relaxedDist = new DoubleWritable();
+    // Reusable buffer for this vertex's tentative distance.
+    private DoubleWritable distBuffer = new DoubleWritable();
+    /** Class logger */
+    private static final Logger LOG = Logger.getLogger(ShortestPathsVertex.class.getName());
+    /** The shortest paths id */
+    public static final String SOURCE_ID = "SimpleShortestPathsVertex.sourceId";
+    /** Default shortest paths id */
+    public static final long SOURCE_ID_DEFAULT = 1;
+
+    /**
+     * Is this vertex the source id?
+     * 
+     * @return True if the source id
+     */
+    private boolean isSource() {
+        long configuredSource = getContext().getConfiguration().getLong(SOURCE_ID, SOURCE_ID_DEFAULT);
+        return getVertexId().get() == configuredSource;
+    }
+
+    @Override
+    public void compute(Iterator<DoubleWritable> msgIterator) {
+        if (getSuperstep() == 1) {
+            // Every vertex starts at "infinite" distance.
+            distBuffer.set(Double.MAX_VALUE);
+            setVertexValue(distBuffer);
+        }
+        // The source starts at distance 0; all others rely on incoming messages.
+        double minDist;
+        if (isSource()) {
+            minDist = 0d;
+        } else {
+            minDist = Double.MAX_VALUE;
+        }
+        while (msgIterator.hasNext()) {
+            minDist = Math.min(minDist, msgIterator.next().get());
+        }
+        if (LOG.getLevel() == Level.FINE) {
+            LOG.fine("Vertex " + getVertexId() + " got minDist = " + minDist + " vertex value = " + getVertexValue());
+        }
+        if (minDist < getVertexValue().get()) {
+            // Found a shorter path: adopt it and relax every outgoing edge.
+            distBuffer.set(minDist);
+            setVertexValue(distBuffer);
+            for (Edge<VLongWritable, FloatWritable> edge : getEdges()) {
+                double relaxed = minDist + edge.getEdgeValue().get();
+                if (LOG.getLevel() == Level.FINE) {
+                    LOG.fine("Vertex " + getVertexId() + " sent to " + edge.getDestVertexId() + " = "
+                            + relaxed);
+                }
+                relaxedDist.set(relaxed);
+                sendMsg(edge.getDestVertexId(), relaxedDist);
+            }
+        }
+        voteToHalt();
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    /** Command-line entry point; runs single-source shortest paths from vertex 0. */
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(ShortestPathsVertex.class.getSimpleName());
+        job.setVertexClass(ShortestPathsVertex.class);
+        job.setVertexInputFormatClass(TextShortestPathsInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
+        job.getConfiguration().setLong(SOURCE_ID, 0);
+        Client.run(args, job);
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/client/Client.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/client/Client.java
new file mode 100644
index 0000000..597ad6e
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/client/Client.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.client;
+
+import java.io.IOException;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.kohsuke.args4j.CmdLineException;
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.core.base.IDriver.Plan;
+import edu.uci.ics.pregelix.core.driver.Driver;
+import edu.uci.ics.pregelix.example.PageRankVertex;
+import edu.uci.ics.pregelix.example.ReachabilityVertex;
+import edu.uci.ics.pregelix.example.ShortestPathsVertex;
+
+public class Client {
+
+    private static class Options {
+        @Option(name = "-inputpaths", usage = "comma seprated input paths", required = true)
+        public String inputPaths;
+
+        @Option(name = "-outputpath", usage = "output path", required = true)
+        public String outputPath;
+
+        @Option(name = "-ip", usage = "ip address of cluster controller", required = true)
+        public String ipAddress;
+
+        @Option(name = "-port", usage = "port of cluster controller", required = false)
+        public int port;
+
+        @Option(name = "-plan", usage = "query plan choice", required = false)
+        public Plan planChoice = Plan.OUTER_JOIN;
+
+        @Option(name = "-vnum", usage = "number of vertices", required = false)
+        public long numVertices;
+
+        @Option(name = "-enum", usage = "number of vertices", required = false)
+        public long numEdges;
+
+        @Option(name = "-source-vertex", usage = "source vertex id, for shortest paths/reachibility only", required = false)
+        public long sourceId;
+
+        @Option(name = "-dest-vertex", usage = "dest vertex id, for reachibility only", required = false)
+        public long destId;
+
+        @Option(name = "-num-iteration", usage = "max number of iterations, for pagerank job only", required = false)
+        public long numIteration = -1;
+
+        @Option(name = "-runtime-profiling", usage = "whether to do runtime profifling", required = false)
+        public String profiling = "false";
+    }
+
+    public static void run(String[] args, PregelixJob job) throws Exception {
+        Options options = prepareJob(args, job);
+        Driver driver = new Driver(Client.class);
+        driver.runJob(job, options.planChoice, options.ipAddress, options.port, Boolean.parseBoolean(options.profiling));
+    }
+
+    private static Options prepareJob(String[] args, PregelixJob job) throws CmdLineException, IOException {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        String[] inputs = options.inputPaths.split(";");
+        FileInputFormat.setInputPaths(job, inputs[0]);
+        for (int i = 1; i < inputs.length; i++)
+            FileInputFormat.addInputPaths(job, inputs[0]);
+        FileOutputFormat.setOutputPath(job, new Path(options.outputPath));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, options.numVertices);
+        job.getConfiguration().setLong(PregelixJob.NUM_EDGES, options.numEdges);
+        job.getConfiguration().setLong(ShortestPathsVertex.SOURCE_ID, options.sourceId);
+        job.getConfiguration().setLong(ReachabilityVertex.SOURCE_ID, options.sourceId);
+        job.getConfiguration().setLong(ReachabilityVertex.DEST_ID, options.destId);
+        if (options.numIteration > 0)
+            job.getConfiguration().setLong(PageRankVertex.ITERATIONS, options.numIteration);
+        return options;
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextConnectedComponentsInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextConnectedComponentsInputFormat.java
new file mode 100644
index 0000000..a802403
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextConnectedComponentsInputFormat.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.inputformat;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+public class TextConnectedComponentsInputFormat extends
+        TextVertexInputFormat<VLongWritable, VLongWritable, FloatWritable, VLongWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, VLongWritable, FloatWritable, VLongWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        return new TextReachibilityGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+@SuppressWarnings("rawtypes")
+class TextConnectedComponentsGraphReader extends
+        TextVertexReader<VLongWritable, VLongWritable, FloatWritable, VLongWritable> {
+
+    private final static String separator = " ";
+    private Vertex vertex;
+    private VLongWritable vertexId = new VLongWritable();
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextConnectedComponentsGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, VLongWritable, FloatWritable, VLongWritable> getCurrentVertex() throws IOException,
+            InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+
+        vertex.reset();
+        Text line = getRecordReader().getCurrentValue();
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, null);
+            }
+        }
+        // vertex.sortEdges();
+        return vertex;
+    }
+
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextPageRankInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextPageRankInputFormat.java
new file mode 100644
index 0000000..a8a752e
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextPageRankInputFormat.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.inputformat;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+public class TextPageRankInputFormat extends
+        TextVertexInputFormat<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        return new TextPageRankGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+@SuppressWarnings("rawtypes")
+class TextPageRankGraphReader extends TextVertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+
+    private final static String separator = " ";
+    private Vertex vertex;
+    private VLongWritable vertexId = new VLongWritable();
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextPageRankGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> getCurrentVertex() throws IOException,
+            InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+
+        vertex.reset();
+        Text line = getRecordReader().getCurrentValue();
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, null);
+            }
+        }
+        // vertex.sortEdges();
+        return vertex;
+    }
+
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextReachibilityVertexInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextReachibilityVertexInputFormat.java
new file mode 100644
index 0000000..9ef1d49
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextReachibilityVertexInputFormat.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.inputformat;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+public class TextReachibilityVertexInputFormat extends
+        TextVertexInputFormat<VLongWritable, VLongWritable, FloatWritable, VLongWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, VLongWritable, FloatWritable, VLongWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        return new TextConnectedComponentsGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+@SuppressWarnings("rawtypes")
+class TextReachibilityGraphReader extends
+        TextVertexReader<VLongWritable, VLongWritable, FloatWritable, VLongWritable> {
+
+    private final static String separator = " ";
+    private Vertex vertex;
+    private VLongWritable vertexId = new VLongWritable();
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextReachibilityGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, VLongWritable, FloatWritable, VLongWritable> getCurrentVertex() throws IOException,
+            InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+
+        vertex.reset();
+        Text line = getRecordReader().getCurrentValue();
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, null);
+            }
+        }
+        // vertex.sortEdges();
+        return vertex;
+    }
+
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextShortestPathsInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextShortestPathsInputFormat.java
new file mode 100644
index 0000000..d445935
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextShortestPathsInputFormat.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.inputformat;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+public class TextShortestPathsInputFormat extends
+        TextVertexInputFormat<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        return new TextShortestPathsGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+@SuppressWarnings("rawtypes")
+class TextShortestPathsGraphReader extends
+        TextVertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+
+    private final static String separator = " ";
+    private Vertex vertex;
+    private FloatWritable initValue = new FloatWritable(1.0f);
+    private VLongWritable vertexId = new VLongWritable();
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextShortestPathsGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> getCurrentVertex() throws IOException,
+            InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+        Text line = getRecordReader().getCurrentValue();
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, initValue);
+            }
+        }
+        // vertex.sortEdges();
+        return vertex;
+    }
+
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/io/VLongWritable.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/io/VLongWritable.java
new file mode 100644
index 0000000..a6c2c1e
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/io/VLongWritable.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+
+import edu.uci.ics.pregelix.api.util.SerDeUtils;
+
+/**
+ * A WritableComparable for longs in a variable-length format. Such values take
+ * between one and five bytes. Smaller values take fewer bytes.
+ * 
+ * @see org.apache.hadoop.io.WritableUtils#readVLong(DataInput)
+ */
+@SuppressWarnings("rawtypes")
+public class VLongWritable implements WritableComparable {
+    private long value;
+
+    public VLongWritable() {
+    }
+
+    public VLongWritable(long value) {
+        set(value);
+    }
+
+    /** Set the value of this LongWritable. */
+    public void set(long value) {
+        this.value = value;
+    }
+
+    /** Return the value of this LongWritable. */
+    public long get() {
+        return value;
+    }
+
+    public void readFields(DataInput in) throws IOException {
+        value = SerDeUtils.readVLong(in);
+    }
+
+    public void write(DataOutput out) throws IOException {
+        SerDeUtils.writeVLong(out, value);
+    }
+
+    /** Returns true iff <code>o</code> is a VLongWritable with the same value. */
+    public boolean equals(Object o) {
+        if (!(o instanceof VLongWritable))
+            return false;
+        VLongWritable other = (VLongWritable) o;
+        return this.value == other.value;
+    }
+
+    public int hashCode() {
+        return (int) value;
+    }
+
+    /** Compares two VLongWritables. */
+    public int compareTo(Object o) {
+        long thisValue = this.value;
+        long thatValue = ((VLongWritable) o).value;
+        return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+    }
+
+    public String toString() {
+        return Long.toString(value);
+    }
+
+    /** A Comparator optimized for LongWritable. */
+    public static class Comparator extends WritableComparator {
+        public Comparator() {
+            super(VLongWritable.class);
+        }
+
+        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+            long thisValue = SerDeUtils.readVLong(b1, s1, l1);
+            long thatValue = SerDeUtils.readVLong(b2, s2, l2);
+            return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+        }
+    }
+
+    /** A decreasing Comparator optimized for LongWritable. */
+    public static class DecreasingComparator extends Comparator {
+        public int compare(WritableComparable a, WritableComparable b) {
+            return -super.compare(a, b);
+        }
+
+        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+            return -super.compare(b1, s1, l1, b2, s2, l2);
+        }
+    }
+
+    static { // register default comparator
+        WritableComparator.define(VLongWritable.class, new Comparator());
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/AdjacencyListWritable.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/AdjacencyListWritable.java
new file mode 100644
index 0000000..83e0a6b
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/AdjacencyListWritable.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The adjacency list contains <src, list-of-neighbors>
+ */
+public class AdjacencyListWritable implements Writable {
+
+    private VLongWritable sourceVertex = new VLongWritable();
+    private Set<VLongWritable> destinationVertexes = new TreeSet<VLongWritable>();
+
+    public AdjacencyListWritable() {
+    }
+
+    public void reset() {
+        this.destinationVertexes.clear();
+    }
+
+    public void setSource(VLongWritable source) {
+        this.sourceVertex = source;
+    }
+
+    public void addNeighbor(VLongWritable neighbor) {
+        destinationVertexes.add(neighbor);
+    }
+
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        sourceVertex = new VLongWritable();
+        destinationVertexes.clear();
+        sourceVertex.readFields(input);
+        int numberOfNeighbors = input.readInt();
+        for (int i = 0; i < numberOfNeighbors; i++) {
+            VLongWritable neighbor = new VLongWritable();
+            neighbor.readFields(input);
+            destinationVertexes.add(neighbor);
+        }
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        sourceVertex.write(output);
+        output.writeInt(destinationVertexes.size());
+        for (VLongWritable dest : destinationVertexes) {
+            dest.write(output);
+        }
+    }
+
+    public int numberOfNeighbors() {
+        return destinationVertexes.size();
+    }
+
+    public void removeNeighbor(VLongWritable v) {
+        destinationVertexes.remove(v);
+    }
+
+    public VLongWritable getSource() {
+        return sourceVertex;
+    }
+
+    public Iterator<VLongWritable> getNeighbors() {
+        return destinationVertexes.iterator();
+    }
+
+    public void cleanNonMatch(Collection<VLongWritable> matches) {
+        destinationVertexes.retainAll(matches);
+    }
+
+    public boolean isNeighbor(VLongWritable v) {
+        return destinationVertexes.contains(v);
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java
new file mode 100644
index 0000000..0e22ea1
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The representation of cliques stored in a vertex.
+ * Cliques are kept as a flat list of vertex ids: clique number i occupies positions
+ * [i * sizeOfClique, (i + 1) * sizeOfClique) of the list, and every stored clique
+ * has the same size.
+ */
+public class CliquesWritable implements Writable {
+
+    // flattened vertex ids of all cliques (see class javadoc for the layout)
+    private List<VLongWritable> cliques = new ArrayList<VLongWritable>();
+    // number of vertexes in each stored clique
+    private int sizeOfClique = 0;
+
+    public CliquesWritable(List<VLongWritable> cliques, int sizeOfClique) {
+        this.cliques = cliques;
+        this.sizeOfClique = sizeOfClique;
+    }
+
+    public CliquesWritable() {
+
+    }
+
+    /**
+     * Set the size of cliques.
+     * 
+     * @param sizeOfClique
+     *            the size of each maximal clique
+     */
+    public void setCliqueSize(int sizeOfClique) {
+        this.sizeOfClique = sizeOfClique;
+    }
+
+    /**
+     * Add the clique vertexes held by another CliquesWritable.
+     * 
+     * @param cliques
+     *            the cliques to append -- can contain multiple cliques
+     */
+    public void addCliques(CliquesWritable cliques) {
+        this.cliques.addAll(cliques.cliques);
+    }
+
+    /**
+     * Add the clique vertexes
+     * 
+     * @param vertexes
+     *            the list of vertexes -- can contain multiple cliques
+     */
+    public void addCliques(List<VLongWritable> vertexes) {
+        this.cliques.addAll(vertexes);
+    }
+
+    /**
+     * @return the size of each clique
+     */
+    public int getSizeOfClique() {
+        return sizeOfClique;
+    }
+
+    /**
+     * Reset to the empty state: no cliques, clique size zero.
+     */
+    public void reset() {
+        this.cliques.clear();
+        this.sizeOfClique = 0;
+    }
+
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        cliques.clear();
+        int numCliques = input.readInt();
+        // a negative count marks the empty state; nothing else was serialized (see write())
+        if (numCliques < 0) {
+            sizeOfClique = 0;
+            return;
+        }
+        sizeOfClique = input.readInt();
+        for (int i = 0; i < numCliques; i++) {
+            for (int j = 0; j < sizeOfClique; j++) {
+                VLongWritable vid = new VLongWritable();
+                vid.readFields(input);
+                cliques.add(vid);
+            }
+        }
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        // mirror readFields(): -1 marks the empty state and ends the record
+        if (sizeOfClique <= 0) {
+            output.writeInt(-1);
+            return;
+        }
+        output.writeInt(cliques.size() / sizeOfClique);
+        output.writeInt(sizeOfClique);
+
+        for (int i = 0; i < cliques.size(); i++) {
+            cliques.get(i).write(output);
+        }
+    }
+
+    @Override
+    public String toString() {
+        if (sizeOfClique == 0)
+            return "";
+        StringBuilder sb = new StringBuilder();
+        int numCliques = cliques.size() / sizeOfClique;
+        for (int i = 0; i < numCliques; i++) {
+            // offset of clique i in the flattened list; the previous code indexed from 0
+            // on every iteration and therefore printed the first clique repeatedly
+            int base = i * sizeOfClique;
+            for (int j = 0; j < sizeOfClique - 1; j++) {
+                sb.append(cliques.get(base + j));
+                sb.append(",");
+            }
+            sb.append(cliques.get(base + sizeOfClique - 1));
+            sb.append(";");
+        }
+        return sb.toString();
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueAggregator.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueAggregator.java
new file mode 100644
index 0000000..061e9e0
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueAggregator.java
@@ -0,0 +1,65 @@
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import org.apache.hadoop.io.NullWritable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The global aggregator that collects the largest maximal cliques
+ * reported by all vertexes and by partial aggregates.
+ */
+public class MaximalCliqueAggregator
+        extends
+        GlobalAggregator<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable, CliquesWritable, CliquesWritable> {
+
+    // running aggregate: the biggest cliques observed so far
+    private CliquesWritable state = new CliquesWritable();
+
+    @Override
+    public void init() {
+        state.reset();
+    }
+
+    @Override
+    public void step(Vertex<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> v)
+            throws HyracksDataException {
+        updateAggregateState(v.getVertexValue());
+    }
+
+    /**
+     * Fold incoming cliques into the aggregate: larger cliques replace the
+     * state, equal-sized cliques are appended, smaller ones are ignored.
+     * 
+     * @param cliques the incoming cliques
+     */
+    private void updateAggregateState(CliquesWritable cliques) {
+        int incoming = cliques.getSizeOfClique();
+        int current = state.getSizeOfClique();
+        if (incoming < current) {
+            // smaller cliques never affect the aggregate
+            return;
+        }
+        if (incoming > current) {
+            // strictly larger cliques supersede everything collected so far
+            state.reset();
+            state.setCliqueSize(incoming);
+        }
+        state.addCliques(cliques);
+    }
+
+    @Override
+    public void step(CliquesWritable partialResult) {
+        updateAggregateState(partialResult);
+    }
+
+    @Override
+    public CliquesWritable finishPartial() {
+        return state;
+    }
+
+    @Override
+    public CliquesWritable finishFinal() {
+        return state;
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java
new file mode 100644
index 0000000..266feb7
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java
@@ -0,0 +1,347 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Edge;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex;
+
+/**
+ * The maximal clique example -- find maximal cliques in an undirected graph.
+ * The result cliques contains vertexes ordered by the vertex id ascendingly. The algorithm takes
+ * advantage of that property to do effective pruning.
+ */
+public class MaximalCliqueVertex extends Vertex<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> {
+
+    private Map<VLongWritable, AdjacencyListWritable> map = new TreeMap<VLongWritable, AdjacencyListWritable>();
+    private List<VLongWritable> vertexList = new ArrayList<VLongWritable>();
+    private Map<VLongWritable, Integer> invertedMap = new TreeMap<VLongWritable, Integer>();
+    private int largestCliqueSizeSoFar = 0;
+    private List<BitSet> currentMaximalCliques = new ArrayList<BitSet>();
+    private CliquesWritable tmpValue = new CliquesWritable();
+    private List<VLongWritable> cliques = new ArrayList<VLongWritable>();
+
+    /**
+     * Update the current maximal cliques
+     * 
+     * @param values
+     *            the received adjcency lists
+     */
+    private void updateCurrentMaximalCliques(Iterator<AdjacencyListWritable> values) {
+        map.clear();
+        vertexList.clear();
+        invertedMap.clear();
+        currentMaximalCliques.clear();
+        cliques.clear();
+        tmpValue.reset();
+
+        // build the initial sub graph
+        while (values.hasNext()) {
+            AdjacencyListWritable adj = values.next();
+            map.put(adj.getSource(), adj);
+        }
+        VLongWritable srcId = getVertexId();
+        map.put(srcId, new AdjacencyListWritable());
+
+        // build the vertex list (vertex id in ascending order) and the inverted list of vertexes
+        int i = 0;
+        for (VLongWritable v : map.keySet()) {
+            vertexList.add(v);
+            invertedMap.put(v, i++);
+        }
+
+        //clean up adjacency list --- remove vertexes who are not neighbors of key
+        for (AdjacencyListWritable adj : map.values()) {
+            adj.cleanNonMatch(vertexList);
+        }
+
+        // get the h-index of the subgraph --- which is the maximum depth to explore
+        int[] neighborCounts = new int[map.size()];
+        i = 0;
+        for (AdjacencyListWritable adj : map.values()) {
+            neighborCounts[i++] = adj.numberOfNeighbors();
+        }
+        Arrays.sort(neighborCounts);
+        int h = 0;
+        for (i = neighborCounts.length - 1; i >= 0; i--) {
+            if (h >= neighborCounts[i]) {
+                break;
+            }
+            h++;
+        }
+        if (h < largestCliqueSizeSoFar) {
+            return;
+        }
+
+        //start depth-first search
+        BitSet cliqueSoFar = new BitSet(h);
+        for (VLongWritable v : vertexList) {
+            cliqueSoFar.set(invertedMap.get(v));
+            searchClique(h, cliqueSoFar, 1, v);
+            cliqueSoFar.clear();
+        }
+
+        //output local maximal cliques
+        for (BitSet clique : currentMaximalCliques) {
+            int keyIndex = invertedMap.get(srcId);
+            clique.set(keyIndex);
+            generateClique(clique);
+            tmpValue.addCliques(cliques);
+            tmpValue.setCliqueSize(clique.cardinality());
+        }
+
+        //update the vertex state
+        setVertexValue(tmpValue);
+    }
+
+    /**
+     * Output a clique with vertex ids.
+     * 
+     * @param clique
+     *            the bitmap representation of a clique
+     */
+    private void generateClique(BitSet clique) {
+        for (int j = 0; j < clique.length();) {
+            j = clique.nextSetBit(j);
+            VLongWritable v = vertexList.get(j);
+            cliques.add(v);
+            j++;
+        }
+    }
+
+    /**
+     * find cliques using the depth-first search
+     * 
+     * @param maxDepth
+     *            the maximum search depth
+     * @param cliqueSoFar
+     *            the the cliques found so far
+     * @param depthSoFar
+     *            the current search depth
+     * @param currentSource
+     *            the vertex to be added into the clique
+     */
+    private void searchClique(int maxDepth, BitSet cliqueSoFar, int depthSoFar, VLongWritable currentSource) {
+        if (depthSoFar > maxDepth) {
+            // update maximal clique info
+            updateMaximalClique(cliqueSoFar);
+            return;
+        }
+
+        AdjacencyListWritable adj = map.get(currentSource);
+        Iterator<VLongWritable> neighbors = adj.getNeighbors();
+        ++depthSoFar;
+        while (neighbors.hasNext()) {
+            VLongWritable neighbor = neighbors.next();
+            if (!isTested(neighbor, cliqueSoFar) && isClique(neighbor, cliqueSoFar)) {
+                //snapshot the clique
+                int cliqueLength = cliqueSoFar.length();
+                // expand the clique
+                cliqueSoFar.set(invertedMap.get(neighbor));
+                searchClique(maxDepth, cliqueSoFar, depthSoFar, neighbor);
+                // back to the snapshot clique
+                cliqueSoFar.set(cliqueLength, cliqueSoFar.length(), false);
+            }
+        }
+
+        // update maximal clique info
+        updateMaximalClique(cliqueSoFar);
+    }
+
+    /**
+     * Update the maximal clique to a larger one if it exists
+     * 
+     * @param cliqueSoFar
+     *            the clique so far, in the bitmap representation
+     */
+    private void updateMaximalClique(BitSet cliqueSoFar) {
+        int cliqueSize = cliqueSoFar.cardinality();
+        if (cliqueSize > largestCliqueSizeSoFar) {
+            currentMaximalCliques.clear();
+            currentMaximalCliques.add((BitSet) cliqueSoFar.clone());
+            largestCliqueSizeSoFar = cliqueSize;
+        } else if (cliqueSize == largestCliqueSizeSoFar) {
+            currentMaximalCliques.add((BitSet) cliqueSoFar.clone());
+        } else {
+            return;
+        }
+    }
+
+    /**
+     * Should we test the vertex newVertex?
+     * 
+     * @param newVertex
+     *            the vertex to be tested
+     * @param cliqueSoFar
+     *            the current clique, in the bitmap representation
+     * @return true if new vertex has been tested
+     */
+    private boolean isTested(VLongWritable newVertex, BitSet cliqueSoFar) {
+        int index = invertedMap.get(newVertex);
+        int largestSetIndex = cliqueSoFar.length() - 1;
+        if (index > largestSetIndex) {
+            // we only return cliques with vertexes in the ascending order
+            // hence, the new vertex must be larger than the largesetSetIndex in the clique
+            return false;
+        } else {
+            // otherwise, we think the vertex is "tested"
+            return true;
+        }
+    }
+
+    /**
+     * Will adding the newVertex yield a bigger clique?
+     * 
+     * @param newVertex
+     *            the new vertex id
+     * @param cliqueSoFar
+     *            the bitmap representation of the clique
+     * @return true if adding the new vertex yelds a bigger clique
+     */
+    private boolean isClique(VLongWritable newVertex, BitSet cliqueSoFar) {
+        AdjacencyListWritable adj = map.get(newVertex);
+        // check whether each existing vertex is in the neighbor set of newVertex
+        for (int i = 0; i < cliqueSoFar.length();) {
+            i = cliqueSoFar.nextSetBit(i);
+            VLongWritable v = vertexList.get(i);
+            if (!adj.isNeighbor(v)) {
+                return false;
+            }
+            i++;
+        }
+        return true;
+    }
+
+    /**
+     * For superstep 1, send outgoing mesages.
+     * For superstep 2, calculate maximal cliques.
+     * otherwise, vote to halt.
+     */
+    @Override
+    public void compute(Iterator<AdjacencyListWritable> msgIterator) {
+        if (getSuperstep() == 1) {
+            sortEdges();
+            sendOutgoingMsgs(getEdges());
+        } else if (getSuperstep() == 2) {
+            updateCurrentMaximalCliques(msgIterator);
+        } else {
+            voteToHalt();
+        }
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    private static CliquesWritable readMaximalCliqueResult(Configuration conf) {
+        try {
+            CliquesWritable result = (CliquesWritable) IterationUtils.readGlobalAggregateValue(conf,
+                    BspUtils.getJobId(conf));
+            return result;
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(TriangleCountingVertex.class.getSimpleName());
+        job.setVertexClass(MaximalCliqueVertex.class);
+        job.setGlobalAggregatorClass(MaximalCliqueAggregator.class);
+        job.setDynamicVertexValueSize(true);
+        job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
+        job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+        Client.run(args, job);
+        System.out.println("maximal cliques: \n" + readMaximalCliqueResult(job.getConfiguration()));
+    }
+
+    /**
+     * Send the adjacency lists
+     * 
+     * @param edges
+     *            the outgoing edges
+     */
+    private void sendOutgoingMsgs(List<Edge<VLongWritable, NullWritable>> edges) {
+        for (int i = 0; i < edges.size(); i++) {
+            if (edges.get(i).getDestVertexId().get() < getVertexId().get()) {
+                // only add emit for the vertexes whose id is smaller than the vertex id 
+                // to avoid the duplicate removal step,
+                // because all the resulting cliques will have vertexes in the ascending order.
+                AdjacencyListWritable msg = new AdjacencyListWritable();
+                msg.setSource(getVertexId());
+                for (int j = i + 1; j < edges.size(); j++) {
+                    msg.addNeighbor(edges.get(j).getDestVertexId());
+                }
+                sendMsg(edges.get(i).getDestVertexId(), msg);
+            }
+        }
+    }
+
+    /**
+     * Maximal Clique VertexWriter
+     */
+    public static class MaximalCliqueVertexWriter extends
+            TextVertexWriter<VLongWritable, CliquesWritable, NullWritable> {
+        public MaximalCliqueVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            super(lineRecordWriter);
+        }
+
+        @Override
+        public void writeVertex(Vertex<VLongWritable, CliquesWritable, NullWritable, ?> vertex) throws IOException,
+                InterruptedException {
+            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
+                    new Text(vertex.getVertexValue().toString()));
+        }
+    }
+
+    /**
+     * output format for maximal clique
+     */
+    public static class MaximalCliqueVertexOutputFormat extends
+            TextVertexOutputFormat<VLongWritable, CliquesWritable, NullWritable> {
+
+        @Override
+        public VertexWriter<VLongWritable, CliquesWritable, NullWritable> createVertexWriter(TaskAttemptContext context)
+                throws IOException, InterruptedException {
+            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
+            return new MaximalCliqueVertexWriter(recordWriter);
+        }
+
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/TextMaximalCliqueInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/TextMaximalCliqueInputFormat.java
new file mode 100644
index 0000000..ec7b32c
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/TextMaximalCliqueInputFormat.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/** Text input format for maximal clique: one line per vertex, "src dest1 dest2 ...". */
+public class TextMaximalCliqueInputFormat extends
+        TextVertexInputFormat<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        return new TextMaximalCliqueGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+/**
+ * Parses one vertex per text line ("src dest1 dest2 ..."), reusing a single
+ * Vertex instance and a pool of VLongWritable objects across calls to cut
+ * allocation; callers must consume each vertex before requesting the next.
+ */
+@SuppressWarnings("rawtypes")
+class TextMaximalCliqueGraphReader extends
+        TextVertexReader<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> {
+
+    private final static String separator = " ";
+    // single reusable vertex instance, created lazily from the job configuration
+    private Vertex vertex;
+    private VLongWritable vertexId = new VLongWritable();
+    // pool of reusable id objects; 'used' marks how many are handed out for the current line
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextMaximalCliqueGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> getCurrentVertex()
+            throws IOException, InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+
+        vertex.reset();
+        Text line = getRecordReader().getCurrentValue();
+        // NOTE(review): an empty or malformed line makes Long.parseLong throw -- presumably
+        // input is pre-validated upstream; confirm before relying on this with raw data.
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, null);
+            }
+        }
+        return vertex;
+    }
+
+    /** Hand out the next pooled VLongWritable, growing the pool on first use. */
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TextTriangleCountingInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TextTriangleCountingInputFormat.java
new file mode 100644
index 0000000..bb399ff
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TextTriangleCountingInputFormat.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.trianglecounting;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/** Text input format for triangle counting: one line per vertex, "src dest1 dest2 ...". */
+public class TextTriangleCountingInputFormat extends
+        TextVertexInputFormat<VLongWritable, VLongWritable, VLongWritable, VLongWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, VLongWritable, VLongWritable, VLongWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        // NOTE(review): the reader class name says "PageRank" but it parses triangle-counting
+        // input; looks like a copy-paste leftover -- consider renaming the reader.
+        return new TextPageRankGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+/**
+ * Parses one vertex per text line ("src dest1 dest2 ...") for triangle counting,
+ * reusing a single Vertex instance and a pool of VLongWritable objects across
+ * calls to cut allocation. (Despite the "PageRank" name, this reader is used by
+ * TextTriangleCountingInputFormat only -- presumably a copy-paste leftover.)
+ */
+@SuppressWarnings("rawtypes")
+class TextPageRankGraphReader extends TextVertexReader<VLongWritable, VLongWritable, VLongWritable, VLongWritable> {
+
+    private final static String separator = " ";
+    // single reusable vertex instance, created lazily from the job configuration
+    private Vertex vertex;
+    private VLongWritable vertexId = new VLongWritable();
+    // pool of reusable id objects; 'used' marks how many are handed out for the current line
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextPageRankGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, VLongWritable, VLongWritable, VLongWritable> getCurrentVertex() throws IOException,
+            InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+
+        vertex.reset();
+        Text line = getRecordReader().getCurrentValue();
+        // NOTE(review): an empty or malformed line makes Long.parseLong throw -- presumably
+        // input is pre-validated upstream; confirm before relying on this with raw data.
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, null);
+            }
+        }
+        // vertex.sortEdges();
+        return vertex;
+    }
+
+    /** Hand out the next pooled VLongWritable, growing the pool on first use. */
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingAggregator.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingAggregator.java
new file mode 100644
index 0000000..67b028d
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingAggregator.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.pregelix.example.trianglecounting;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The global aggregator that sums the triangle counts contributed by
+ * individual vertexes and by partial aggregates.
+ */
+public class TriangleCountingAggregator extends
+        GlobalAggregator<VLongWritable, VLongWritable, VLongWritable, VLongWritable, VLongWritable, VLongWritable> {
+
+    // running total of triangles
+    private VLongWritable state = new VLongWritable(0);
+
+    @Override
+    public void init() {
+        state.set(0);
+    }
+
+    @Override
+    public void step(Vertex<VLongWritable, VLongWritable, VLongWritable, VLongWritable> v) throws HyracksDataException {
+        accumulate(v.getVertexValue().get());
+    }
+
+    @Override
+    public void step(VLongWritable partialResult) {
+        accumulate(partialResult.get());
+    }
+
+    /** Add a partial triangle count into the running total. */
+    private void accumulate(long delta) {
+        state.set(state.get() + delta);
+    }
+
+    @Override
+    public VLongWritable finishPartial() {
+        return state;
+    }
+
+    @Override
+    public VLongWritable finishFinal() {
+        return state;
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java
new file mode 100644
index 0000000..d3db095
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java
@@ -0,0 +1,153 @@
+package edu.uci.ics.pregelix.example.trianglecounting;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Edge;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
/**
 * The triangle counting example -- counting the triangles in an undirected graph.
 *
 * <p>Two-phase algorithm:
 * <ul>
 * <li>Superstep 1: each vertex v sends, to every neighbor u with u.id &lt; v.id,
 * the ids of v's neighbors w with w.id &gt; v.id -- i.e. candidate edges (u, w)
 * whose existence would close a triangle with ordered ids u &lt; v &lt; w.</li>
 * <li>Superstep 2: each vertex counts how many received candidate ids appear in
 * its own (sorted) edge list; {@link TriangleCountingAggregator} sums the
 * per-vertex counts into the global triangle count.</li>
 * </ul>
 */
public class TriangleCountingVertex extends Vertex<VLongWritable, VLongWritable, VLongWritable, VLongWritable> {

    // Reused writable that publishes this vertex's match count as its vertex value.
    private VLongWritable tmpValue = new VLongWritable(0);
    private long triangleCount = 0;
    // Scratch probe edge reused for every binary search in superstep 2.
    private Edge<VLongWritable, VLongWritable> candidateEdge = new Edge<VLongWritable, VLongWritable>(
            new VLongWritable(0), new VLongWritable(0));
    private EdgeComparator edgeComparator = new EdgeComparator();

    @Override
    public void compute(Iterator<VLongWritable> msgIterator) {
        // sort the edge list so it can be binary-searched in superstep 2
        if (getSuperstep() == 1) {
            // sorting edges could be avoid if the dataset already has that property
            sortEdges();
            List<Edge<VLongWritable, VLongWritable>> edges = this.getEdges();
            int numEdges = edges.size();

            //decoding longs
            long src = getVertexId().get();
            long[] dests = new long[numEdges];
            for (int i = 0; i < numEdges; i++) {
                dests[i] = edges.get(i).getDestVertexId().get();
            }

            //send messages -- take advantage of that each discovered 
            //triangle should have vertexes ordered by vertex id
            for (int i = 0; i < numEdges; i++) {
                if (dests[i] < src) {
                    // dests[i] is a smaller-id neighbor u; every later entry with a
                    // larger id than src is a candidate third vertex w.
                    for (int j = i + 1; j < numEdges; j++) {
                        //send messages -- v_j.id > v_i.id -- guaranteed by sortEdge()
                        if (dests[j] > src) {
                            sendMsg(edges.get(i).getDestVertexId(), edges.get(j).getDestVertexId());
                        }
                    }
                }
            }
        }
        if (getSuperstep() >= 2) {
            triangleCount = 0;
            List<Edge<VLongWritable, VLongWritable>> edges = this.getEdges();
            while (msgIterator.hasNext()) {
                VLongWritable msg = msgIterator.next();
                candidateEdge.setDestVertexId(msg);
                // binarySearch relies on the edge order established by sortEdges()
                // in superstep 1 and on EdgeComparator matching that order.
                if (Collections.binarySearch(edges, candidateEdge, edgeComparator) >= 0) {
                    // if the msg value is a dest from this vertex
                    triangleCount++;
                }
            }

            // set vertex value
            tmpValue.set(triangleCount);
            setVertexValue(tmpValue);
            voteToHalt();
        }
    }

    /**
     * Triangle Counting VertexWriter: emits "vertexId vertexValue" as a text pair.
     */
    public static class TriangleCountingVertexWriter extends
            TextVertexWriter<VLongWritable, VLongWritable, VLongWritable> {
        public TriangleCountingVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
            super(lineRecordWriter);
        }

        @Override
        public void writeVertex(Vertex<VLongWritable, VLongWritable, VLongWritable, ?> vertex) throws IOException,
                InterruptedException {
            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
                    new Text(vertex.getVertexValue().toString()));
        }
    }

    @Override
    public String toString() {
        return getVertexId() + " " + getVertexValue();
    }

    /**
     * output format for triangle counting
     */
    public static class TriangleCountingVertexOutputFormat extends
            TextVertexOutputFormat<VLongWritable, VLongWritable, VLongWritable> {

        @Override
        public VertexWriter<VLongWritable, VLongWritable, VLongWritable> createVertexWriter(TaskAttemptContext context)
                throws IOException, InterruptedException {
            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
            return new TriangleCountingVertexWriter(recordWriter);
        }

    }

    /**
     * Reads the globally-aggregated triangle count for the finished job.
     *
     * @throws IllegalStateException if the aggregate value cannot be read
     */
    private static long readTriangleCountingResult(Configuration conf) {
        try {
            VLongWritable count = (VLongWritable) IterationUtils
                    .readGlobalAggregateValue(conf, BspUtils.getJobId(conf));
            return count.get();
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
    }

    /** Configures and runs the triangle counting job, then prints the global count. */
    public static void main(String[] args) throws Exception {
        PregelixJob job = new PregelixJob(TriangleCountingVertex.class.getSimpleName());
        job.setVertexClass(TriangleCountingVertex.class);
        job.setGlobalAggregatorClass(TriangleCountingAggregator.class);
        job.setVertexInputFormatClass(TextTriangleCountingInputFormat.class);
        job.setVertexOutputFormatClass(TriangleCountingVertexOutputFormat.class);
        Client.run(args, job);
        System.out.println("triangle count: " + readTriangleCountingResult(job.getConfiguration()));
    }
}
+
+/**
+ * The comparator for Edge<VLongWritable, VLongWritable>.
+ */
+class EdgeComparator implements Comparator<Edge<VLongWritable, VLongWritable>> {
+
+    @Override
+    public int compare(Edge<VLongWritable, VLongWritable> left, Edge<VLongWritable, VLongWritable> right) {
+        long leftValue = left.getDestVertexId().get();
+        long rightValue = right.getDestVertexId().get();
+        return leftValue > rightValue ? 1 : (leftValue < rightValue ? -1 : 0);
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexAggregator.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexAggregator.java
new file mode 100644
index 0000000..d8f704e
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexAggregator.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.utils;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+@SuppressWarnings("deprecation")
+public class VertexAggregator {
+
+    public static class MapRecordOnly extends MapReduceBase implements
+            Mapper<LongWritable, Text, NullWritable, LongWritable> {
+        private final NullWritable nullValue = NullWritable.get();
+        private final LongWritable count = new LongWritable(1);
+
+        public void map(LongWritable id, Text inputValue, OutputCollector<NullWritable, LongWritable> output,
+                Reporter reporter) throws IOException {
+            output.collect(nullValue, count);
+        }
+    }
+
+    public static class CombineRecordOnly extends MapReduceBase implements
+            Reducer<NullWritable, LongWritable, NullWritable, LongWritable> {
+        private final NullWritable nullValue = NullWritable.get();
+
+        public void reduce(NullWritable inputKey, Iterator<LongWritable> inputValue,
+                OutputCollector<NullWritable, LongWritable> output, Reporter reporter) throws IOException {
+            long count = 0;
+            while (inputValue.hasNext())
+                count += inputValue.next().get();
+            output.collect(nullValue, new LongWritable(count));
+        }
+    }
+
+    public static class ReduceRecordOnly extends MapReduceBase implements
+            Reducer<NullWritable, LongWritable, NullWritable, Text> {
+        private final NullWritable nullValue = NullWritable.get();
+
+        public void reduce(NullWritable inputKey, Iterator<LongWritable> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            long count = 0;
+            while (inputValue.hasNext())
+                count += inputValue.next().get();
+            output.collect(nullValue, new Text(Long.toString(count)));
+        }
+    }
+
+    public static void main(String[] args) throws IOException {
+        JobConf job = new JobConf(VertexAggregator.class);
+
+        job.setJobName(VertexAggregator.class.getSimpleName());
+        job.setMapperClass(MapRecordOnly.class);
+        job.setCombinerClass(CombineRecordOnly.class);
+        job.setReducerClass(ReduceRecordOnly.class);
+        job.setMapOutputKeyClass(NullWritable.class);
+        job.setMapOutputValueClass(LongWritable.class);
+
+        job.setInputFormat(TextInputFormat.class);
+        FileInputFormat.setInputPaths(job, args[0]);
+        FileOutputFormat.setOutputPath(job, new Path(args[1]));
+        job.setNumReduceTasks(Integer.parseInt(args[2]));
+        JobClient.runJob(job);
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexSorter.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexSorter.java
new file mode 100644
index 0000000..8421088
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexSorter.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.utils;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+@SuppressWarnings("deprecation")
+public class VertexSorter {
+    public static class MapRecordOnly extends MapReduceBase implements Mapper<LongWritable, Text, LongWritable, Text> {
+        private static String separator = " ";
+
+        public void map(LongWritable id, Text inputValue, OutputCollector<LongWritable, Text> output, Reporter reporter)
+                throws IOException {
+            String[] fields = inputValue.toString().split(separator);
+            LongWritable vertexId = new LongWritable(Long.parseLong(fields[0]));
+            output.collect(vertexId, inputValue);
+        }
+    }
+
+    public static class ReduceRecordOnly extends MapReduceBase implements
+            Reducer<LongWritable, Text, NullWritable, Text> {
+
+        NullWritable key = NullWritable.get();
+
+        public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext())
+                output.collect(key, inputValue.next());
+        }
+    }
+
+    public static void main(String[] args) throws IOException {
+        JobConf job = new JobConf(VertexSorter.class);
+
+        job.setJobName(VertexSorter.class.getSimpleName());
+        job.setMapperClass(MapRecordOnly.class);
+        job.setReducerClass(ReduceRecordOnly.class);
+        job.setMapOutputKeyClass(LongWritable.class);
+        job.setMapOutputValueClass(Text.class);
+
+        job.setInputFormat(TextInputFormat.class);
+        FileInputFormat.setInputPaths(job, args[0]);
+        FileOutputFormat.setOutputPath(job, new Path(args[1]));
+        job.setNumReduceTasks(Integer.parseInt(args[2]));
+        JobClient.runJob(job);
+    }
+}
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/dataload/DataLoadTest.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/dataload/DataLoadTest.java
new file mode 100644
index 0000000..37f03a5
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/dataload/DataLoadTest.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.dataload;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.logging.Logger;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoin;
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
+import edu.uci.ics.pregelix.example.PageRankVertex;
+import edu.uci.ics.pregelix.example.PageRankVertex.SimulatedPageRankVertexInputFormat;
+import edu.uci.ics.pregelix.example.util.TestUtils;
+
+@SuppressWarnings("deprecation")
+public class DataLoadTest {
+    private static final String EXPECT_RESULT_DIR = "expected";
+    private static final String ACTUAL_RESULT_DIR = "actual";
+    private static final String NC1 = "nc1";
+
+    private static final Logger LOGGER = Logger.getLogger(DataLoadTest.class.getName());
+
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+    private static final String PATH_TO_CLUSTER_STORE = "src/test/resources/cluster/stores.properties";
+    private static final String PATH_TO_CLUSTER_PROPERTIES = "src/test/resources/cluster/cluster.properties";
+
+    private static final String HYRACKS_APP_NAME = "giraph";
+    private static final String GIRAPH_JOB_NAME = "DataLoadTest";
+
+    private MiniDFSCluster dfsCluster;
+
+    private JobConf conf = new JobConf();
+    private int numberOfNC = 2;
+    private JobGenOuterJoin giraphTestJobGen;
+    private PregelixJob job;
+
+    public DataLoadTest() throws Exception {
+        job = new PregelixJob(GIRAPH_JOB_NAME);
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
+        job.getConfiguration().setClass(PregelixJob.VERTEX_INDEX_CLASS, LongWritable.class, WritableComparable.class);
+        job.getConfiguration().setClass(PregelixJob.VERTEX_VALUE_CLASS, DoubleWritable.class, Writable.class);
+        job.getConfiguration().setClass(PregelixJob.EDGE_VALUE_CLASS, FloatWritable.class, Writable.class);
+        job.getConfiguration().setClass(PregelixJob.MESSAGE_VALUE_CLASS, DoubleWritable.class, Writable.class);
+    }
+
+    public void setUp() throws Exception {
+        ClusterConfig.setStorePath(PATH_TO_CLUSTER_STORE);
+        ClusterConfig.setClusterPropertiesPath(PATH_TO_CLUSTER_PROPERTIES);
+        cleanupStores();
+        PregelixHyracksIntegrationUtil.init();
+        PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);
+        LOGGER.info("Hyracks mini-cluster started");
+        startHDFS();
+        FileUtils.forceMkdir(new File(EXPECT_RESULT_DIR));
+        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(EXPECT_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        giraphTestJobGen = new JobGenOuterJoin(job);
+    }
+
+    private void cleanupStores() throws IOException {
+        FileUtils.forceMkdir(new File("teststore"));
+        FileUtils.forceMkdir(new File("build"));
+        FileUtils.cleanDirectory(new File("teststore"));
+        FileUtils.cleanDirectory(new File("build"));
+    }
+
+    private void startHDFS() throws IOException {
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
+    }
+
+    /**
+     * cleanup hdfs cluster
+     */
+    private void cleanupHDFS() throws Exception {
+        dfsCluster.shutdown();
+    }
+
+    public void tearDown() throws Exception {
+        PregelixHyracksIntegrationUtil.destroyApp(HYRACKS_APP_NAME);
+        PregelixHyracksIntegrationUtil.deinit();
+        LOGGER.info("Hyracks mini-cluster shut down");
+        cleanupHDFS();
+    }
+
+    @Test
+    public void test() throws Exception {
+        setUp();
+        runDataScan();
+        runCreation();
+        runDataLoad();
+        runIndexScan();
+        try {
+            compareResults();
+        } catch (Exception e) {
+            tearDown();
+            throw e;
+        }
+        tearDown();
+    }
+
+    private void runCreation() throws Exception {
+        try {
+            JobSpecification bulkLoadJobSpec = giraphTestJobGen.generateCreatingJob();
+            PregelixHyracksIntegrationUtil.runJob(bulkLoadJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runDataLoad() throws Exception {
+        try {
+            JobSpecification bulkLoadJobSpec = giraphTestJobGen.generateLoadingJob();
+            PregelixHyracksIntegrationUtil.runJob(bulkLoadJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runDataScan() throws Exception {
+        try {
+            JobSpecification scanSortPrintJobSpec = giraphTestJobGen.scanSortPrintGraph(NC1, EXPECT_RESULT_DIR
+                    + File.separator + job.getJobName());
+            PregelixHyracksIntegrationUtil.runJob(scanSortPrintJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runIndexScan() throws Exception {
+        try {
+            JobSpecification scanSortPrintJobSpec = giraphTestJobGen.scanIndexPrintGraph(NC1, ACTUAL_RESULT_DIR
+                    + File.separator + job.getJobName());
+            PregelixHyracksIntegrationUtil.runJob(scanSortPrintJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void compareResults() throws Exception {
+        PregelixJob job = new PregelixJob(GIRAPH_JOB_NAME);
+        TestUtils.compareWithResult(new File(EXPECT_RESULT_DIR + File.separator + job.getJobName()), new File(
+                ACTUAL_RESULT_DIR + File.separator + job.getJobName()));
+    }
+}
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
new file mode 100644
index 0000000..c353d84
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
@@ -0,0 +1,261 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.jobgen;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.example.ConnectedComponentsVertex;
+import edu.uci.ics.pregelix.example.ConnectedComponentsVertex.SimpleConnectedComponentsVertexOutputFormat;
+import edu.uci.ics.pregelix.example.PageRankVertex;
+import edu.uci.ics.pregelix.example.PageRankVertex.SimplePageRankVertexOutputFormat;
+import edu.uci.ics.pregelix.example.PageRankVertex.SimulatedPageRankVertexInputFormat;
+import edu.uci.ics.pregelix.example.ReachabilityVertex;
+import edu.uci.ics.pregelix.example.ReachabilityVertex.SimpleReachibilityVertexOutputFormat;
+import edu.uci.ics.pregelix.example.ShortestPathsVertex;
+import edu.uci.ics.pregelix.example.inputformat.TextConnectedComponentsInputFormat;
+import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
+import edu.uci.ics.pregelix.example.inputformat.TextReachibilityVertexInputFormat;
+import edu.uci.ics.pregelix.example.inputformat.TextShortestPathsInputFormat;
+import edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueAggregator;
+import edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex;
+import edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex.MaximalCliqueVertexOutputFormat;
+import edu.uci.ics.pregelix.example.maximalclique.TextMaximalCliqueInputFormat;
+import edu.uci.ics.pregelix.example.trianglecounting.TextTriangleCountingInputFormat;
+import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingAggregator;
+import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex;
+import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex.TriangleCountingVertexOutputFormat;
+
+public class JobGenerator {
+    private static String outputBase = "src/test/resources/jobs/";
+    private static String HDFS_INPUTPATH = "/webmap";
+    private static String HDFS_OUTPUTPAH = "/result";
+
+    private static String HDFS_INPUTPATH2 = "/webmapcomplex";
+    private static String HDFS_OUTPUTPAH2 = "/resultcomplex";
+
+    private static String HDFS_INPUTPATH3 = "/clique";
+    private static String HDFS_OUTPUTPAH3 = "/resultclique";
+
+    private static void generatePageRankJobReal(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generatePageRankJobRealComplex(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateShortestPathJobReal(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(ShortestPathsVertex.class);
+        job.setVertexInputFormatClass(TextShortestPathsInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().setLong(ShortestPathsVertex.SOURCE_ID, 0);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generatePageRankJobRealNoCombiner(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateConnectedComponentsJobReal(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(ConnectedComponentsVertex.class);
+        job.setVertexInputFormatClass(TextConnectedComponentsInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleConnectedComponentsVertexOutputFormat.class);
+        job.setMessageCombinerClass(ConnectedComponentsVertex.SimpleMinCombiner.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateConnectedComponentsJobRealComplex(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(ConnectedComponentsVertex.class);
+        job.setVertexInputFormatClass(TextConnectedComponentsInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleConnectedComponentsVertexOutputFormat.class);
+        job.setMessageCombinerClass(ConnectedComponentsVertex.SimpleMinCombiner.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateReachibilityRealComplex(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(ReachabilityVertex.class);
+        job.setVertexInputFormatClass(TextReachibilityVertexInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleReachibilityVertexOutputFormat.class);
+        job.setMessageCombinerClass(ReachabilityVertex.SimpleReachibilityCombiner.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
+        job.getConfiguration().setLong(ReachabilityVertex.SOURCE_ID, 1);
+        job.getConfiguration().setLong(ReachabilityVertex.DEST_ID, 10);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateReachibilityRealComplexNoConnectivity(String jobName, String outputPath)
+            throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(ReachabilityVertex.class);
+        job.setVertexInputFormatClass(TextReachibilityVertexInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleReachibilityVertexOutputFormat.class);
+        job.setMessageCombinerClass(ReachabilityVertex.SimpleReachibilityCombiner.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
+        job.getConfiguration().setLong(ReachabilityVertex.SOURCE_ID, 1);
+        job.getConfiguration().setLong(ReachabilityVertex.DEST_ID, 25);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generatePageRankJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
+        job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateShortestPathJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(ShortestPathsVertex.class);
+        job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
+        job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().setLong(ShortestPathsVertex.SOURCE_ID, 0);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generatePageRankJobRealDynamic(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+        job.setDynamicVertexValueSize(true);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateTriangleCountingJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(TriangleCountingVertex.class);
+        job.setGlobalAggregatorClass(TriangleCountingAggregator.class);
+        job.setVertexInputFormatClass(TextTriangleCountingInputFormat.class);
+        job.setVertexOutputFormatClass(TriangleCountingVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateMaximalCliqueJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(MaximalCliqueVertex.class);
+        job.setGlobalAggregatorClass(MaximalCliqueAggregator.class);
+        job.setDynamicVertexValueSize(true);
+        job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
+        job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void genPageRank() throws IOException {
+        generatePageRankJob("PageRank", outputBase + "PageRank.xml");
+        generatePageRankJobReal("PageRank", outputBase + "PageRankReal.xml");
+        generatePageRankJobRealDynamic("PageRank", outputBase + "PageRankRealDynamic.xml");
+        generatePageRankJobRealComplex("PageRank", outputBase + "PageRankRealComplex.xml");
+        generatePageRankJobRealNoCombiner("PageRank", outputBase + "PageRankRealNoCombiner.xml");
+    }
+
+    private static void genShortestPath() throws IOException {
+        generateShortestPathJob("ShortestPaths", outputBase + "ShortestPaths.xml");
+        generateShortestPathJobReal("ShortestPaths", outputBase + "ShortestPathsReal.xml");
+    }
+
+    private static void genConnectedComponents() throws IOException {
+        generateConnectedComponentsJobReal("ConnectedComponents", outputBase + "ConnectedComponentsReal.xml");
+        generateConnectedComponentsJobRealComplex("ConnectedComponents", outputBase
+                + "ConnectedComponentsRealComplex.xml");
+    }
+
+    private static void genReachibility() throws IOException {
+        generateReachibilityRealComplex("Reachibility", outputBase + "ReachibilityRealComplex.xml");
+        generateReachibilityRealComplexNoConnectivity("Reachibility", outputBase
+                + "ReachibilityRealComplexNoConnectivity.xml");
+    }
+
+    private static void genTriangleCounting() throws IOException {
+        generateTriangleCountingJob("Triangle Counting", outputBase + "TriangleCounting.xml");
+    }
+
+    private static void genMaximalClique() throws IOException {
+        generateMaximalCliqueJob("Maximal Clique", outputBase + "MaximalClique.xml");
+    }
+
    /**
     * Generates every job-description XML file used by the Pregelix example
     * test suites, one file per test case, under {@code outputBase}.
     *
     * @param args unused
     * @throws IOException if any configuration file cannot be written
     */
    public static void main(String[] args) throws IOException {
        genPageRank();
        genShortestPath();
        genConnectedComponents();
        genReachibility();
        genTriangleCounting();
        genMaximalClique();
    }
+}
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
new file mode 100644
index 0000000..5a556fa
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.jobrun;
+
+import java.io.File;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.core.jobgen.JobGen;
+import edu.uci.ics.pregelix.core.jobgen.JobGenInnerJoin;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoin;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSingleSort;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSort;
+import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+import edu.uci.ics.pregelix.example.util.TestUtils;
+
+public class RunJobTestCase extends TestCase {
+    private static final String NC1 = "nc1";
+    private static final String HYRACKS_APP_NAME = "pregelix";
+    private static String HDFS_INPUTPATH = "/webmap";
+    private static String HDFS_OUTPUTPAH = "/result";
+
+    private static String HDFS_INPUTPATH2 = "/webmapcomplex";
+    private static String HDFS_OUTPUTPAH2 = "/resultcomplex";
+
+    private static String HDFS_INPUTPATH3 = "/clique";
+    private static String HDFS_OUTPUTPAH3 = "/resultclique";
+
+    private final PregelixJob job;
+    private JobGen[] giraphJobGens;
+    private final String resultFileName;
+    private final String expectedFileName;
+    private final String jobFile;
+
+    public RunJobTestCase(String hadoopConfPath, String jobName, String jobFile, String resultFile, String expectedFile)
+            throws Exception {
+        super("test");
+        this.jobFile = jobFile;
+        this.job = new PregelixJob("test");
+        this.job.getConfiguration().addResource(new Path(jobFile));
+        this.job.getConfiguration().addResource(new Path(hadoopConfPath));
+        Path[] inputPaths = FileInputFormat.getInputPaths(job);
+        if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH)) {
+            FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+            FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        } else if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH2)) {
+            FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
+            FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
+        } else {
+            FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
+            FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+        }
+        job.setJobName(jobName);
+        this.resultFileName = resultFile;
+        this.expectedFileName = expectedFile;
+        giraphJobGens = new JobGen[4];
+        giraphJobGens[0] = new JobGenOuterJoin(job);
+        waitawhile();
+        giraphJobGens[1] = new JobGenInnerJoin(job);
+        waitawhile();
+        giraphJobGens[2] = new JobGenOuterJoinSort(job);
+        waitawhile();
+        giraphJobGens[3] = new JobGenOuterJoinSingleSort(job);
+    }
+
+    private void waitawhile() throws InterruptedException {
+        synchronized (this) {
+            this.wait(20);
+        }
+    }
+
+    @Test
+    public void test() throws Exception {
+        setUp();
+        for (JobGen jobGen : giraphJobGens) {
+            FileSystem dfs = FileSystem.get(job.getConfiguration());
+            dfs.delete(new Path(HDFS_OUTPUTPAH), true);
+            runCreate(jobGen);
+            runDataLoad(jobGen);
+            int i = 1;
+            boolean terminate = false;
+            do {
+                runLoopBodyIteration(jobGen, i);
+                terminate = IterationUtils.readTerminationState(job.getConfiguration(), jobGen.getJobId());
+                i++;
+            } while (!terminate);
+            runIndexScan(jobGen);
+            runHDFSWRite(jobGen);
+            runCleanup(jobGen);
+            compareResults();
+        }
+        tearDown();
+        waitawhile();
+    }
+
+    private void runCreate(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification treeCreateJobSpec = jobGen.generateCreatingJob();
+            PregelixHyracksIntegrationUtil.runJob(treeCreateJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runDataLoad(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification bulkLoadJobSpec = jobGen.generateLoadingJob();
+            PregelixHyracksIntegrationUtil.runJob(bulkLoadJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runLoopBodyIteration(JobGen jobGen, int iteration) throws Exception {
+        try {
+            JobSpecification loopBody = jobGen.generateJob(iteration);
+            PregelixHyracksIntegrationUtil.runJob(loopBody, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runIndexScan(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification scanSortPrintJobSpec = jobGen.scanIndexPrintGraph(NC1, resultFileName);
+            PregelixHyracksIntegrationUtil.runJob(scanSortPrintJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runHDFSWRite(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification scanSortPrintJobSpec = jobGen.scanIndexWriteGraph();
+            PregelixHyracksIntegrationUtil.runJob(scanSortPrintJobSpec, HYRACKS_APP_NAME);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runCleanup(JobGen jobGen) throws Exception {
+        try {
+            JobSpecification[] cleanups = jobGen.generateCleanup();
+            runJobArray(cleanups);
+        } catch (Exception e) {
+            throw e;
+        }
+    }
+
+    private void runJobArray(JobSpecification[] jobs) throws Exception {
+        for (JobSpecification job : jobs) {
+            PregelixHyracksIntegrationUtil.runJob(job, HYRACKS_APP_NAME);
+        }
+    }
+
+    private void compareResults() throws Exception {
+        TestUtils.compareWithResult(new File(resultFileName), new File(expectedFileName));
+    }
+
+    public String toString() {
+        return jobFile;
+    }
+}
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
new file mode 100644
index 0000000..79a5c3c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.jobrun;
+
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+import junit.framework.Test;
+import junit.framework.TestResult;
+import junit.framework.TestSuite;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
+import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
+
+@SuppressWarnings("deprecation")
+public class RunJobTestSuite extends TestSuite {
+	private static final Logger LOGGER = Logger.getLogger(RunJobTestSuite.class
+			.getName());
+
+	private static final String ACTUAL_RESULT_DIR = "actual";
+	private static final String EXPECTED_RESULT_DIR = "src/test/resources/expected";
+	private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+	private static final String PATH_TO_CLUSTER_STORE = "src/test/resources/cluster/stores.properties";
+	private static final String PATH_TO_CLUSTER_PROPERTIES = "src/test/resources/cluster/cluster.properties";
+	private static final String PATH_TO_JOBS = "src/test/resources/jobs/";
+	private static final String PATH_TO_IGNORE = "src/test/resources/ignore.txt";
+	private static final String PATH_TO_ONLY = "src/test/resources/only.txt";
+	private static final String FILE_EXTENSION_OF_RESULTS = "result";
+
+	private static final String DATA_PATH = "data/webmap/webmap_link.txt";
+	private static final String HDFS_PATH = "/webmap/";
+
+	private static final String DATA_PATH2 = "data/webmapcomplex/webmap_link.txt";
+	private static final String HDFS_PATH2 = "/webmapcomplex/";
+
+	private static final String DATA_PATH3 = "data/clique/clique.txt";
+	private static final String HDFS_PATH3 = "/clique/";
+
+	private static final String HYRACKS_APP_NAME = "pregelix";
+	private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR
+			+ File.separator + "conf.xml";
+	private MiniDFSCluster dfsCluster;
+
+	private JobConf conf = new JobConf();
+	private int numberOfNC = 2;
+
+	public void setUp() throws Exception {
+		ClusterConfig.setStorePath(PATH_TO_CLUSTER_STORE);
+		ClusterConfig.setClusterPropertiesPath(PATH_TO_CLUSTER_PROPERTIES);
+		cleanupStores();
+		PregelixHyracksIntegrationUtil.init();
+		PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);
+		LOGGER.info("Hyracks mini-cluster started");
+		FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+		FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+		startHDFS();
+	}
+
+	private void cleanupStores() throws IOException {
+		FileUtils.forceMkdir(new File("teststore"));
+		FileUtils.forceMkdir(new File("build"));
+		FileUtils.cleanDirectory(new File("teststore"));
+		FileUtils.cleanDirectory(new File("build"));
+	}
+
+	private void startHDFS() throws IOException {
+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+		FileSystem lfs = FileSystem.getLocal(new Configuration());
+		lfs.delete(new Path("build"), true);
+		System.setProperty("hadoop.log.dir", "logs");
+		dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
+		FileSystem dfs = FileSystem.get(conf);
+		Path src = new Path(DATA_PATH);
+		Path dest = new Path(HDFS_PATH);
+		dfs.mkdirs(dest);
+		dfs.copyFromLocalFile(src, dest);
+
+		src = new Path(DATA_PATH2);
+		dest = new Path(HDFS_PATH2);
+		dfs.mkdirs(dest);
+		dfs.copyFromLocalFile(src, dest);
+
+		src = new Path(DATA_PATH3);
+		dest = new Path(HDFS_PATH3);
+		dfs.mkdirs(dest);
+		dfs.copyFromLocalFile(src, dest);
+
+		DataOutputStream confOutput = new DataOutputStream(
+				new FileOutputStream(new File(HADOOP_CONF_PATH)));
+		conf.writeXml(confOutput);
+		confOutput.flush();
+		confOutput.close();
+	}
+
+	/**
+	 * cleanup hdfs cluster
+	 */
+	private void cleanupHDFS() throws Exception {
+		dfsCluster.shutdown();
+	}
+
+	public void tearDown() throws Exception {
+		PregelixHyracksIntegrationUtil.destroyApp(HYRACKS_APP_NAME);
+		PregelixHyracksIntegrationUtil.deinit();
+		LOGGER.info("Hyracks mini-cluster shut down");
+		cleanupHDFS();
+	}
+
+	public static Test suite() throws Exception {
+		List<String> ignores = getFileList(PATH_TO_IGNORE);
+		List<String> onlys = getFileList(PATH_TO_ONLY);
+		File testData = new File(PATH_TO_JOBS);
+		File[] queries = testData.listFiles();
+		RunJobTestSuite testSuite = new RunJobTestSuite();
+		testSuite.setUp();
+		boolean onlyEnabled = false;
+
+		if (onlys.size() > 0) {
+			onlyEnabled = true;
+		}
+		for (File qFile : queries) {
+			if (isInList(ignores, qFile.getName()))
+				continue;
+
+			if (qFile.isFile()) {
+				if (onlyEnabled && !isInList(onlys, qFile.getName())) {
+					continue;
+				} else {
+					String resultFileName = ACTUAL_RESULT_DIR + File.separator
+							+ jobExtToResExt(qFile.getName());
+					String expectedFileName = EXPECTED_RESULT_DIR
+							+ File.separator + jobExtToResExt(qFile.getName());
+					testSuite.addTest(new RunJobTestCase(HADOOP_CONF_PATH,
+							qFile.getName(),
+							qFile.getAbsolutePath().toString(), resultFileName,
+							expectedFileName));
+				}
+			}
+		}
+		return testSuite;
+	}
+
+	/**
+	 * Runs the tests and collects their result in a TestResult.
+	 */
+	@Override
+	public void run(TestResult result) {
+		try {
+			int testCount = countTestCases();
+			for (int i = 0; i < testCount; i++) {
+				// cleanupStores();
+				Test each = this.testAt(i);
+				if (result.shouldStop())
+					break;
+				runTest(each, result);
+			}
+			tearDown();
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
+
+	protected static List<String> getFileList(String ignorePath)
+			throws FileNotFoundException, IOException {
+		BufferedReader reader = new BufferedReader(new FileReader(ignorePath));
+		String s = null;
+		List<String> ignores = new ArrayList<String>();
+		while ((s = reader.readLine()) != null) {
+			ignores.add(s);
+		}
+		reader.close();
+		return ignores;
+	}
+
+	private static String jobExtToResExt(String fname) {
+		int dot = fname.lastIndexOf('.');
+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+	}
+
+	private static boolean isInList(List<String> onlys, String name) {
+		for (String only : onlys)
+			if (name.indexOf(only) >= 0)
+				return true;
+		return false;
+	}
+
+}
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
new file mode 100644
index 0000000..d89ec46
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.util;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+
/**
 * Helpers for comparing an actual result file against an expected one.
 * Numeric fields (those containing a '.') are considered equal when they
 * match after being narrowed to float precision; all other fields must match
 * exactly.
 */
public class TestUtils {

    /**
     * Compares the two files line by line.
     *
     * @param expectedFile file holding the expected result
     * @param actualFile   file holding the actual result
     * @throws Exception if the files differ in content or line count,
     *                   reporting the first differing line
     */
    public static void compareWithResult(File expectedFile, File actualFile) throws Exception {
        BufferedReader readerExpected = new BufferedReader(new FileReader(expectedFile));
        BufferedReader readerActual = new BufferedReader(new FileReader(actualFile));
        String lineExpected, lineActual;
        int num = 1;
        try {
            while ((lineExpected = readerExpected.readLine()) != null) {
                lineActual = readerActual.readLine();
                if (lineActual == null) {
                    // Actual file ended before the expected one.
                    throw new Exception("Actual result changed at line " + num + ":\n< " + lineExpected + "\n> ");
                }
                if (!equalStrings(lineExpected, lineActual)) {
                    throw new Exception("Result changed at line " + num + ":\n< " + lineExpected + "\n> "
                            + lineActual);
                }
                ++num;
            }
            lineActual = readerActual.readLine();
            if (lineActual != null) {
                // Actual file has extra trailing lines.
                throw new Exception("Actual result changed at line " + num + ":\n< \n> " + lineActual);
            }
        } finally {
            readerExpected.close();
            readerActual.close();
        }
    }

    /**
     * Returns true when the two strings are equal, treating space-separated
     * fields that contain a '.' as numbers compared at float precision.
     */
    private static boolean equalStrings(String s1, String s2) {
        String[] rowsOne = s1.split("\n");
        String[] rowsTwo = s2.split("\n");

        if (rowsOne.length != rowsTwo.length)
            return false;

        for (int i = 0; i < rowsOne.length; i++) {
            String row1 = rowsOne[i];
            String row2 = rowsTwo[i];

            if (row1.equals(row2))
                continue;

            String[] fields1 = row1.split(" ");
            String[] fields2 = row2.split(" ");

            // Bug fix: the original indexed fields2 by fields1's length,
            // crashing when the actual row was shorter and silently passing
            // when it had extra trailing fields.
            if (fields1.length != fields2.length)
                return false;

            for (int j = 0; j < fields1.length; j++) {
                if (fields1[j].equals(fields2[j])) {
                    continue;
                } else if (fields1[j].indexOf('.') < 0) {
                    return false;
                } else {
                    try {
                        // Narrow to float so tiny double-precision noise in
                        // the printed values does not fail the comparison.
                        float float1 = (float) Double.parseDouble(fields1[j]);
                        float float2 = (float) Double.parseDouble(fields2[j]);
                        if (float1 != float2) {
                            return false;
                        }
                    } catch (NumberFormatException e) {
                        // A non-numeric field that merely contains '.' is a
                        // mismatch, not a crash.
                        return false;
                    }
                }
            }
        }
        return true;
    }

}
diff --git a/pregelix/pregelix-example/src/test/resources/cluster/cluster.properties b/pregelix/pregelix-example/src/test/resources/cluster/cluster.properties
new file mode 100644
index 0000000..14f8bd4
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/cluster/cluster.properties
@@ -0,0 +1,37 @@
+#The CC port for Hyracks clients
+CC_CLIENTPORT=3099
+
+#The CC port for Hyracks cluster management
+CC_CLUSTERPORT=1099
+
+#The directory of hyracks binaries
+HYRACKS_HOME=../../../../hyracks
+
+#The tmp directory for cc to install jars
+CCTMP_DIR=/tmp/t1
+
+#The tmp directory for nc to install jars
+NCTMP_DIR=/tmp/t2
+
+#The directory to put cc logs
+CCLOGS_DIR=$CCTMP_DIR/logs
+
+#The directory to put nc logs
+NCLOGS_DIR=$NCTMP_DIR/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS="/tmp/t3,/tmp/t4"
+
+#The JAVA_HOME
+JAVA_HOME=$JAVA_HOME
+
+#The frame size of the internal dataflow engine
+FRAME_SIZE=65536
+
+#CC JAVA_OPTS
+CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx3g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/pregelix/pregelix-example/src/test/resources/cluster/stores.properties b/pregelix/pregelix-example/src/test/resources/cluster/stores.properties
new file mode 100644
index 0000000..daf881e
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/cluster/stores.properties
@@ -0,0 +1 @@
+store=teststore
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
new file mode 100644
index 0000000..45376e2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
@@ -0,0 +1,20 @@
+0 0
+1 0
+2 0
+3 0
+4 0
+5 0
+6 0
+7 0
+8 0
+9 0
+10 0
+11 0
+12 0
+13 0
+14 0
+15 0
+16 0
+17 0
+18 0
+19 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
new file mode 100644
index 0000000..dbc30fc
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
@@ -0,0 +1,23 @@
+0 0
+1 0
+2 0
+3 0
+4 0
+5 0
+6 0
+7 0
+8 0
+9 0
+10 0
+11 0
+12 0
+13 0
+14 0
+15 0
+16 0
+17 0
+18 0
+19 0
+21 21
+25 25
+27 27
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
new file mode 100644
index 0000000..d238037
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
@@ -0,0 +1,7 @@
+1 1,2,3,4;
+2 2,3,4;
+3 
+4 
+5 
+6 
+7 
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank.result b/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
new file mode 100644
index 0000000..9c4d83a
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
@@ -0,0 +1,20 @@
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329425
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
new file mode 100644
index 0000000..6432eda
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
@@ -0,0 +1,20 @@
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
new file mode 100644
index 0000000..2bd09e1
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
@@ -0,0 +1,23 @@
+0 0.0072088164890121405
+1 0.12352056961948686
+2 0.12045670441668178
+3 0.06798545786459467
+4 0.03387281259892814
+5 0.01942600635480669
+6 0.013661020012182747
+7 0.0109034351563503
+8 0.009241684574402657
+9 0.008082028259564783
+10 0.007208817414047232
+11 0.07555839219845861
+12 0.07249452699565352
+13 0.05063539695954156
+14 0.029644452692487822
+15 0.018670183493927354
+16 0.013558283213067561
+17 0.010892790899883237
+18 0.009240874593661061
+19 0.008081987856433137
+21 0.006521739130434782
+25 0.006521739130434782
+27 0.006521739130434782
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
new file mode 100644
index 0000000..6432eda
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
@@ -0,0 +1,20 @@
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
new file mode 100755
index 0000000..9a747a6
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
@@ -0,0 +1,20 @@
+0 0.008290140026154316
+1 0.15351528192471647
+2 0.14646839195826475
+3 0.08125113985998211
+4 0.03976979906329425
+5 0.0225041581462058
+6 0.01573627682495385
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.1535152819247165
+12 0.14646839195826475
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
new file mode 100644
index 0000000..a1dfc0f
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
@@ -0,0 +1,23 @@
+0 2
+1 3
+2 1
+3 1
+4 1
+5 1
+6 1
+7 1
+8 1
+9 1
+10 3
+11 2
+12 2
+13 2
+14 2
+15 2
+16 2
+17 2
+18 2
+19 2
+21 0
+25 0
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
new file mode 100644
index 0000000..1693fb2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
@@ -0,0 +1,23 @@
+0 1
+1 1
+2 1
+3 1
+4 1
+5 1
+6 1
+7 1
+8 1
+9 1
+10 1
+11 1
+12 1
+13 1
+14 1
+15 1
+16 1
+17 1
+18 1
+19 1
+21 0
+25 2
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
new file mode 100644
index 0000000..46d1c73
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
@@ -0,0 +1,20 @@
+0 0.0
+1 0.0
+2 100.0
+3 300.0
+4 600.0
+5 1000.0
+6 1500.0
+7 2100.0
+8 2800.0
+9 3600.0
+10 4500.0
+11 5500.0
+12 6600.0
+13 7800.0
+14 9100.0
+15 10500.0
+16 12000.0
+17 13600.0
+18 15300.0
+19 17100.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
new file mode 100644
index 0000000..b42462f
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
@@ -0,0 +1,20 @@
+0 0.0
+1 1.0
+2 2.0
+3 3.0
+4 4.0
+5 5.0
+6 6.0
+7 7.0
+8 8.0
+9 9.0
+10 10.0
+11 11.0
+12 12.0
+13 13.0
+14 14.0
+15 15.0
+16 16.0
+17 17.0
+18 18.0
+19 19.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
new file mode 100644
index 0000000..4818e13
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
@@ -0,0 +1,7 @@
+1 3
+2 2
+3 0
+4 0
+5 1
+6 0
+7 0
diff --git a/pregelix/pregelix-example/src/test/resources/hadoop/conf/core-site.xml b/pregelix/pregelix-example/src/test/resources/hadoop/conf/core-site.xml
new file mode 100644
index 0000000..47dfac5
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/hadoop/conf/core-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://127.0.0.1:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
diff --git a/pregelix/pregelix-example/src/test/resources/hadoop/conf/hdfs-site.xml b/pregelix/pregelix-example/src/test/resources/hadoop/conf/hdfs-site.xml
new file mode 100644
index 0000000..8d29b1d
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/hadoop/conf/hdfs-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>1</value>
+</property>
+
+<property>
+	<name>dfs.block.size</name>
+	<value>65536</value>
+</property>
+
+</configuration>
diff --git a/pregelix/pregelix-example/src/test/resources/hadoop/conf/log4j.properties b/pregelix/pregelix-example/src/test/resources/hadoop/conf/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/hadoop/conf/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to the root logger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
diff --git a/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml b/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
new file mode 100644
index 0000000..71450f1
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+     <name>mapred.tasktracker.map.tasks.maximum</name>
+     <value>20</value>
+  </property>
+   <property>
+      <name>mapred.tasktracker.reduce.tasks.maximum</name>
+      <value>20</value>
+   </property>
+   <property>
+      <name>mapred.max.split.size</name>
+      <value>128</value>
+   </property>
+
+</configuration>
diff --git a/pregelix/pregelix-example/src/test/resources/hyracks-deployment.properties b/pregelix/pregelix-example/src/test/resources/hyracks-deployment.properties
new file mode 100644
index 0000000..9c42b89
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/hyracks-deployment.properties
@@ -0,0 +1,2 @@
+#cc.bootstrap.class=edu.uci.ics.asterix.hyracks.bootstrap.CCBootstrapImpl
+nc.bootstrap.class=edu.uci.ics.pregelix.runtime.bootstrap.NCBootstrapImpl
diff --git a/pregelix/pregelix-example/src/test/resources/ignore.txt b/pregelix/pregelix-example/src/test/resources/ignore.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/ignore.txt
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsReal.xml b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsReal.xml
new file mode 100644
index 0000000..22ae6cf
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsReal.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>ConnectedComponents</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.ConnectedComponentsVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.ConnectedComponentsVertex$SimpleMinCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.ConnectedComponentsVertex$SimpleConnectedComponentsVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextConnectedComponentsInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsRealComplex.xml b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsRealComplex.xml
new file mode 100644
index 0000000..50662f9
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsRealComplex.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmapcomplex</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultcomplex</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>ConnectedComponents</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>23</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.ConnectedComponentsVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.ConnectedComponentsVertex$SimpleMinCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.ConnectedComponentsVertex$SimpleConnectedComponentsVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextConnectedComponentsInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
new file mode 100644
index 0000000..616c647
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Maximal Clique</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex$MaximalCliqueVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.TextMaximalCliqueInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
new file mode 100644
index 0000000..744e5b0
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimpleSumCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimulatedPageRankVertexInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankReal.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankReal.xml
new file mode 100644
index 0000000..b51bd98
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankReal.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimpleSumCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealComplex.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealComplex.xml
new file mode 100644
index 0000000..a9e43bd
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealComplex.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmapcomplex</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultcomplex</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>23</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimpleSumCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
new file mode 100644
index 0000000..c1a04ae
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimpleSumCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealNoCombiner.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealNoCombiner.xml
new file mode 100644
index 0000000..410ea8b
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealNoCombiner.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplex.xml b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplex.xml
new file mode 100644
index 0000000..0332ec5
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplex.xml
@@ -0,0 +1,144 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmapcomplex</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultcomplex</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Reachibility</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>23</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.ReachabilityVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>ReachibilityVertex.destId</name><value>10</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.ReachabilityVertex$SimpleReachibilityCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.ReachabilityVertex$SimpleReachibilityVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextReachibilityVertexInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>ReachibilityVertex.sourceId</name><value>1</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplexNoConnectivity.xml b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplexNoConnectivity.xml
new file mode 100644
index 0000000..4f280fc
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplexNoConnectivity.xml
@@ -0,0 +1,144 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmapcomplex</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultcomplex</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Reachibility</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>23</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.ReachabilityVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>ReachibilityVertex.destId</name><value>25</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.ReachabilityVertex$SimpleReachibilityCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.ReachabilityVertex$SimpleReachibilityVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextReachibilityVertexInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>ReachibilityVertex.sourceId</name><value>1</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
new file mode 100644
index 0000000..9e791e2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>ShortestPaths</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.ShortestPathsVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.ShortestPathsVertex$SimpleMinCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>SimpleShortestPathsVertex.sourceId</name><value>0</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimulatedPageRankVertexInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPathsReal.xml b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPathsReal.xml
new file mode 100644
index 0000000..90caf6b
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPathsReal.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>ShortestPaths</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.ShortestPathsVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.ShortestPathsVertex$SimpleMinCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>SimpleShortestPathsVertex.sourceId</name><value>0</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextShortestPathsInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
new file mode 100644
index 0000000..ee2acc1
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Triangle Counting</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex$TriangleCountingVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/log4j.properties b/pregelix/pregelix-example/src/test/resources/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
diff --git a/pregelix/pregelix-example/src/test/resources/logging.properties b/pregelix/pregelix-example/src/test/resources/logging.properties
new file mode 100644
index 0000000..b8f2be9
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/logging.properties
@@ -0,0 +1,66 @@
+############################################################
+#  	Default Logging Configuration File
+#
+# You can use a different file by specifying a filename
+# with the java.util.logging.config.file system property.  
+# For example java -Djava.util.logging.config.file=myfile
+############################################################
+
+############################################################
+#  	Global properties
+############################################################
+
+# "handlers" specifies a comma separated list of log Handler 
+# classes.  These handlers will be installed during VM startup.
+# Note that these classes must be on the system classpath.
+# By default we only configure a ConsoleHandler, which will only
+# show messages at the INFO and above levels.
+
+handlers= java.util.logging.ConsoleHandler
+
+# To also add the FileHandler, use the following line instead.
+
+# handlers= java.util.logging.FileHandler, java.util.logging.ConsoleHandler
+
+# Default global logging level.
+# This specifies which kinds of events are logged across
+# all loggers.  For any given facility this global level
+# can be overridden by a facility specific level
+# Note that the ConsoleHandler also has a separate level
+# setting to limit messages printed to the console.
+
+.level= SEVERE
+# .level= INFO
+# .level= FINE
+# .level = FINEST
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# default file output is in user's home directory.
+
+# java.util.logging.FileHandler.pattern = %h/java%u.log
+# java.util.logging.FileHandler.limit = 50000
+# java.util.logging.FileHandler.count = 1
+# java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter
+
+# Limit the messages that are printed on the console to FINEST and above.
+
+java.util.logging.ConsoleHandler.level = FINEST
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+# For example, set the com.xyz.foo logger to only log SEVERE
+# messages:
+
+#edu.uci.ics.asterix.level = FINE
+#edu.uci.ics.algebricks.level = FINE
+edu.uci.ics.hyracks.level = SEVERE
+#edu.uci.ics.hyracks.control.nc.net.level = FINE
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/only.txt b/pregelix/pregelix-example/src/test/resources/only.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/only.txt
diff --git a/pregelix/pregelix-runtime/pom.xml b/pregelix/pregelix-runtime/pom.xml
new file mode 100644
index 0000000..e352cf4
--- /dev/null
+++ b/pregelix/pregelix-runtime/pom.xml
@@ -0,0 +1,155 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>pregelix-runtime</artifactId>
+	<packaging>jar</packaging>
+	<name>pregelix-runtime</name>
+
+	<parent>
+    		<groupId>edu.uci.ics.hyracks</groupId>
+    		<artifactId>pregelix</artifactId>
+    		<version>0.2.3-SNAPSHOT</version>
+  	</parent>
+
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx512m -Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>teststore*</include>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-dataflow-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>pregelix-dataflow</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-data-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-btree</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-ipc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/NCBootstrapImpl.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/NCBootstrapImpl.java
new file mode 100644
index 0000000..76c725e
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/NCBootstrapImpl.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.bootstrap;
+
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.application.INCBootstrap;
+import edu.uci.ics.pregelix.dataflow.context.RuntimeContext;
+
+public class NCBootstrapImpl implements INCBootstrap { // NC-side bootstrap: owns the per-node RuntimeContext lifecycle
+    private static final Logger LOGGER = Logger.getLogger(NCBootstrapImpl.class.getName());
+    private INCApplicationContext appCtx; // injected via setApplicationContext() before start() is called
+
+    @Override
+    public void start() throws Exception {
+        LOGGER.info("Starting NC Bootstrap");
+        RuntimeContext rCtx = new RuntimeContext(appCtx); // create the node-wide runtime context
+        appCtx.setApplicationObject(rCtx); // publish it on the app context so tasks can retrieve it
+        LOGGER.info("Initialized RuntimeContext: " + rCtx);
+    }
+
+    @Override
+    public void stop() throws Exception {
+        LOGGER.info("Stopping NC Bootstrap");
+        RuntimeContext rCtx = (RuntimeContext) appCtx.getApplicationObject();
+        rCtx.close(); // dispose the context created in start()
+    }
+
+    @Override
+    public void setApplicationContext(INCApplicationContext appCtx) {
+        this.appCtx = appCtx;
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/StorageManagerInterface.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/StorageManagerInterface.java
new file mode 100644
index 0000000..57bbfbe
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/StorageManagerInterface.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.bootstrap;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.pregelix.dataflow.context.RuntimeContext;
+
+public class StorageManagerInterface implements IStorageManagerInterface { // adapts RuntimeContext storage services to the Hyracks SPI
+    private static final long serialVersionUID = 1L;
+
+    public static final StorageManagerInterface INSTANCE = new StorageManagerInterface(); // stateless singleton
+
+    private StorageManagerInterface() {
+        // singleton: use INSTANCE
+    }
+
+    @Override
+    public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getBufferCache(); // delegate to the task's RuntimeContext
+    }
+
+    @Override
+    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getFileMapManager(); // delegate to the task's RuntimeContext
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/TreeIndexRegistryProvider.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/TreeIndexRegistryProvider.java
new file mode 100644
index 0000000..7d66422
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/TreeIndexRegistryProvider.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.bootstrap;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+import edu.uci.ics.pregelix.dataflow.context.RuntimeContext;
+
+public class TreeIndexRegistryProvider implements IIndexRegistryProvider<IIndex> { // exposes the RuntimeContext's tree-index registry to dataflow operators
+    private static final long serialVersionUID = 1L;
+
+    public static final TreeIndexRegistryProvider INSTANCE = new TreeIndexRegistryProvider(); // stateless singleton
+
+    private TreeIndexRegistryProvider() {
+        // singleton: use INSTANCE
+    }
+
+    @Override
+    public IndexRegistry<IIndex> getRegistry(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getTreeIndexRegistry(); // delegate to the task's RuntimeContext
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
new file mode 100644
index 0000000..a0dca3d
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
@@ -0,0 +1,284 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.function;
+
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.util.ArrayListWritable;
+import edu.uci.ics.pregelix.api.util.ArrayListWritable.ArrayIterator;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.api.util.FrameTupleUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunction;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.ResetableByteArrayOutputStream;
+
+/**
+ * Builds the per-task update function for an iteration body.  Each input
+ * tuple is (vertexId, msgContentList, vertexId, vertex); the vertex is
+ * computed against its messages and the results are routed to the msg,
+ * termination, global-aggregate, insert, delete and optional alive channels.
+ */
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class ComputeUpdateFunctionFactory implements IUpdateFunctionFactory {
+    private static final long serialVersionUID = 1L;
+    private final IConfigurationFactory confFactory;
+
+    public ComputeUpdateFunctionFactory(IConfigurationFactory confFactory) {
+        this.confFactory = confFactory;
+    }
+
+    @Override
+    public IUpdateFunction createFunction() {
+        return new IUpdateFunction() {
+            // for writing intermediate data
+            private final ArrayTupleBuilder tbMsg = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbAlive = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbTerminate = new ArrayTupleBuilder(1);
+            private final ArrayTupleBuilder tbGlobalAggregate = new ArrayTupleBuilder(1);
+            private final ArrayTupleBuilder tbInsert = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbDelete = new ArrayTupleBuilder(1);
+
+            // for writing out to message channel
+            private IFrameWriter writerMsg;
+            private FrameTupleAppender appenderMsg;
+            private ByteBuffer bufferMsg;
+
+            // for writing out to alive message channel
+            private IFrameWriter writerAlive;
+            private FrameTupleAppender appenderAlive;
+            private ByteBuffer bufferAlive;
+            private boolean pushAlive;
+
+            // for writing out termination detection control channel
+            private IFrameWriter writerTerminate;
+            private FrameTupleAppender appenderTerminate;
+            private ByteBuffer bufferTerminate;
+            private boolean terminate = true;
+
+            // for writing out termination detection control channel
+            private IFrameWriter writerGlobalAggregate;
+            private FrameTupleAppender appenderGlobalAggregate;
+            private ByteBuffer bufferGlobalAggregate;
+            private GlobalAggregator aggregator;
+
+            // for writing out to insert vertex channel
+            private IFrameWriter writerInsert;
+            private FrameTupleAppender appenderInsert;
+            private ByteBuffer bufferInsert;
+
+            // for writing out to delete vertex channel
+            private IFrameWriter writerDelete;
+            private FrameTupleAppender appenderDelete;
+            private ByteBuffer bufferDelete;
+
+            private Vertex vertex;
+            private ResetableByteArrayOutputStream bbos = new ResetableByteArrayOutputStream();
+            private DataOutput output = new DataOutputStream(bbos);
+
+            private ArrayIterator msgIterator = new ArrayIterator();
+            private final List<IFrameWriter> writers = new ArrayList<IFrameWriter>();
+            private final List<FrameTupleAppender> appenders = new ArrayList<FrameTupleAppender>();
+            private final List<ArrayTupleBuilder> tbs = new ArrayList<ArrayTupleBuilder>();
+            private Configuration conf;
+            private boolean dynamicStateLength;
+
+            @Override
+            public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
+                    throws HyracksDataException {
+                this.conf = confFactory.createConfiguration();
+                this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
+                this.aggregator = BspUtils.createGlobalAggregator(conf);
+                this.aggregator.init();
+
+                this.writerMsg = writers[0];
+                this.bufferMsg = ctx.allocateFrame();
+                this.appenderMsg = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderMsg.reset(bufferMsg, true);
+                this.writers.add(writerMsg);
+                this.appenders.add(appenderMsg);
+
+                this.writerTerminate = writers[1];
+                this.bufferTerminate = ctx.allocateFrame();
+                this.appenderTerminate = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderTerminate.reset(bufferTerminate, true);
+
+                this.writerGlobalAggregate = writers[2];
+                this.bufferGlobalAggregate = ctx.allocateFrame();
+                this.appenderGlobalAggregate = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderGlobalAggregate.reset(bufferGlobalAggregate, true);
+
+                this.writerInsert = writers[3];
+                this.bufferInsert = ctx.allocateFrame();
+                this.appenderInsert = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderInsert.reset(bufferInsert, true);
+                this.writers.add(writerInsert);
+                this.appenders.add(appenderInsert);
+
+                this.writerDelete = writers[4];
+                this.bufferDelete = ctx.allocateFrame();
+                this.appenderDelete = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderDelete.reset(bufferDelete, true);
+                this.writers.add(writerDelete);
+                this.appenders.add(appenderDelete);
+
+                if (writers.length > 5) {
+                    this.writerAlive = writers[5];
+                    this.bufferAlive = ctx.allocateFrame();
+                    this.appenderAlive = new FrameTupleAppender(ctx.getFrameSize());
+                    this.appenderAlive.reset(bufferAlive, true);
+                    this.pushAlive = true;
+                    this.writers.add(writerAlive);
+                    this.appenders.add(appenderAlive);
+                }
+
+                tbs.add(tbMsg);
+                tbs.add(tbInsert);
+                tbs.add(tbDelete);
+                tbs.add(tbAlive);
+            }
+
+            @Override
+            public void process(Object[] tuple) throws HyracksDataException {
+                // vertex Id, msg content List, vertex Id, vertex
+                tbMsg.reset();
+                tbAlive.reset();
+
+                vertex = (Vertex) tuple[3];
+                vertex.setOutputWriters(writers);
+                vertex.setOutputAppenders(appenders);
+                vertex.setOutputTupleBuilders(tbs);
+
+                ArrayListWritable msgContentList = (ArrayListWritable) tuple[1];
+                msgContentList.reset(msgIterator);
+
+                // a halted vertex with no incoming messages stays inactive this superstep
+                if (!msgIterator.hasNext() && vertex.isHalted())
+                    return;
+
+                try {
+                    vertex.compute(msgIterator);
+                    vertex.finishCompute();
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+
+                /**
+                 * this partition should not terminate
+                 */
+                if (terminate && (!vertex.isHalted() || vertex.hasMessage()))
+                    terminate = false;
+
+                aggregator.step(vertex);
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                FrameTupleUtils.flushTuplesFinal(appenderMsg, writerMsg);
+                FrameTupleUtils.flushTuplesFinal(appenderInsert, writerInsert);
+                FrameTupleUtils.flushTuplesFinal(appenderDelete, writerDelete);
+
+                if (pushAlive)
+                    FrameTupleUtils.flushTuplesFinal(appenderAlive, writerAlive);
+                if (!terminate) {
+                    writeOutTerminationState();
+                }
+
+                /** write out global aggregate value */
+                writeOutGlobalAggregate();
+            }
+
+            private void writeOutGlobalAggregate() throws HyracksDataException {
+                try {
+                    /**
+                     * get partial aggregate result and flush to the final
+                     * aggregator
+                     */
+                    Writable agg = aggregator.finishPartial();
+                    agg.write(tbGlobalAggregate.getDataOutput());
+                    tbGlobalAggregate.addFieldEndOffset();
+                    appenderGlobalAggregate.append(tbGlobalAggregate.getFieldEndOffsets(),
+                            tbGlobalAggregate.getByteArray(), 0, tbGlobalAggregate.getSize());
+                    FrameTupleUtils.flushTuplesFinal(appenderGlobalAggregate, writerGlobalAggregate);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            private void writeOutTerminationState() throws HyracksDataException {
+                try {
+                    tbTerminate.getDataOutput().writeLong(0);
+                    tbTerminate.addFieldEndOffset();
+                    appenderTerminate.append(tbTerminate.getFieldEndOffsets(), tbTerminate.getByteArray(), 0,
+                            tbTerminate.getSize());
+                    FrameTupleUtils.flushTuplesFinal(appenderTerminate, writerTerminate);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            /**
+             * Persists the computed vertex state: overwrites the stored value
+             * fields in place when the value size is fixed; otherwise rebuilds
+             * (id, vertex) into cloneUpdateTb — presumably consumed by the
+             * caller for re-insertion (TODO confirm against caller).
+             */
+            @Override
+            public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
+                try {
+                    if (vertex != null && vertex.hasUpdate()) {
+                        if (!dynamicStateLength) {
+                            // in-place update
+                            int fieldCount = tupleRef.getFieldCount();
+                            for (int i = 1; i < fieldCount; i++) {
+                                byte[] data = tupleRef.getFieldData(i);
+                                int offset = tupleRef.getFieldStart(i);
+                                bbos.setByteArray(data, offset);
+                                vertex.write(output);
+                            }
+                        } else {
+                            // write the vertex id
+                            DataOutput tbOutput = cloneUpdateTb.getDataOutput();
+                            vertex.getVertexId().write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
+
+                            // write the vertex value
+                            vertex.write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
+                        }
+                    }
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        };
+    }
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
new file mode 100644
index 0000000..3d8a355
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.function;
+
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.util.ArrayListWritable.ArrayIterator;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.api.util.FrameTupleUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunction;
+import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.ResetableByteArrayOutputStream;
+
+/**
+ * Builds the update function for the initial superstep.  Input tuples are
+ * (vertexId, vertex); every vertex is computed with a shared empty message
+ * list, and results are routed to the msg, termination, global-aggregate,
+ * insert, delete and optional alive channels.
+ */
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class StartComputeUpdateFunctionFactory implements IUpdateFunctionFactory {
+    private static final long serialVersionUID = 1L;
+    private final IConfigurationFactory confFactory;
+
+    public StartComputeUpdateFunctionFactory(IConfigurationFactory confFactory) {
+        this.confFactory = confFactory;
+    }
+
+    @Override
+    public IUpdateFunction createFunction() {
+        return new IUpdateFunction() {
+            // for writing intermediate data
+            private final ArrayTupleBuilder tbMsg = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbAlive = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbTerminate = new ArrayTupleBuilder(1);
+            private final ArrayTupleBuilder tbGlobalAggregate = new ArrayTupleBuilder(1);
+            private final ArrayTupleBuilder tbInsert = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbDelete = new ArrayTupleBuilder(1);
+
+            // for writing out to message channel
+            private IFrameWriter writerMsg;
+            private FrameTupleAppender appenderMsg;
+            private ByteBuffer bufferMsg;
+
+            // for writing out to alive message channel
+            private IFrameWriter writerAlive;
+            private FrameTupleAppender appenderAlive;
+            private ByteBuffer bufferAlive;
+            private boolean pushAlive;
+
+            // for writing out termination detection control channel
+            private IFrameWriter writerGlobalAggregate;
+            private FrameTupleAppender appenderGlobalAggregate;
+            private ByteBuffer bufferGlobalAggregate;
+            private GlobalAggregator aggregator;
+
+            // for writing out the global aggregate
+            private IFrameWriter writerTerminate;
+            private FrameTupleAppender appenderTerminate;
+            private ByteBuffer bufferTerminate;
+            private boolean terminate = true;
+
+            // for writing out to insert vertex channel
+            private IFrameWriter writerInsert;
+            private FrameTupleAppender appenderInsert;
+            private ByteBuffer bufferInsert;
+
+            // for writing out to delete vertex channel
+            private IFrameWriter writerDelete;
+            private FrameTupleAppender appenderDelete;
+            private ByteBuffer bufferDelete;
+
+            // dummy empty msgList
+            private MsgList msgList = new MsgList();
+            private ArrayIterator msgIterator = new ArrayIterator();
+
+            private Vertex vertex;
+            private ResetableByteArrayOutputStream bbos = new ResetableByteArrayOutputStream();
+            private DataOutput output = new DataOutputStream(bbos);
+
+            private final List<IFrameWriter> writers = new ArrayList<IFrameWriter>();
+            private final List<FrameTupleAppender> appenders = new ArrayList<FrameTupleAppender>();
+            private final List<ArrayTupleBuilder> tbs = new ArrayList<ArrayTupleBuilder>();
+            private Configuration conf;
+            private boolean dynamicStateLength;
+
+            @Override
+            public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
+                    throws HyracksDataException {
+                this.conf = confFactory.createConfiguration();
+                this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
+                this.aggregator = BspUtils.createGlobalAggregator(conf);
+                this.aggregator.init();
+
+                this.writerMsg = writers[0];
+                this.bufferMsg = ctx.allocateFrame();
+                this.appenderMsg = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderMsg.reset(bufferMsg, true);
+                this.writers.add(writerMsg);
+                this.appenders.add(appenderMsg);
+
+                this.writerTerminate = writers[1];
+                this.bufferTerminate = ctx.allocateFrame();
+                this.appenderTerminate = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderTerminate.reset(bufferTerminate, true);
+
+                this.writerGlobalAggregate = writers[2];
+                this.bufferGlobalAggregate = ctx.allocateFrame();
+                this.appenderGlobalAggregate = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderGlobalAggregate.reset(bufferGlobalAggregate, true);
+
+                this.writerInsert = writers[3];
+                this.bufferInsert = ctx.allocateFrame();
+                this.appenderInsert = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderInsert.reset(bufferInsert, true);
+                this.writers.add(writerInsert);
+                this.appenders.add(appenderInsert);
+
+                this.writerDelete = writers[4];
+                this.bufferDelete = ctx.allocateFrame();
+                this.appenderDelete = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderDelete.reset(bufferDelete, true);
+                this.writers.add(writerDelete);
+                this.appenders.add(appenderDelete);
+
+                if (writers.length > 5) {
+                    this.writerAlive = writers[5];
+                    this.bufferAlive = ctx.allocateFrame();
+                    this.appenderAlive = new FrameTupleAppender(ctx.getFrameSize());
+                    this.appenderAlive.reset(bufferAlive, true);
+                    this.pushAlive = true;
+                    this.writers.add(writerAlive);
+                    this.appenders.add(appenderAlive);
+                }
+                msgList.reset(msgIterator);
+
+                tbs.add(tbMsg);
+                tbs.add(tbInsert);
+                tbs.add(tbDelete);
+                tbs.add(tbAlive);
+            }
+
+            @Override
+            public void process(Object[] tuple) throws HyracksDataException {
+                // vertex Id, vertex
+                tbMsg.reset();
+                tbAlive.reset();
+
+                vertex = (Vertex) tuple[1];
+                vertex.setOutputWriters(writers);
+                vertex.setOutputAppenders(appenders);
+                vertex.setOutputTupleBuilders(tbs);
+
+                // msgIterator wraps the empty msgList, so only the halted check matters here
+                if (!msgIterator.hasNext() && vertex.isHalted())
+                    return;
+
+                try {
+                    vertex.compute(msgIterator);
+                    vertex.finishCompute();
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+
+                /**
+                 * this partition should not terminate
+                 */
+                if (terminate && (!vertex.isHalted() || vertex.hasMessage()))
+                    terminate = false;
+
+                /**
+                 * call the global aggregator
+                 */
+                aggregator.step(vertex);
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                FrameTupleUtils.flushTuplesFinal(appenderMsg, writerMsg);
+                FrameTupleUtils.flushTuplesFinal(appenderInsert, writerInsert);
+                FrameTupleUtils.flushTuplesFinal(appenderDelete, writerDelete);
+
+                if (pushAlive)
+                    FrameTupleUtils.flushTuplesFinal(appenderAlive, writerAlive);
+                if (!terminate) {
+                    writeOutTerminationState();
+                }
+
+                /** write out global aggregate value */
+                writeOutGlobalAggregate();
+            }
+
+            private void writeOutGlobalAggregate() throws HyracksDataException {
+                try {
+                    /**
+                     * get partial aggregate result and flush to the final
+                     * aggregator
+                     */
+                    Writable agg = aggregator.finishPartial();
+                    agg.write(tbGlobalAggregate.getDataOutput());
+                    tbGlobalAggregate.addFieldEndOffset();
+                    appenderGlobalAggregate.append(tbGlobalAggregate.getFieldEndOffsets(),
+                            tbGlobalAggregate.getByteArray(), 0, tbGlobalAggregate.getSize());
+                    FrameTupleUtils.flushTuplesFinal(appenderGlobalAggregate, writerGlobalAggregate);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            private void writeOutTerminationState() throws HyracksDataException {
+                try {
+                    tbTerminate.getDataOutput().writeLong(0);
+                    tbTerminate.addFieldEndOffset();
+                    appenderTerminate.append(tbTerminate.getFieldEndOffsets(), tbTerminate.getByteArray(), 0,
+                            tbTerminate.getSize());
+                    FrameTupleUtils.flushTuplesFinal(appenderTerminate, writerTerminate);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            /**
+             * Persists the computed vertex state: overwrites the stored value
+             * fields in place when the value size is fixed; otherwise rebuilds
+             * (id, vertex) into cloneUpdateTb — presumably consumed by the
+             * caller for re-insertion (TODO confirm against caller).
+             */
+            @Override
+            public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
+                try {
+                    if (vertex != null && vertex.hasUpdate()) {
+                        if (!dynamicStateLength) {
+                            // in-place update
+                            int fieldCount = tupleRef.getFieldCount();
+                            for (int i = 1; i < fieldCount; i++) {
+                                byte[] data = tupleRef.getFieldData(i);
+                                int offset = tupleRef.getFieldStart(i);
+                                bbos.setByteArray(data, offset);
+                                vertex.write(output);
+                            }
+                        } else {
+                            // write the vertex id
+                            DataOutput tbOutput = cloneUpdateTb.getDataOutput();
+                            vertex.getVertexId().write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
+
+                            // write the vertex value
+                            vertex.write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
+                        }
+                    }
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        };
+    }
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AccumulatingAggregatorFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AccumulatingAggregatorFactory.java
new file mode 100644
index 0000000..8f63b6e
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AccumulatingAggregatorFactory.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.simpleagg;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.std.group.AggregateState;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunction;
+import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunctionFactory;
+
+/**
+ * Group-by aggregator descriptor that chains a set of IAggregateFunctions.
+ * The aggregate state is a Pair of (per-function output buffers, function
+ * instances); only final output is supported — outputPartialResult throws.
+ */
+public class AccumulatingAggregatorFactory implements IAggregatorDescriptorFactory {
+
+    private static final long serialVersionUID = 1L;
+    private IAggregateFunctionFactory[] aggFactories;
+
+    public AccumulatingAggregatorFactory(IAggregateFunctionFactory[] aggFactories) {
+        this.aggFactories = aggFactories;
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDesc,
+            RecordDescriptor outRecordDescriptor, int[] aggKeys, int[] partialKeys) throws HyracksDataException {
+
+        return new IAggregatorDescriptor() {
+
+            // scratch tuple reference reused across init/aggregate calls; not thread-safe
+            private FrameTupleReference ftr = new FrameTupleReference();
+
+            @Override
+            public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex,
+                    AggregateState state) throws HyracksDataException {
+                Pair<ArrayBackedValueStorage[], IAggregateFunction[]> aggState = (Pair<ArrayBackedValueStorage[], IAggregateFunction[]>) state.state;
+                ArrayBackedValueStorage[] aggOutput = aggState.getLeft();
+                IAggregateFunction[] agg = aggState.getRight();
+
+                // initialize aggregate functions
+                for (int i = 0; i < agg.length; i++) {
+                    aggOutput[i].reset();
+                    try {
+                        agg[i].init();
+                    } catch (Exception e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+
+                ftr.reset(accessor, tIndex);
+                for (int i = 0; i < agg.length; i++) {
+                    try {
+                        agg[i].step(ftr);
+                    } catch (Exception e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+            }
+
+            @Override
+            public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor,
+                    int stateTupleIndex, AggregateState state) throws HyracksDataException {
+                Pair<ArrayBackedValueStorage[], IAggregateFunction[]> aggState = (Pair<ArrayBackedValueStorage[], IAggregateFunction[]>) state.state;
+                IAggregateFunction[] agg = aggState.getRight();
+                ftr.reset(accessor, tIndex);
+                for (int i = 0; i < agg.length; i++) {
+                    try {
+                        agg[i].step(ftr);
+                    } catch (Exception e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+            }
+
+            @Override
+            public void outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex,
+                    AggregateState state) throws HyracksDataException {
+                Pair<ArrayBackedValueStorage[], IAggregateFunction[]> aggState = (Pair<ArrayBackedValueStorage[], IAggregateFunction[]>) state.state;
+                ArrayBackedValueStorage[] aggOutput = aggState.getLeft();
+                IAggregateFunction[] agg = aggState.getRight();
+                for (int i = 0; i < agg.length; i++) {
+                    try {
+                        agg[i].finish();
+                        tupleBuilder.addField(aggOutput[i].getByteArray(), aggOutput[i].getStartOffset(),
+                                aggOutput[i].getLength());
+                    } catch (Exception e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+            }
+
+            @Override
+            public AggregateState createAggregateStates() {
+                IAggregateFunction[] agg = new IAggregateFunction[aggFactories.length];
+                ArrayBackedValueStorage[] aggOutput = new ArrayBackedValueStorage[aggFactories.length];
+                for (int i = 0; i < agg.length; i++) {
+                    aggOutput[i] = new ArrayBackedValueStorage();
+                    try {
+                        agg[i] = aggFactories[i].createAggregateFunction(aggOutput[i]);
+                    } catch (Exception e) {
+                        throw new IllegalStateException(e);
+                    }
+                }
+                return new AggregateState(Pair.of(aggOutput, agg));
+            }
+
+            @Override
+            public void reset() {
+
+            }
+
+            @Override
+            public void outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex,
+                    AggregateState state) throws HyracksDataException {
+                throw new IllegalStateException("this method should not be called");
+            }
+
+            @Override
+            public void close() {
+
+            }
+
+        };
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunction.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunction.java
new file mode 100644
index 0000000..1813dcc
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunction.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.runtime.simpleagg;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunction;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class AggregationFunction implements IAggregateFunction {
+    private final Configuration conf;
+    private final boolean isFinalStage;
+    private final boolean partialAggAsInput;
+    private final DataOutput output;
+    private MessageCombiner combiner;
+    private ByteBufferInputStream keyInputStream = new ByteBufferInputStream();
+    private ByteBufferInputStream valueInputStream = new ByteBufferInputStream();
+    private DataInput keyInput = new DataInputStream(keyInputStream);
+    private DataInput valueInput = new DataInputStream(valueInputStream);
+    private WritableComparable key;
+    private Writable value;
+    private Writable combinedResult;
+    private MsgList msgList = new MsgList();
+    private boolean keyRead = false;
+
+    public AggregationFunction(IConfigurationFactory confFactory, DataOutput output, boolean isFinalStage,
+            boolean partialAggAsInput) throws HyracksDataException {
+        this.conf = confFactory.createConfiguration();
+        this.output = output;
+        this.isFinalStage = isFinalStage;
+        this.partialAggAsInput = partialAggAsInput;
+        msgList.setConf(this.conf);
+
+        combiner = BspUtils.createMessageCombiner(conf);
+        key = BspUtils.createVertexIndex(conf);
+        value = !partialAggAsInput ? BspUtils.createMessageValue(conf) : BspUtils.createPartialCombineValue(conf);
+    }
+
+    @Override
+    public void init() throws HyracksDataException {
+        keyRead = false;
+        combiner.init(msgList);
+    }
+
+    @Override
+    public void step(IFrameTupleReference tuple) throws HyracksDataException {
+        FrameTupleReference ftr = (FrameTupleReference) tuple;
+        IFrameTupleAccessor fta = ftr.getFrameTupleAccessor();
+        ByteBuffer buffer = fta.getBuffer();
+        int tIndex = ftr.getTupleIndex();
+
+        int keyStart = fta.getFieldSlotsLength() + fta.getTupleStartOffset(tIndex) + fta.getFieldStartOffset(tIndex, 0);
+        int valueStart = fta.getFieldSlotsLength() + fta.getTupleStartOffset(tIndex)
+                + fta.getFieldStartOffset(tIndex, 1);
+
+        keyInputStream.setByteBuffer(buffer, keyStart);
+        valueInputStream.setByteBuffer(buffer, valueStart);
+
+        try {
+            if (!keyRead) {
+                key.readFields(keyInput);
+                keyRead = true;
+            }
+            value.readFields(valueInput);
+            if (!partialAggAsInput) {
+                combiner.stepPartial(key, value);
+            } else {
+                combiner.stepFinal(key, value);
+            }
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+
+    }
+
+    @Override
+    public void finish() throws HyracksDataException {
+        try {
+            if (!isFinalStage) {
+                combinedResult = combiner.finishPartial();
+            } else {
+                combinedResult = combiner.finishFinal();
+            }
+            combinedResult.write(output);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunctionFactory.java
new file mode 100644
index 0000000..a09f688
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunctionFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.runtime.simpleagg;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunction;
+import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunctionFactory;
+
+public class AggregationFunctionFactory implements IAggregateFunctionFactory {
+    private static final long serialVersionUID = 1L;
+    private final IConfigurationFactory confFactory;
+    private final boolean isFinalStage;
+    private final boolean partialAggAsInput;
+
+    public AggregationFunctionFactory(IConfigurationFactory confFactory, boolean isFinalStage, boolean partialAggAsInput) {
+        this.confFactory = confFactory;
+        this.isFinalStage = isFinalStage;
+        this.partialAggAsInput = partialAggAsInput;
+    }
+
+    @Override
+    public IAggregateFunction createAggregateFunction(IDataOutputProvider provider) throws HyracksException {
+        DataOutput output = provider.getDataOutput();
+        return new AggregationFunction(confFactory, output, isFinalStage, partialAggAsInput);
+    }
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/MergePartitionComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/MergePartitionComputerFactory.java
new file mode 100644
index 0000000..8edddf6
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/MergePartitionComputerFactory.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class MergePartitionComputerFactory implements ITuplePartitionComputerFactory {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public ITuplePartitionComputer createPartitioner() {
+        return new ITuplePartitionComputer() {
+
+            @Override
+            public int partition(IFrameTupleAccessor accessor, int tIndex, int nParts) throws HyracksDataException {
+                return 0;
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/MsgListNullWriterFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/MsgListNullWriterFactory.java
new file mode 100644
index 0000000..97eac11
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/MsgListNullWriterFactory.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class MsgListNullWriterFactory implements INullWriterFactory {
+    private static final long serialVersionUID = 1L;
+    public static INullWriterFactory INSTANCE = new MsgListNullWriterFactory();
+
+    @Override
+    public INullWriter createNullWriter() {
+        return new INullWriter() {
+
+            @Override
+            public void writeNull(DataOutput out) throws HyracksDataException {
+                try {
+                    out.writeInt(0);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PostSuperStepRuntimeHookFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PostSuperStepRuntimeHookFactory.java
new file mode 100644
index 0000000..2150f2e
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PostSuperStepRuntimeHookFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHook;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+
+public class PostSuperStepRuntimeHookFactory implements IRuntimeHookFactory {
+    private static final long serialVersionUID = 1L;
+    private final String giraphJobId;
+
+    public PostSuperStepRuntimeHookFactory(String giraphJobId) {
+        this.giraphJobId = giraphJobId;
+    }
+
+    @Override
+    public IRuntimeHook createRuntimeHook() {
+        return new IRuntimeHook() {
+
+            @Override
+            public void configure(IHyracksTaskContext ctx) throws HyracksDataException {
+                IterationUtils.endSuperStep(giraphJobId, ctx);
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PreSuperStepRuntimeHookFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PreSuperStepRuntimeHookFactory.java
new file mode 100644
index 0000000..5f0ed9e
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PreSuperStepRuntimeHookFactory.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import org.apache.hadoop.conf.Configuration;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHook;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+
+public class PreSuperStepRuntimeHookFactory implements IRuntimeHookFactory {
+    private static final long serialVersionUID = 1L;
+    private final IConfigurationFactory confFactory;
+    private final String giraphJobId;
+
+    public PreSuperStepRuntimeHookFactory(String giraphJobId, IConfigurationFactory confFactory) {
+        this.confFactory = confFactory;
+        this.giraphJobId = giraphJobId;
+    }
+
+    @Override
+    public IRuntimeHook createRuntimeHook() {
+        return new IRuntimeHook() {
+
+            @Override
+            public void configure(IHyracksTaskContext ctx) throws HyracksDataException {
+                Configuration conf = confFactory.createConfiguration();
+                IterationUtils.setProperties(giraphJobId, ctx, conf);
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
new file mode 100644
index 0000000..d968262
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHook;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+
+public class RuntimeHookFactory implements IRuntimeHookFactory {
+
+    private static final long serialVersionUID = 1L;
+    private final IConfigurationFactory confFactory;
+
+    public RuntimeHookFactory(IConfigurationFactory confFactory) {
+        this.confFactory = confFactory;
+    }
+
+    @Override
+    public IRuntimeHook createRuntimeHook() {
+
+        return new IRuntimeHook() {
+            private ContextFactory ctxFactory = new ContextFactory();
+
+            @Override
+            public void configure(IHyracksTaskContext ctx) throws HyracksDataException {
+                Configuration conf = confFactory.createConfiguration();
+                try {
+                    TaskAttemptContext mapperContext = ctxFactory.createContext(conf, null);
+                    Vertex.setContext(mapperContext);
+                    BspUtils.setDefaultConfiguration(conf);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongAscNormalizedKeyComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..9181691
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,56 @@
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.pregelix.api.util.SerDeUtils;
+
+public class VLongAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private static final int POSTIVE_LONG_MASK = (3 << 30);
+            private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
+            private static final int NEGATIVE_LONG_MASK = (0 << 30);
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                long value = SerDeUtils.readVLong(bytes, start, length);
+                int highValue = (int) (value >> 32);
+                if (highValue > 0) {
+                    /**
+                     * larger than Integer.MAX
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= POSTIVE_LONG_MASK;
+                    return highNmk;
+                } else if (highValue == 0) {
+                    /**
+                     * smaller than Integer.MAX but >=0
+                     */
+                    int lowNmk = (int) value;
+                    lowNmk >>= 2;
+                    lowNmk |= NON_NEGATIVE_INT_MASK;
+                    return lowNmk;
+                } else {
+                    /**
+                     * less than 0; TODO: have not optimized for that
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= NEGATIVE_LONG_MASK;
+                    return highNmk;
+                }
+            }
+
+            private int getKey(int value) {
+                long unsignedFirstValue = (long) value;
+                int nmk = (int) ((unsignedFirstValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+                return nmk;
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongDescNormalizedKeyComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..6b2738b
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,23 @@
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class VLongDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+    private static final long serialVersionUID = 1L;
+    private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new VLongAscNormalizedKeyComputerFactory();
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory.createNormalizedKeyComputer();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int nk = nmkComputer.normalize(bytes, start, length);
+                return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
+            }
+
+        };
+    }
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdNullWriterFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdNullWriterFactory.java
new file mode 100644
index 0000000..f0114dd
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdNullWriterFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class VertexIdNullWriterFactory implements INullWriterFactory {
+    private static final long serialVersionUID = 1L;
+    public static INullWriterFactory INSTANCE = new VertexIdNullWriterFactory();
+
+    @Override
+    public INullWriter createNullWriter() {
+        return new INullWriter() {
+
+            @Override
+            public void writeNull(DataOutput out) throws HyracksDataException {
+                try {
+                    /***
+                     * TODO: for now it only works for VLongWritable vertexId
+                     */
+                    out.write(0);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+        };
+    }
+
+}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdPartitionComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdPartitionComputerFactory.java
new file mode 100644
index 0000000..5eff497
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdPartitionComputerFactory.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import java.io.DataInputStream;
+
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+
+public class VertexIdPartitionComputerFactory<K extends Writable, V extends Writable> implements
+        ITuplePartitionComputerFactory {
+    private static final long serialVersionUID = 1L;
+    private final ISerializerDeserializer<K> keyIO;
+
+    public VertexIdPartitionComputerFactory(ISerializerDeserializer<K> keyIO) {
+        this.keyIO = keyIO;
+    }
+
+    public ITuplePartitionComputer createPartitioner() {
+        return new ITuplePartitionComputer() {
+            private final ByteBufferInputStream bbis = new ByteBufferInputStream();
+            private final DataInputStream dis = new DataInputStream(bbis);
+
+            public int partition(IFrameTupleAccessor accessor, int tIndex, int nParts) throws HyracksDataException {
+                int keyStart = accessor.getTupleStartOffset(tIndex) + accessor.getFieldSlotsLength()
+                        + accessor.getFieldStartOffset(tIndex, 0);
+                bbis.setByteBuffer(accessor.getBuffer(), keyStart);
+                K key = keyIO.deserialize(dis);
+                return Math.abs(key.hashCode() % nParts);
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/WritableDescComparingBinaryComparatorFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/WritableDescComparingBinaryComparatorFactory.java
new file mode 100644
index 0000000..2c7f4b2
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/WritableDescComparingBinaryComparatorFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import org.apache.hadoop.io.RawComparator;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.util.ReflectionUtils;
+
+public class WritableDescComparingBinaryComparatorFactory<T> implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    private Class<? extends RawComparator<T>> cmpClass;
+
+    public WritableDescComparingBinaryComparatorFactory(Class<? extends RawComparator<T>> cmpClass) {
+        this.cmpClass = cmpClass;
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        final RawComparator<T> instance = ReflectionUtils.createInstance(cmpClass);
+        return new IBinaryComparator() {
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                return -instance.compare(b1, s1, l1, b2, s2, l2);
+            }
+        };
+    }
+}
\ No newline at end of file